Compare revisions
@@ -12,3 +12,4 @@ nginx.conf
uwsgi.sock
.coverage
htmlcov/
.idea/
unittest:
image: debian:stretch
linter:
image: registry.git.fsmpi.rwth-aachen.de/videoaginfra/testenvs/bullseye
stage: test
script:
- apt update
- apt install -y python3
- uname -a
- python3 -V
- pylint --version
- pylint --rcfile=.pylintrc *.py | tee pylint.txt
artifacts:
paths:
- pylint.txt
unittest: &unittest
image: registry.git.fsmpi.rwth-aachen.de/videoaginfra/testenvs/bullseye
stage: test
script:
- uname -a
- apt install -y sqlite3 locales-all git python3-flask python3-ldap3 python3-requests python3-lxml python3-icalendar python3-mysql.connector python3-coverage
- python3 -m coverage run tests.py
- python3 -V
- python3 -m coverage run run_tests.py
- python3 -m coverage report --include "./*"
- python3 -m coverage report -m --include "./*" > report.txt
- python3 -m coverage html --include "./*"
@@ -16,21 +25,3 @@ unittest:
- htmlcov/*
- .coverage
- report.txt
livetest:
image: debian:stretch
stage: test
script:
- apt update
- apt install -y python3
- python3 -V
- uname -a
- apt install -y python3-requests
# - ./tests/urlcheck_sinf.py
deploy_staging:
image: archlinux/base
stage: deploy
script:
- pacman --noconfirm -Sy ansible git
\ No newline at end of file
[MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loaded into the active Python interpreter and may
# run arbitrary code.
extension-pkg-whitelist=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use.
jobs=1
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Specify a configuration file.
#rcfile=
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=consider-using-dict-items,
consider-using-f-string,
consider-using-with,
cyclic-import, # remove me later, should be fixed but needs major refactoring
function-redefined, # remove me later
implicit-str-concat,
invalid-name,
line-too-long,
missing-function-docstring,
missing-module-docstring,
no-else-return,
pointless-string-statement, # docstrings were misdetected
redefined-builtin,
redefined-outer-name, # remove me later
too-many-return-statements,
trailing-whitespace,
unneeded-not,
unspecified-encoding,
unused-variable,
unused-wildcard-import,
use-implicit-booleaness-not-comparison,
use-maxsplit-arg,
wildcard-import
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifiers separated by comma (,) or put this option
# multiple times (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[REPORTS]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'error', 'warning', 'refactor', and 'convention'
# which contain the number of messages in each category, as well as 'statement'
# which is the total number of statements analyzed. This score is used by the
# global evaluation report (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
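# Worked example with made-up counts: 2 errors, 10 warnings, 5 refactor and 3
# convention messages across 400 statements give
# 10.0 - ((5*2 + 10 + 5 + 3) / 400) * 10 = 10.0 - 0.7 = 9.3.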
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=yes
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete names of functions that never return. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit
[SIMILARITIES]
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
# Minimum lines number of a similarity.
min-similarity-lines=4
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
# List of decorators that change the signature of a decorated function.
signature-mutators=
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style.
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style.
#class-attribute-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
j,
e,
k,
f,
r,
ex,
Run,
_,
id,
db,
ip,
app,
config,
cur
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style.
#variable-rgx=
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
XXX,
TODO
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
_cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
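# With this pattern, names such as "_", "__", "_tmp", "dummy", "ignored_foo" or
# "unused_bar" (illustrative examples, not from this repo) count as
# intentionally unused.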
# Argument names that match this expression will be ignored. Defaults to names
# with a leading underscore.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
[STRING]
# This flag controls whether the implicit-str-concat-in-sequence should
# generate a warning on implicit string concatenation in sequences defined over
# several lines.
check-str-concat-over-line-jumps=no
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. Available dictionaries: none. To make it work,
# install the python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=1
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=\t
# Maximum number of characters on a single line.
max-line-length=160
# Maximum number of lines in a module.
max-module-lines=1000
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[LOGGING]
# Format style used to check logging format string. `old` means using %
# formatting, `new` is for `{}` formatting, and `fstr` is for f-strings.
logging-format-style=old
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[IMPORTS]
# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=optparse,tkinter.tix
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled).
ext-import-graph=
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled).
import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
__new__,
setUp,
__post_init__
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=cls
[DESIGN]
# Maximum number of arguments for function / method.
max-args=5
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5
# Maximum number of branch for function / method body.
max-branches=12
# Maximum number of locals for function / method body.
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body.
max-returns=6
# Maximum number of statements in function / method body.
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "BaseException, Exception".
overgeneral-exceptions=BaseException,
Exception
FROM ubuntu
RUN mkdir -p /code
COPY requirements.txt /code
WORKDIR /code
RUN apt update && apt install python3 python3-flask sqlite python3-requests python3-lxml python3-ldap3 python3-icalendar python3-mysql.connector locales -y
RUN locale-gen de_DE.utf8
RUN apt install git -y
# Install uwsgi
RUN apt update && apt install python3-pip -y
RUN pip3 install uwsgi
# Install pylint
RUN pip3 install pylint
# Install nginx
RUN apt install nginx -y
COPY . /code
CMD ["bash", "/code/docker_start.sh"]
@@ -6,7 +6,7 @@ Note: this variant starts a local test version of the website; there are no…
1. Clone the repo
2. Enter the directory
3. (optional) Adjust config.py.example and save it as config.py
3. (optional) Adjust config.py.example and save it as config.py (e.g. DEBUG = True)
4. Check that all dependencies are met (see further below)
5. Run `./run.py`
6. The website is then available at [http://localhost:5000](http://localhost:5000)
@@ -15,9 +15,9 @@ Note: this variant starts a local test version of the website; there are no…
Alternatively, especially for testing the access restrictions: see `nginx.example.conf`.
### Unit tests
Tests can be run with `./tests.py`.
Tests can be run with `./run_tests.py`.
Coverage tests can be run with `rm .coverage; python -m coverage run tests.py; python -m coverage html`. This creates a folder `htmlcov` containing the HTML output.
Coverage tests can be run with `rm .coverage; python -m coverage run run_tests.py; python -m coverage html`. This creates a folder `htmlcov` containing the HTML output.
### Contributing:
1. Fork the repo to your own user, using the "Fork" button on the website
@@ -35,17 +35,41 @@ Origin here represents your user, Upstream the original of the group videoagwebsit…
### Dependencies
Required:
* python (version 3)
* sqlite
* sqlite3 (Python builtin)
* python-flask
* python-requests (used by the L2P and the calendar import, cannot be included optionally)
* git (for displaying the current version)
Optional (needed for individual features):
* python-lxml (Campus import)
* python-ldap (login with a Fachschaft account)
* python-icalendar (SoGo calendar import for meeting announcements)
* python-lxml (Campus and RO import)
* python-pytz (RO import)
* python-ldap3 (login with a Fachschaft account)
* python-icalendar (RO import, calendar import for meeting announcements)
* python-mysql-connector (if MySQL is to be used as the database)
* python-coverage (needed for coverage tests)
Short form on Ubuntu:
`sudo apt install python3 python3-flask sqlite python3-requests python3-lxml python3-ldap3 python3-icalendar python3-mysql.connector`
With Python's own package manager:
`pip install -r requirements.txt`
---
### Alternative: Docker image
As an alternative to the setup above, Docker can be used to start the test version for local testing:
1. Build the image locally with `docker build -t videoag .` in this folder.
2. Start a corresponding container, e.g.: `docker run --rm --name=videoag -p 5000:5000 videoag`
- `--rm` removes the container once it terminates
- `-p 5000:5000` maps the port so the host can reach the website. Do not change the local port, otherwise thumbnails or videos may no longer load.
- Additionally, `-v /lokaler/pfad/:/code` can be used to mount the source code in the container from the host.
3. Visit the website at `localhost:5000`.
In this variant, the following should be set in `config.py`:
```
SERVER_IP = 'localhost'
VIDEOPREFIX = '/files'
```
@@ -2,7 +2,7 @@ import json
from server import *
@job_handler('probe', 'probe-raw')
def import_xmp_chapters(jobid, jobtype, data, state, status):
def import_xmp_chapters(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
if 'lecture_id' not in data or not data.get('import-chapters', False):
return
times = set()
@@ -14,19 +14,19 @@ def import_xmp_chapters(jobid, jobtype, data, state, status):
for chapter in status.get('xmp_chapters', []):
if int(chapter['time']) in times:
continue
modify('INSERT INTO chapters (lecture_id, time, text, visible, time_created, time_updated) VALUES (?, ?, ?, 0, ?, ?)',
modify(
'INSERT INTO chapters (lecture_id, time, text, visible, time_created, time_updated) VALUES (?, ?, ?, false, ?, ?)',
data['lecture_id'], int(chapter['time']), chapter['text'],
datetime.now(), datetime.now())
datetime.now(), datetime.now()
)
@app.route('/internal/newchapter/<int:lectureid>', methods=['POST', 'GET'])
def suggest_chapter(lectureid):
time = request.values['time']
text = request.values['text']
assert(time and text)
assert 'time' in request.values and 'text' in request.values
try:
x = datetime.strptime(time,'%H:%M:%S')
time= timedelta(hours=x.hour,minutes=x.minute,seconds=x.second).total_seconds()
time = int(time)
parsed_datetime = datetime.strptime(request.values['time'], '%H:%M:%S')
chapter_start = int(timedelta(hours=parsed_datetime.hour, minutes=parsed_datetime.minute, seconds=parsed_datetime.second).total_seconds())
except ValueError:
if 'ref' in request.values:
flash('Falsches Zeitformat, "%H:%M:%S" wird erwartet. Z.B. "01:39:42" für eine Kapitel bei Stunde 1, Minute 39, Sekunde 42')
@@ -38,9 +38,13 @@ def suggest_chapter(lectureid):
submitter = request.environ['REMOTE_ADDR']
lecture = query('SELECT * FROM lectures WHERE id = ?', lectureid)[0]
course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0]
id = modify('INSERT INTO chapters (lecture_id, time, text, time_created, time_updated, created_by, submitted_by) VALUES (?, ?, ?, ?, ?, ?, ?)',
lectureid, time, text, datetime.now(), datetime.now(), session.get('user', {'dbid':None})['dbid'], submitter)
id = modify(
'INSERT INTO chapters (lecture_id, time, text, time_created, time_updated, created_by, submitted_by) VALUES (?, ?, ?, ?, ?, ?, ?)',
lectureid, chapter_start, text, datetime.now(), datetime.now(), session.get('user', {'dbid':None})['dbid'], submitter,
get_id=True
)
chapter = query('SELECT * FROM chapters WHERE id = ?', id)[0]
if not ismod():
notify_mods('chapter_submitted', course['id'], course=course, lecture=lecture, chapter=chapter)
if 'ref' in request.values:
return redirect(request.values['ref'])
@@ -52,10 +56,10 @@ def chapters(lectureid):
if not chapters:
return 'No chapters found', 404
last = None
for c in chapters:
c['start'] = c['time']
c['end'] = last['start'] if last else 9999
last = c
for chapter in chapters:
chapter['start'] = chapter['time']
chapter['end'] = last['start'] if last else 9999 #pylint: disable=unsubscriptable-object
last = chapter
if 'json' in request.values:
return Response(json.dumps([{'time': c['time'], 'text': c['text']} for c in chapters]), mimetype='application/json')
return Response(render_template('chapters.srt', chapters=chapters), 200, {'Content-Type':'text/vtt'})
# Defaults for development ,do not use in production!
DEBUG = False
SERVER_IP = 'localhost'
VIDEOPREFIX = 'https://videoag.fsmpi.rwth-aachen.de'
VIDEOPREFIX = '/files'
VIDEOMOUNT = [{'mountpoint': 'files/protected/', 'prefix':'protected/'},{'mountpoint':'files/pub/','prefix':'pub/' }, {'mountpoint':'files/vpnonline/','prefix':'vpnonline/' }]
#SECRET_KEY = 'something random'
@@ -16,6 +16,13 @@ DB_DATA = 'db_example.sql'
#MYSQL_PASSWD = 'somuchsecret'
#MYSQL_DB = 'videos'
#DB_ENGINE = 'postgres'
POSTGRES_HOST = '10.0.0.101'
POSTGRES_PORT = 5432
POSTGRES_USER = 'videoag'
POSTGRES_PASSWORD = ''
POSTGRES_DATABASE = 'videoag'
DB_ENGINE = 'sqlite'
SQLITE_DB = 'db.sqlite'
SQLITE_INIT_SCHEMA = True
@@ -29,7 +36,8 @@ LDAP_GROUPS = ['fachschaft']
#ICAL_URL = 'https://user:password@mail.fsmpi.rwth-aachen.de/SOGo/....ics'
ERROR_PAGE = 'static/500.html'
RWTH_IP_RANGES = ['134.130.0.0/16', '137.226.0.0/16', '134.61.0.0/16', '192.35.229.0/24', '2a00:8a60::/32']
FSMPI_IP_RANGES = ['137.226.35.192/29', '137.226.75.0/27', '137.226.127.32/27', '137.226.231.192/26', '134.130.102.0/26' ]
FSMPI_IP_RANGES = ['137.226.35.192/29', '137.226.75.0/27', '137.226.127.32/27', '137.226.231.192/26', '134.130.102.0/26', '127.0.0.1/32']
INTERNAL_IP_RANGES = ['127.0.0.0/8', '192.168.155.0/24', 'fd78:4d90:6fe4::/48']
DISABLE_SCHEDULER = False
#MAIL_SERVER = 'mail.fsmpi.rwth-aachen.de'
MAIL_FROM = 'Video AG-Website <videoag-it@lists.fsmpi.rwth-aachen.de>'
@@ -37,3 +45,5 @@ MAIL_FROM = 'Video AG-Website <videoag-it@lists.fsmpi.rwth-aachen.de>'
MAIL_SUFFIX = 'fsmpi.rwth-aachen.de'
MAIL_DEFAULT = 'Video AG <videoag@fsmpi.rwth-aachen.de>'
MAIL_ADMINS = 'videoag-it@lists.fsmpi.rwth-aachen.de'
STREAMING_SERVER = 'rtmp://video-web-0.fsmpi.rwth-aachen.de/src/'
BACKUP_STREAMING_SERVER = 'rtmp://video-web-1.fsmpi.rwth-aachen.de/src/'
from server import *
from datetime import time
@register_navbar('Schnittfortschritt User', icon='spinner', iconlib='fa', userendpoint=True, endpoint='cutprogress_user')
@register_navbar('Schnittfortschritt', icon='spinner', iconlib='fa')
@@ -7,22 +6,85 @@ from datetime import time
@app.route('/internal/user/<int:user>/cutprogress', endpoint='cutprogress_user')
@mod_required
def cutprogress(user=None):
allsemester = query('SELECT DISTINCT semester from courses ORDER BY semester DESC');
allsemester = query('SELECT DISTINCT semester from courses ORDER BY semester DESC')
semester = request.values.get('semester', allsemester[0]['semester'])
coursesraw = query('SELECT courses.id, courses.handle, courses.short FROM courses WHERE semester = ?', semester)
courses = []
maxlecturecount = 0
for course in coursesraw:
course['lectures'] = query('SELECT lectures.title, lectures.time, lectures.id FROM lectures WHERE lectures.course_id= ? AND NOT lectures.deleted AND NOT lectures.norecording ORDER BY lectures.time', course['id'])
for lecture in course['lectures']:
lecture['videos'] = query('SELECT videos.path, formats.description as formatdesc, videos.visible FROM videos JOIN formats ON (videos.video_format = formats.id) WHERE videos.lecture_id = ? AND NOT videos.deleted', lecture['id'])
course['responsible'] = query('''SELECT users.*
FROM responsible
JOIN users ON (responsible.user_id = users.id AND responsible.course_id = ?)
ORDER BY users.realname ASC''', course['id'])
if len(course['responsible']) == 0:
course['responsible'] = [{"realname": "Niemand", "id": -1}]
if not user or user in [ r['id'] for r in course['responsible'] ]:
courses.append(course)
maxlecturecount = max(len(course['lectures']),maxlecturecount)
return render_template('cutprogress.html', allsemester=allsemester, semester=semester, courses=courses, maxlecturecount=maxlecturecount, user=query('SELECT * FROM users WHERE id = ?', user)[0] if user else None)
courses = query('''
SELECT courses.id, courses.handle, courses.short
FROM courses
WHERE semester = ?
ORDER by id DESC
''', semester)
# Fetch list of people responsible for every course
for course in courses:
people = query('''
SELECT users.*
FROM users
JOIN responsible ON responsible.user_id = users.id
WHERE responsible.course_id = ?
ORDER BY users.realname ASC
''', course['id'])
if not people:
people = [{'realname': 'Niemand', 'id': -1}]
course['responsible'] = people
if user is not None:
courses = [
c for c in courses
if user in (r['id'] for r in c['responsible'])
]
# Fetch lectures for all courses
lectures = []
for course in courses:
lectures += query('''
SELECT
lectures.id,
lectures.course_id,
lectures.time,
lectures.title,
COALESCE(video_counts.videos_total, 0) AS videos_total,
COALESCE(video_counts.videos_visible, 0) AS videos_visible
FROM lectures
JOIN courses ON ( courses.id = lectures.course_id )
LEFT JOIN (
SELECT
videos.lecture_id,
COUNT(videos.id) as videos_total,
COUNT(videos.visible) as videos_visible
FROM videos
GROUP BY videos.lecture_id
) AS video_counts ON ( video_counts.lecture_id = lectures.id )
WHERE courses.id = ?
AND lectures.time <= ?
AND NOT lectures.norecording
ORDER BY lectures.time ASC, lectures.id ASC
''', course['id'], datetime.now())
# Generate list of days, figure out when weeks change
dates = sorted({row['time'].date() for row in lectures}, reverse=True)
is_new_weeks = [
False if (i == 0) else thisdate.isocalendar()[1] != dates[i-1].isocalendar()[1]
for i, thisdate in enumerate(dates)
]
# Sort into cells
tablebody = [
{
'date': date, # row header
'is_new_week': is_new_week,
'cells': [ # this is the body of the row
[ # this list is a cell
lecture
for lecture in lectures
if lecture['course_id'] == course['id'] and lecture['time'].date() == date
]
for course in courses
]
}
for date, is_new_week in zip(dates, is_new_weeks)
]
return render_template('cutprogress.html',
# dropdown selection
allsemester=allsemester, # options
semester=semester, # choice
user=query('SELECT * FROM users WHERE id = ?', user)[0] if user else None,
# content
courses=courses,
tablebody=tablebody
)
import sqlite3
from flask import g
from server import *
if config['DB_ENGINE'] == 'sqlite':
import sqlite3
# From sqlite3 module, but with error catching
def convert_timestamp(val):
try:
@@ -19,13 +20,13 @@ if config['DB_ENGINE'] == 'sqlite':
sqlite3.register_converter('timestamp', convert_timestamp)
if config['DB_ENGINE'] == 'sqlite':
created = not os.path.exists(config['SQLITE_DB'])
DBCREATED = not os.path.exists(config['SQLITE_DB'])
db = sqlite3.connect(config['SQLITE_DB'])
cur = db.cursor()
if config['SQLITE_INIT_SCHEMA']:
print('Init db schema')
cur.executescript(open(config['DB_SCHEMA']).read())
if config['SQLITE_INIT_DATA'] and created:
if config['SQLITE_INIT_DATA'] and DBCREATED:
print('Init db data')
cur.executescript(open(config['DB_DATA']).read())
db.commit()
@@ -43,15 +44,21 @@ if config['DB_ENGINE'] == 'sqlite':
params = [(p.replace(microsecond=0) if isinstance(p, datetime) else p) for p in params]
return operation, params
def show(operation, host=None):
def show(operation, host=None): #pylint: disable=unused-argument
return {}
elif config['DB_ENGINE'] == 'mysql':
import mysql.connector
def get_dbcursor():
if 'db' not in g or not g.db.is_connected():
g.db = mysql.connector.connect(user=config['MYSQL_USER'], password=config['MYSQL_PASSWD'], host=config.get('MYSQL_HOST', None), port=config.get('MYSQL_PORT', 3306), unix_socket=config.get('MYSQL_UNIX', None), database=config['MYSQL_DB'])
g.db = mysql.connector.connect(
user=config['MYSQL_USER'],
password=config['MYSQL_PASSWD'],
host=config.get('MYSQL_HOST', None),
port=config.get('MYSQL_PORT', 3306),
unix_socket=config.get('MYSQL_UNIX', None),
database=config['MYSQL_DB'])
g.db.cmd_query("SET SESSION sql_mode = 'ANSI_QUOTES'")
if not hasattr(request, 'db'):
request.db = g.db.cursor()
return request.db
@@ -71,8 +78,8 @@ elif config['DB_ENGINE'] == 'mysql':
rows = []
try:
rows = cur.fetchall()
except mysql.connector.errors.InterfaceError as ie:
if ie.msg == 'No result set to fetch from.':
except mysql.connector.errors.InterfaceError as e:
if e.msg == 'No result set to fetch from.':
# no problem, we were just at the end of the result set
pass
else:
@@ -83,26 +90,50 @@ elif config['DB_ENGINE'] == 'mysql':
cur.close()
db.close()
return res
elif config['DB_ENGINE'] == 'postgres':
import psycopg2 # pylint: disable=import-error
def get_dbcursor():
if 'db' not in g or g.db.closed:
g.db = psycopg2.connect(
host=config["POSTGRES_HOST"],
port=config["POSTGRES_PORT"],
user=config["POSTGRES_USER"],
password=config["POSTGRES_PASSWORD"],
dbname=config["POSTGRES_DATABASE"]
)
if not hasattr(request, 'db'):
request.db = g.db.cursor()
return request.db
def fix_query(operation, params):
operation = operation.replace('?', '%s')
params = [(p.replace(microsecond=0) if isinstance(p, datetime) else p) for p in params]
return operation, params
def show(operation, host=None): #pylint: disable=unused-argument
return {}
def query(operation, *params, delim="sep", nlfix=True):
operation, params = fix_query(operation, params)
tries = 0
while (tries < 10):
retry = True
while tries < 10 and retry:
retry = False
try:
cur = get_dbcursor()
cur.execute(operation, params)
except mysql.connector.errors.InternalError as e:
if e.msg == 'Deadlock found when trying to get lock; try restarting transaction':
except Exception as e: # pylint: disable=broad-except
if str(e) == 'Deadlock found when trying to get lock; try restarting transaction':
tries += 1
continue
retry = True
else:
raise
break
rows = []
try:
rows = cur.fetchall()
except mysql.connector.errors.InterfaceError as ie:
if ie.msg == 'No result set to fetch from.':
except Exception as e: # pylint: disable=broad-except
if str(e) == 'no results to fetch' or str(e) == "the last operation didn't produce a result":
# no problem, we were just at the end of the result set
pass
else:
@@ -116,42 +147,34 @@ def query(operation, *params, delim="sep", nlfix=True):
if name == delim:
ptr = res[-1][col] = {}
continue
if type(col) == str and nlfix:
if isinstance(col, str) and nlfix:
col = col.replace('\\n', '\n').replace('\\r', '\r')
ptr[name] = col
return res
def modify(operation, *params):
def modify(operation, *params, get_id=False):
operation, params = fix_query(operation, params)
if get_id and config["DB_ENGINE"] == "postgres":
operation += " RETURNING id" # Not nice, but works for now
cur = get_dbcursor()
cur.execute(operation, params)
if not get_id:
return None
if config["DB_ENGINE"] != "postgres":
return cur.lastrowid
all_res = cur.fetchall()
if len(all_res) <= 0:
raise ValueError("Got no id")
return int(all_res[0][0])
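# The get_id flag hides the engine split for fetching a freshly generated
# primary key: on postgres, "RETURNING id" is appended and fetched, while the
# other engines return cursor.lastrowid. A usage sketch (the statement is
# illustrative; the pattern matches the call sites in this changeset):
#
#   new_id = modify('INSERT INTO responsible (course_id, user_id) VALUES (?, ?)',
#                   course_id, user_id, get_id=True)
#   row = query('SELECT * FROM responsible WHERE id = ?', new_id)[0]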
@app.teardown_request
def commit_db(*args):
def commit_db(*args): #pylint: disable=unused-argument
if hasattr(request, 'db'):
request.db.close()
g.db.commit()
@app.teardown_appcontext
def close_db(*args):
def close_db(*args): #pylint: disable=unused-argument
if 'db' in g:
g.db.close()
del g.db
def searchquery(text, columns, match, tables, suffix, *suffixparams):
params = []
subexprs = []
words = text.split(' ')
prio = len(words)+1
for word in words:
if word == '' or word.isspace():
continue
matchexpr = ' OR '.join(['%s LIKE ?'%column for column in match])
subexprs.append('SELECT %s, %s AS _prio FROM %s WHERE %s'%(columns, str(prio), tables, matchexpr))
params += ['%'+word+'%']*len(match)
prio -= 1
if subexprs == []:
return []
expr = 'SELECT *,SUM(_prio) AS _score FROM (%s) AS _tmp %s'%(' UNION '.join(subexprs), suffix)
return query(expr, *(list(params)+list(suffixparams)))
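# searchquery() builds one UNION of per-word LIKE subqueries, giving earlier
# words a higher _prio and letting the caller aggregate and sort via the
# suffix. A sketch with assumed arguments (table and columns are illustrative):
#
#   rows = searchquery('linear algebra', 'id,title', ['title'], 'courses',
#                      'GROUP BY id ORDER BY _score DESC')
#
# which expands to roughly:
#
#   SELECT *,SUM(_prio) AS _score FROM (
#     SELECT id,title, 3 AS _prio FROM courses WHERE title LIKE ?   -- '%linear%'
#     UNION
#     SELECT id,title, 2 AS _prio FROM courses WHERE title LIKE ?   -- '%algebra%'
#   ) AS _tmp GROUP BY id ORDER BY _score DESC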
@@ -8611,6 +8611,7 @@ INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,
INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,`title`,`comment`,`internal`,`path`,`file_modified`,`time_created`,`time_updated`,`created_by`,`file_size`,`video_format`,`hash`) VALUES (9681,7012,1,0,1,'','','','pub/16ss-dsal/16ss-dsal-160715-1080p_1.mp4','2016-08-07 22:54:46','2016-08-07 21:02:37','2016-08-07 21:02:43',46,1402602183,4,'e036f7cbd51afd3ab7be10cf77747c00');
INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,`title`,`comment`,`internal`,`path`,`file_modified`,`time_created`,`time_updated`,`created_by`,`file_size`,`video_format`,`hash`) VALUES (9682,7012,1,0,1,'','','','pub/16ss-dsal/16ss-dsal-160715-360p_1.mp4','2016-08-07 22:45:34','2016-08-07 21:02:38','2016-08-07 21:02:45',46,368611109,10,'fae2bda2da55a3005aa6329a2d0227c3');
INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,`title`,`comment`,`internal`,`path`,`file_modified`,`time_created`,`time_updated`,`created_by`,`file_size`,`video_format`,`hash`) VALUES (9683,7012,1,0,1,'','','','pub/16ss-dsal/16ss-dsal-160715-720p_1.mp4','2016-08-07 22:46:00','2016-08-07 21:02:40','2016-08-07 21:02:44',46,721141077,5,'083c0b7693c82078c513707d1402096b');
INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,`title`,`comment`,`internal`,`path`,`file_modified`,`time_created`,`time_updated`,`created_by`,`file_size`,`video_format`,`hash`, `source`) VALUES (16080,7012,1,0,1,'','','','pub/17ws-cgbp/17ws-cgbp-171114-720p.mp4','2018-01-12 04:36:44','2018-01-12 04:36:44','2018-01-12 04:36:44',-1,607257928,5,"8fa956b14162ec42c1dabc11d53671c5",89);
INSERT INTO `users` (`id`,`name`,`realname`,`level`,`fsacc`,`last_login`,`calendar_key`,`rfc6238`) VALUES (1,'gustav1','Gustav Geier',0,'',NULL,'','');
INSERT INTO `users` (`id`,`name`,`realname`,`level`,`fsacc`,`last_login`,`calendar_key`,`rfc6238`) VALUES (2,'gustav2','Gustav Geier',0,'',NULL,'','');
INSERT INTO `users` (`id`,`name`,`realname`,`level`,`fsacc`,`last_login`,`calendar_key`,`rfc6238`) VALUES (4,'gustav4','Gustav Geier',0,'',NULL,'','');
@@ -14646,4 +14647,5 @@ INSERT INTO `areas` (`area`,`abbreviation`,`default`,`rank`,`coordinates`) VALUE
INSERT INTO `profiles` (`name`,`format`) VALUES ('default',4);
INSERT INTO `profiles` (`name`,`format`) VALUES ('default',5);
INSERT INTO `profiles` (`name`,`format`) VALUES ('default',10);
INSERT INTO `sources` (`id`, `lecture_id`, `path`, `type`, `hash`, `time_created`) VALUES (89, 7012, 'autoencode/something', 'plain', '000000000', '2024-01-01 00:00:00');
COMMIT;
@@ -47,7 +47,7 @@ CREATE TABLE IF NOT EXISTS `courses_data` (
`deleted` INTEGER NOT NULL DEFAULT '0',
`title` text NOT NULL DEFAULT '',
`short` varchar(32) NOT NULL DEFAULT '',
`handle` varchar(32) NOT NULL DEFAULT '',
`handle` varchar(32) NOT NULL,
`organizer` text NOT NULL DEFAULT '',
`subject` varchar(32) NOT NULL DEFAULT '',
`credits` INTEGER NOT NULL DEFAULT '0',
@@ -66,7 +66,8 @@ CREATE TABLE IF NOT EXISTS `courses_data` (
`coursechapters` INTEGER NOT NULL DEFAULT 0,
`autopublish` INTEGER NOT NULL DEFAULT 0,
`autovisible` INTEGER NOT NULL DEFAULT 0,
`profile` varchar(64) NOT NULL DEFAULT 'default'
`profile` varchar(64) NOT NULL DEFAULT 'default',
`login_info` text NOT NULL DEFAULT ''
);
CREATE TABLE IF NOT EXISTS `filesizes` (
`path` varchar(255) NOT NULL PRIMARY KEY,
@@ -107,7 +108,9 @@ CREATE TABLE IF NOT EXISTS `lectures_data` (
`titlefile` varchar(255) NOT NULL DEFAULT '',
`live` INTEGER NOT NULL DEFAULT 0,
`norecording` INTEGER NOT NULL DEFAULT 0,
`profile` varchar(64)
`profile` varchar(64),
`stream_settings` text NOT NULL DEFAULT '',
`stream_job` INTEGER
);
CREATE TABLE IF NOT EXISTS `places` (
`place` varchar(20) NOT NULL PRIMARY KEY,
@@ -153,6 +156,7 @@ CREATE TABLE IF NOT EXISTS `log` (
CREATE TABLE IF NOT EXISTS `hlslog` (
`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
`time` datetime NOT NULL,
`segment` INTEGER,
`source` INTEGER,
`lecture` INTEGER,
`handle` varchar(32),
@@ -179,6 +183,22 @@ CREATE TABLE IF NOT EXISTS `streams` (
`poster` text NOT NULL,
`job_id` INTEGER
);
CREATE TABLE IF NOT EXISTS `live_sources` (
`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
`key` varchar(32) UNIQUE,
`preview_key` varchar(32),
`name` text NOT NULL,
`description` text NOT NULL DEFAULT '',
`options` text NOT NULL DEFAULT '',
`server` varchar(32),
`server_public` varchar(32),
`clientid` INTEGER,
`last_active` datetime,
`time_created` datetime NOT NULL,
`time_updated` datetime NOT NULL,
`created_by` INTEGER NOT NULL,
`deleted` INTEGER NOT NULL DEFAULT '0'
);
CREATE TABLE IF NOT EXISTS `stream_stats` (
`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
`handle` varchar(32) NOT NULL,
@@ -240,8 +260,8 @@ CREATE TABLE IF NOT EXISTS `announcements` (
`level` INTEGER NOT NULL DEFAULT 0,
`visible` INTEGER NOT NULL DEFAULT 0,
`deleted` INTEGER NOT NULL DEFAULT 0,
`time_publish` datetime DEFAULT '',
`time_expire` datetime DEFAULT '',
`time_publish` datetime,
`time_expire` datetime,
`time_created` datetime NOT NULL,
`time_updated` datetime NOT NULL,
`created_by` INTEGER NOT NULL
@@ -251,7 +271,7 @@ CREATE TABLE IF NOT EXISTS `featured` (
`title` text NOT NULL DEFAULT '',
`text` text NOT NULL DEFAULT '',
`internal` text NOT NULL DEFAULT '',
`type` varchar(32) NOT NULL DEFAULT '',
`type` varchar(32) NOT NULL,
`param` text NOT NULL DEFAULT '',
`param2` text NOT NULL DEFAULT '',
`order` INTEGER DEFAULT NULL,
......
#!/bin/bash
# This file is executed when the docker container starts!
cd /code;
nginx -c nginx.conf.example -p . &
# Use -C argument to tell uwsgi to chmod 666 uwsgi.sock
exec uwsgi -C -s uwsgi.sock --manage-script-name --mount /=server:app --plugin python --enable-threads
import math
from server import *
# field types:
@@ -7,7 +9,8 @@ from server import *
# datetime
# duration
# videotime
editable_tables = {
editable_tables = { #pylint: disable=invalid-name
'courses': {
'table': 'courses_data',
'idcolumn': 'id',
@@ -28,7 +31,9 @@ editable_tables = {
'external': {'type': 'boolean', 'description': 'Soll die Veranstaltung nicht im Drehplan angezeigt werden?'},
'coursechapters': {'type': 'boolean', 'description': 'Sollen auf der Kursseite die Kapitelmarker der Videos angezeigt werden?'},
'autopublish': {'type': 'boolean', 'description': 'Sollen encodete Videos automatisch verschoben werden?'},
'autovisible': {'type': 'boolean', 'description': 'Sollen neue Videos automatisch sichtbar sein?'}},
'autovisible': {'type': 'boolean', 'description': 'Sollen neue Videos automatisch sichtbar sein?'},
'login_info': {'type': 'text', 'description': 'Zusätzliche Informationen, die dem Nutzer angezeigt werden, wenn er sich anmelden muss.'}
},
'creationtime_fields': ['created_by', 'time_created', 'time_updated']},
'lectures': {
'table': 'lectures_data',
@@ -45,7 +50,9 @@ editable_tables = {
'jumplist': {'type': ''},
'deleted': {'type': 'boolean'},
'live': {'type': 'boolean', 'description': 'Ist ein Livestream geplant? Muss gesetzt sein damit der RTMP Stream zugeordnet wird.'},
'norecording': {'type': 'boolean', 'description:': 'Führt dazu, dass der Termin ausgegraut wird.'}},
'norecording': {'type': 'boolean', 'description': 'Führt dazu, dass der Termin ausgegraut wird.'},
'stream_settings': {'type': 'text'}
},
'creationtime_fields': ['course_id', 'time_created', 'time_updated']},
'videos': {
'table': 'videos_data',
@@ -111,7 +118,16 @@ editable_tables = {
'notify_new_video': {'type': 'boolean'},
'notify_edit': {'type': 'boolean'}
},
'creationtime_fields': [] }
'creationtime_fields': []},
'live_sources': {
'table': 'live_sources',
'idcolumn': 'id',
'editable_fields': {
'name': {'type': 'shortstring'},
'description': {'type': 'text'},
'deleted': {'type': 'boolean'}
},
'creationtime_fields': ['created_by', 'time_created', 'time_updated']}
}
#parses the path to a dict, containing the table, id, field and field type
@@ -124,25 +140,31 @@ def parseeditpath(path):
return {'table': table, 'id': id, 'column': column, 'type': type, 'tableinfo': editable_tables[table]}
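# For orientation: an edit path addresses table.id.column, so (with a made-up
# id) parseeditpath('courses.42.login_info') returns roughly
#   {'table': 'courses', 'id': '42', 'column': 'login_info', 'type': 'text',
#    'tableinfo': editable_tables['courses']}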
@app.template_filter(name='getfielddescription')
def getfielddescription(path):
p = parseeditpath(path)
desc = p['tableinfo']['editable_fields'][p['column']].get('description', '')
def getfielddescription(inputpath):
path = parseeditpath(inputpath)
desc = path['tableinfo']['editable_fields'][path['column']].get('description', '')
if desc != '':
desc = '<br>'+desc
return desc
@app.template_filter(name='getfieldchangelog')
def getfieldchangelog(path):
p = parseeditpath(path)
changelog = query('SELECT * FROM changelog LEFT JOIN users ON (changelog.who = users.id) WHERE `table` = ? AND `id_value` = ? and `field` = ? ORDER BY `when` DESC LIMIT 5', p['table'], p['id'], p['column'])
def getfieldchangelog(inputpath):
path = parseeditpath(inputpath)
changelog = query('SELECT * FROM changelog \
LEFT JOIN users ON (changelog.who = users.id) WHERE "table" = ? AND "id_value" = ? and "field" = ? \
ORDER BY "when" DESC LIMIT 5', path['table'], path['id'], path['column'])
for entry in changelog:
entry['id_value'] = str(entry['id_value'])
entry['value_new'] = str(entry['value_new'])
entry['path'] = '.'.join([entry['table'], entry['id_value'], entry['field']])
return changelog
@app.route('/internal/edit', methods=['GET', 'POST'])
@mod_required
@csrf_protect
def edit(prefix='', ignore=[]):
def edit(prefix='', ignore=None):
if not ignore:
ignore = []
# All editable tables are expected to have a 'time_updated' field
ignore.append('ref')
ignore.append('prefix')
@@ -157,10 +179,24 @@ def edit(prefix='', ignore=[]):
continue
key = prefix+key
path = parseeditpath(key)
modify('INSERT INTO changelog (`table`,id_value, id_key, field, value_new, value_old, `when`, who, executed) \
VALUES (?,?,?,?,?,(SELECT `%s` FROM %s WHERE %s = ?),?,?,1)'%(path['column'], path['tableinfo']['table'], path['tableinfo']['idcolumn']),
path['table'], path['id'], path['tableinfo']['idcolumn'], path['column'], val, path['id'], datetime.now(), session['user']['dbid'])
modify('UPDATE %s SET `%s` = ?, time_updated = ? WHERE `%s` = ?'%(path['tableinfo']['table'], path['column'], path['tableinfo']['idcolumn']), val, datetime.now(),path['id'])
modify('INSERT INTO changelog \
("table",id_value, id_key, field, value_new, value_old, "when", who, executed) \
VALUES (?,?,?,?,?, \
(SELECT "%s" FROM %s WHERE %s = ?),?,?,true)'%(
path['column'],
path['tableinfo']['table'],
path['tableinfo']['idcolumn']
),
path['table'],
path['id'],
path['tableinfo']['idcolumn'],
path['column'],
val,
path['id'],
datetime.now(),
session['user']['dbid'])
modify('UPDATE %s SET "%s" = ?, time_updated = ? WHERE "%s" = ?'%(path['tableinfo']['table'], path['column'], path['tableinfo']['idcolumn']),
val, datetime.now(), path['id'])
for func in edit_handlers.get(path['table'], {}).get(None, []):
func(path['table'], path['column'], val, path['id'], session['user']['dbid'])
for func in edit_handlers.get(path['table'], {}).get(path['column'], []):
@@ -185,14 +221,16 @@ def create(table):
if (request.method == 'POST') and (request.get_json()):
args = request.get_json().items()
for column, val in args:
if (column == 'ref') or (column == '_csrf_token'):
if column in ['ref', '_csrf_token']:
continue
assert column in list(editable_tables[table]['editable_fields'].keys())+editable_tables[table]['creationtime_fields']
assert column not in defaults
columns.append('`'+column+'`')
columns.append('"'+column+'"')
values.append(val)
assert editable_tables[table]['idcolumn'] == 'id'
id = modify('INSERT INTO %s (%s) VALUES (%s)'%(editable_tables[table]['table'],
','.join(columns), ','.join(['?']*len(values))), *values)
','.join(columns), ','.join(['?']*len(values))), *values,
get_id=True)
if table == 'courses':
set_responsible(id, session['user']['dbid'], 1)
if 'ref' in request.values:
@@ -203,15 +241,9 @@ def create(table):
@register_navbar('Changelog', icon='book', group='weitere')
@mod_required
def changelog():
if 'page' in request.args:
page = max(0, int(request.args['page']))
else:
page = 0
if 'pagesize' in request.args:
pagesize = min(500, int(request.args['pagesize']))
else:
pagesize = 50
changelog = query('SELECT * FROM changelog LEFT JOIN users ON (changelog.who = users.id) ORDER BY `when` DESC LIMIT ? OFFSET ?', pagesize, page*pagesize)
page = max(0, int(request.args.get('page', 0)))
pagesize = min(500, int(request.args.get('pagesize', 50)))
changelog = query('SELECT * FROM changelog LEFT JOIN users ON (changelog.who = users.id) ORDER BY "when" DESC LIMIT ? OFFSET ?', pagesize, page*pagesize)
pagecount = math.ceil(query('SELECT count(id) as count FROM changelog')[0]['count']/pagesize)
for entry in changelog:
entry['path'] = '.'.join([entry['table'], entry['id_value'], entry['field']])
@@ -223,12 +255,13 @@ def changelog():
@csrf_protect
def set_responsible(course_id, user_id, value):
if value:
modify('REPLACE INTO responsible (course_id, user_id) values (?, ?)', course_id, user_id);
if not query('SELECT id FROM responsible WHERE course_id = ? AND user_id = ?', course_id, user_id):
modify('INSERT INTO responsible (course_id, user_id) VALUES (?, ?)', course_id, user_id)
else:
modify('DELETE FROM responsible WHERE course_id = ? AND user_id = ?', course_id, user_id);
modify('DELETE FROM responsible WHERE course_id = ? AND user_id = ?', course_id, user_id)
return "OK", 200
edit_handlers = {}
edit_handlers = {} #pylint: disable=invalid-name
def edit_handler(*tables, field=None):
def wrapper(func):
for table in tables:
......
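# The edit_handlers registry pairs with the dispatch in edit() above: after a
# modification, handlers registered for the table (and optionally the specific
# field) are called with (table, column, value, id, user). A registration
# sketch following the chapter_changed pattern below (handler body is
# illustrative):
#
#   @edit_handler('chapters')
#   def log_chapter_edit(table, column, value, id, user):
#       print('chapter', id, 'changed:', column)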
from server import *
import os.path
import json
from server import *
from sorter import insert_video
from edit import edit_handler
def set_metadata(dest, course, lecture):
chapters = query('SELECT text, time FROM chapters WHERE lecture_id = ? AND visible ORDER BY time', lecture['id'])
@@ -10,6 +14,14 @@ def set_metadata(dest, course, lecture):
dest['metadata'] = metadata
dest['chapters'] = chapters
# Incomplete and not enabled currently
#def schedule_intro(lectureid):
# lecture = query('SELECT * FROM lectures where id = ?', lectureid)
# course = query('SELECT * FROM course where id = ?', lecture['course_id'])
# data = {'path': path, 'lecture_id': lectureid}
# set_metadata(data, course, lecture)
# schedule_job('intro', data)
def schedule_remux(lectureid, videoid=None):
lecture = query('SELECT * FROM lectures WHERE id = ?', lectureid)[0]
course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0]
@@ -46,8 +58,8 @@ def add_remux_job():
def schedule_transcode(source, fmt_id=None, video=None):
if video:
fmt_id = video['video_format']
assert(video['lecture_id'] == source['lecture_id'])
assert(fmt_id != None)
assert video['lecture_id'] == source['lecture_id']
assert fmt_id is not None
fmt = query('SELECT * FROM formats WHERE id = ?', fmt_id)[0]
lecture = query('SELECT * FROM lectures WHERE id = ?', source['lecture_id'])[0]
course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0]
@@ -58,7 +70,7 @@ def schedule_transcode(source, fmt_id=None, video=None):
stream = {'name': 'audio', 'type': 'audio'}
data['input']['streams'].append(stream)
else:
assert(False)
assert False
set_metadata(data['output'], course, lecture)
basename = os.path.basename(source['path']).rsplit('.', 1)[0]
data['output']['path'] = 'pub/'+course['handle']+'/'+basename+fmt['suffix']
@@ -72,7 +84,23 @@ def schedule_transcode(source, fmt_id=None, video=None):
data['lecture_id'] = lecture['id']
data['format_id'] = fmt['id']
data['source_id'] = source['id']
schedule_job('transcode', data, queue="background")
return schedule_job('transcode', data, queue="background")
@job_handler('transcode')
def insert_transcoded_video(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
if 'lecture_id' not in data or 'source_id' not in data or 'format_id' not in data:
return
if 'video_id' in data:
return
video_id = insert_video(
data['lecture_id'],
data['output']['path'],
data['format_id'],
status['hash'],
status['filesize'],
status['duration'],
data['source_id'])
schedule_remux(data['lecture_id'], video_id)
@app.route('/internal/jobs/add/reencode', methods=['GET', 'POST'])
@mod_required
@@ -87,10 +115,12 @@ def add_reencode_job():
schedule_transcode(source, video=video)
return redirect(request.values.get('ref', url_for('jobs_overview')))
@job_handler('probe-raw')
def update_lecture_videos(jobid, jobtype, data, state, status):
@job_handler('probe-raw', 'intro')
def update_lecture_videos(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
# info: sql no test cover
if 'lecture_id' not in data:
return
if jobtype == 'probe-raw':
if 'source_id' not in data:
modify('INSERT INTO sources (lecture_id, path, type, hash, time_created) VALUES (?, ?, ?, ?, ?)',
data['lecture_id'], data['path'], 'plain', status['hash'], datetime.now())
@@ -98,6 +128,9 @@ def update_lecture_videos(jobid, jobtype, data, state, status):
if not sources:
return
latest = sources[-1]
# Incomplete and not enabled currently
#if False and jobtype == 'probe-raw':
# schedule_intro(data['lecture_id'])
videos = query('SELECT * FROM videos WHERE videos.lecture_id = ?', data['lecture_id'])
current_fmts = [v['video_format'] for v in videos]
formats = query('''SELECT formats.* FROM formats
@@ -113,7 +146,7 @@ def update_lecture_videos(jobid, jobtype, data, state, status):
schedule_transcode(latest, video=video)
@edit_handler('chapters')
def chapter_changed(table, column, value, id, user):
def chapter_changed(table, column, value, id, user): #pylint: disable=unused-argument
chapters = query('SELECT * FROM chapters WHERE id = ?', id)
if not chapters:
return
@@ -122,7 +155,7 @@ def chapter_changed(table, column, value, id, user):
schedule_remux(chapter['lecture_id'])
@edit_handler('courses')
def course_changed(table, column, value, id, user):
def course_changed(table, column, value, id, user): #pylint: disable=unused-argument
if column not in ['title', 'organizer']:
return
lectures = query('SELECT * FROM lectures WHERE course_id = ?', id)
@@ -130,7 +163,6 @@ def course_changed(table, column, value, id, user):
schedule_remux(lecture['id'])
@edit_handler('lectures')
def lecture_changed(table, column, value, id, user):
def lecture_changed(table, column, value, id, user): #pylint: disable=unused-argument
if column in ['title', 'comment', 'time', 'speaker']:
schedule_remux(id)
import hashlib
from datetime import MINYEAR
from server import *
def gen_atomid(s):
return 'urn:md5:'+hashlib.md5(s.encode('utf-8')).hexdigest().upper()
def gen_atomid(value):
return 'urn:md5:'+hashlib.md5(value.encode('utf-8')).hexdigest().upper()
def fixdate(d):
if not isinstance(d, datetime):
def fixdate(value):
if not isinstance(value, datetime):
return datetime(MINYEAR, 1, 1)
return d
return value
@app.route('/feed')
@app.route('/<handle>/feed')
@handle_errors(None, 'Diese Veranstaltung existiert nicht!', 404, IndexError)
def feed(handle=None):
id = None
course = {'id': None, 'title': 'Neueste Videos', 'time_created': None, 'time_updated': None}
course['atomid'] = gen_atomid('FROM videos SELECT *')
if handle:
course = query('SELECT * FROM courses WHERE handle = ? AND visible', handle)[0]
course['atomid'] = gen_atomid('Video AG, courses['+str(course['id'])+']: '+course['handle'])
id = course['id']
entries = query('''
SELECT lectures.*, "video" AS sep, videos.*, formats.description AS format_description, formats.prio, "course" AS sep, courses.*
course_id = course['id']
entries = query(f'''
SELECT lectures.*, 'video' AS sep, videos.*, formats.description AS format_description, formats.prio, \'course\' AS sep, courses.*
FROM lectures
JOIN courses ON (courses.id = lectures.course_id)
JOIN videos ON (lectures.id = videos.lecture_id)
JOIN formats ON (formats.id = videos.video_format)
WHERE ((? IS NULL AND courses.listed) OR course_id = ?) AND courses.visible AND lectures.visible AND videos.visible
WHERE {"courses.listed" if course_id is None else "course_id = ?"} AND courses.visible AND lectures.visible AND videos.visible
ORDER BY videos.time_created DESC, prio ASC
LIMIT 100''',
course['id'], course['id'])
*([] if course_id is None else [course_id]))
updated = max(course['time_updated'], course['time_created'], key=fixdate)
for entry in entries:
entry['updated'] = max(entry['video']['time_created'], entry['video']['time_updated'], entry['time_created'], entry['time_updated'], key=fixdate)
@@ -51,21 +53,28 @@ def rss_feed(handle):
GROUP BY formats.id
ORDER BY formats.player_prio DESC''', course['id'])
if not formats:
# info: sql no test cover
formats = query('SELECT * FROM formats WHERE id = 4 OR id = 5 OR id = 10') # 360p, 720p, 1080p
if 'format_id' not in request.values:
return redirect(url_for('rss_feed', handle=handle, format_id=formats[0]['id']))
fmt = query('SELECT * FROM formats WHERE id = ?', request.values.get('format_id', request.values['format_id']))[0]
items = query('''SELECT lectures.*, "video" AS sep, videos.*
items = query('''SELECT lectures.*, 'video' AS sep, videos.*
FROM lectures
JOIN courses ON courses.id = lectures.course_id
JOIN videos ON lectures.id = videos.lecture_id
WHERE courses.id = ? AND videos.video_format = ? AND courses.visible AND lectures.visible AND videos.visible
ORDER BY lectures.time DESC
LIMIT 100''', course['id'], fmt['id'])
chapters = query('SELECT chapters.* FROM chapters JOIN lectures ON lectures.id = chapters.lecture_id WHERE lectures.course_id = ? AND NOT chapters.deleted AND chapters.visible ORDER BY time ASC', course['id'])
chapters = query('SELECT chapters.* FROM chapters \
JOIN lectures ON lectures.id = chapters.lecture_id \
WHERE lectures.course_id = ? AND NOT chapters.deleted AND chapters.visible \
ORDER BY time ASC', course['id'])
for item in items:
item['updated'] = max(item['video']['time_created'], item['video']['time_updated'], item['time_created'], item['time_updated'], key=fixdate)
return Response(render_template('feed.rss', course=course, format=fmt, formats=formats, items=items, chapters=chapters), 200, {'Content-Type': 'application/rss+xml; charset=UTF-8'})
return Response(
render_template('feed.rss', course=course, format=fmt, formats=formats, items=items, chapters=chapters),
200,
{'Content-Type': 'application/rss+xml; charset=UTF-8'})
@app.route('/courses/feed')
def courses_feed():
......
from server import *
from datetime import timedelta, datetime
from ipaddress import ip_address, ip_network
import icalendar
from werkzeug.datastructures import Headers
from datetime import timedelta, datetime
from server import *
def export_lectures(lectures, responsible, name):
cal = icalendar.Calendar()
cal.add('prodid', '-//Video AG//rwth.video//')
cal.add('version', '1.0')
for l in lectures:
for lecture in lectures:
resp = []
for r in responsible:
if r['course_id'] == l['course_id']:
for r in responsible: #pylint: disable=invalid-name
if r['course_id'] == lecture['course_id']:
resp.append(r['realname'])
event = icalendar.Event()
event.add('summary', l['course']['short']+': '+l['title'])
event.add('summary', lecture['course']['short']+': '+lecture['title'])
event.add('description', '\n\n'.join([s for s in [
l['comment'],
l['internal'],
lecture['comment'],
lecture['internal'],
'Zuständig: '+', '.join(resp) if resp else ''
] if s]))
event.add('uid', '%i@rwth.video'%l['id'])
event.add('uid', '%i@rwth.video'%lecture['id'])
event.add('dtstamp', datetime.utcnow())
event.add('categories', l['course']['short'])
event.add('dtstart', l['time'])
event.add('location', l['place'])
event.add('dtend', l['time'] + timedelta(minutes=l['duration']))
event.add('categories', lecture['course']['short'])
event.add('dtstart', lecture['time'])
event.add('location', lecture['place'])
event.add('dtend', lecture['time'] + timedelta(minutes=lecture['duration']))
cal.add_component(event)
h = Headers()
h.add_header("Content-Disposition", "inline", filename=name)
return Response(cal.to_ical(), mimetype="text/calendar", headers=h)
headers = Headers()
headers.add_header("Content-Disposition", "inline", filename=name)
return Response(cal.to_ical(), mimetype="text/calendar", headers=headers)
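As a rough usage sketch (row shapes inferred from the loop above; all values hypothetical), export_lectures expects joined lecture rows plus a list of responsible users keyed by course_id:
lectures = [{'id': 1, 'course_id': 2, 'title': 'Intro', 'comment': '', 'internal': '',
             'time': datetime(2020, 4, 1, 10, 0), 'duration': 90, 'place': 'Audimax',
             'course': {'short': 'progra'}}]
responsible = [{'course_id': 2, 'realname': 'Alice'}]
response = export_lectures(lectures, responsible, 'videoag_progra.ics')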
def calperm(func):
@wraps(func)
......@@ -45,7 +47,6 @@ def calperm(func):
permission = True
if permission:
return func(*args, **kwargs)
else:
return Response("Login required", 401, {'WWW-Authenticate': 'Basic realm="FS-Login required"'})
return decorator
......@@ -56,7 +57,7 @@ def get_responsible():
@app.route('/internal/ical/all')
@calperm
def ical_all():
return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.*
return export_lectures(query('''SELECT lectures.*, 'course' AS sep, courses.*
FROM lectures JOIN courses ON courses.id = lectures.course_id
WHERE NOT norecording AND NOT external
ORDER BY time DESC LIMIT ?''', request.values.get('limit', 1000)),
......@@ -66,7 +67,7 @@ def ical_all():
@calperm
def ical_user(user):
username = query('SELECT name FROM users WHERE users.id = ?', user)[0]['name']
return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.*
return export_lectures(query('''SELECT lectures.*, 'course' AS sep, courses.*
FROM lectures
JOIN courses ON courses.id = lectures.course_id
JOIN responsible ON responsible.course_id = courses.id
......@@ -78,7 +79,7 @@ def ical_user(user):
@calperm
def ical_notuser(user):
username = query('SELECT name FROM users WHERE users.id = ?', user)[0]['name']
return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.*
return export_lectures(query('''SELECT lectures.*, 'course' AS sep, courses.*
FROM lectures
JOIN courses ON courses.id = lectures.course_id
LEFT JOIN responsible ON (responsible.course_id = courses.id AND responsible.user_id = ?)
......@@ -89,7 +90,7 @@ def ical_notuser(user):
@app.route('/internal/ical/course/<course>')
@calperm
def ical_course(course):
return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.*
return export_lectures(query('''SELECT lectures.*, 'course' AS sep, courses.*
FROM lectures JOIN courses ON courses.id = lectures.course_id
WHERE courses.handle = ? AND NOT norecording AND NOT external ORDER BY time DESC''', course),
get_responsible(), 'videoag_%s.ics'%course)
import urllib.request
import urllib.parse
from server import *
@app.route('/internal/import/<int:id>', methods=['GET', 'POST'])
......@@ -15,39 +18,28 @@ def list_import_sources(id):
for i in campus:
if i.startswith('new'):
if campus[i]['url'] != '':
modify('INSERT INTO import_campus (url, type, course_id, last_checked, changed) VALUES (?, ?, ?, ?, 1)',campus[i]['url'],campus[i]['type'],id,datetime.now())
modify('INSERT INTO import_campus (url, type, course_id, last_checked, changed) VALUES (?, ?, ?, ?, 1)',
campus[i]['url'], campus[i]['type'], id, datetime.now())
else:
# info: sql no test cover
if campus[i]['url'] != '':
query('UPDATE import_campus SET url = ?, `type` = ? WHERE (course_id = ?) AND (id = ?)', campus[i]['url'],campus[i]['type'],id,int(i))
query('UPDATE import_campus SET url = ?, "type" = ? WHERE (course_id = ?) AND (id = ?)', campus[i]['url'], campus[i]['type'], id, int(i))
else:
query('DELETE FROM import_campus WHERE (id = ?) AND (course_id = ?)', int(i), id)
import_campus = query('SELECT * FROM import_campus WHERE course_id = ?', id)
return render_template('import_campus.html', course=courses, import_campus=import_campus, events=[])
@app.route('/internal/import/<int:id>/now', methods=['GET', 'POST'])
@mod_required
def import_from(id):
courses = query('SELECT * FROM courses WHERE id = ?', id)[0]
lectures = query('SELECT * FROM lectures WHERE course_id = ?', courses['id'])
import_campus = query('SELECT * FROM import_campus WHERE course_id = ?',id)
def fetch_co_course_events(i):
# pylint: disable=too-many-locals,too-many-branches,too-many-statements,invalid-name,bare-except
from lxml import html # pylint: disable=import-outside-toplevel
events = []
try:
from lxml import html
from lxml import etree
import urllib.request
# if you have to port this to anything new, may God be with you.
for i in import_campus:
try:
remote_html = urllib.request.urlopen(i['url']).read()
except:
flash("Ungültige URL: '"+i['url']+"'")
tablexpath = "//td[text()='Termine und Ort']/following::table[1]"
basetable = html.fromstring(remote_html).xpath(tablexpath)[0]
parsebase = html.tostring(basetable)
# parse recurring events
toparse = [i['url']]
......@@ -108,7 +100,9 @@ def import_from(id):
e['duration'] = int((datetime.strptime("%s %s"%(k, j['end']), fmt) - e['time']).seconds/60)
j['place'] = str(j['place'])
if j['place'] != '':
dbplace = query("SELECT name FROM places WHERE (campus_room = ?) OR (campus_name = ?) OR ((NOT campus_name) AND name = ?)",j['place'],j['place'],j['place'])
# info: sql no test cover
dbplace = query("SELECT name FROM places WHERE (campus_room = ?) OR (campus_name = ?) OR (campus_name = '' AND name = ?)",
j['place'], j['place'], j['place'])
if dbplace:
e['place'] = dbplace[0]['name']
else:
......@@ -118,11 +112,85 @@ def import_from(id):
e['title'] = i['type']
events.append(e)
# it is parsed.
return events
def fetch_ro_event_ical(ids):
data = {'pMode': 'T', 'pInclPruef': 'N', 'pInclPruefGepl': 'N', 'pOutputFormat': '99', 'pCharset': 'UTF8', 'pMaskAction': 'DOWNLOAD'}
data = list(data.items())
for id in ids:
data.append(('pTerminNr', id))
data = urllib.parse.urlencode(data).encode('utf-8')
req = urllib.request.Request('https://online.rwth-aachen.de/RWTHonline/pl/ui/%24ctx/wbKalender.wbExport',
data=data, method='POST')
with urllib.request.urlopen(req) as f:
return f.read().decode('utf-8')
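Note that urlencode is fed a list of tuples here, so the repeated pTerminNr keys survive in the form body; a standalone sketch of that stdlib behaviour:
import urllib.parse
body = urllib.parse.urlencode([('pMode', 'T'), ('pTerminNr', '101'), ('pTerminNr', '102')])
# body == 'pMode=T&pTerminNr=101&pTerminNr=102'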
def fetch_ro_course_ical(id):
# pylint: disable=import-outside-toplevel
from lxml import html
url = 'https://online.rwth-aachen.de/RWTHonline/pl/ui/%24ctx/wbTermin_List.wbLehrveranstaltung?pStpSpNr='+'%i'%(int(id))
req = urllib.request.urlopen(url)
dom = html.fromstring(req.read())
event_ids = [x.value for x in dom.xpath('//input[@name="pTerminNr"]')]
return fetch_ro_event_ical(event_ids)
def fetch_ro_course_events(item):
# pylint: disable=import-outside-toplevel
import icalendar
import pytz
localtz = pytz.timezone('Europe/Berlin')
# First, fix the broken JavaScript fragment paths
url = urllib.parse.urlparse(item['url'].replace('#/', ''))
args = urllib.parse.parse_qs(url.query)
if 'pStpSpNr' in args: # Legacy URLs
id = args['pStpSpNr'][0]
elif len(url.path.split('/')) > 1 and url.path.split('/')[-2] == 'courses': # New URLs
id = url.path.split('/')[-1]
else:
flash("Ungültige URL: '"+url.geturl()+"'")
return [] # can't get events from an invalid URL, so just return an empty list
cal = icalendar.Calendar().from_ical(fetch_ro_course_ical(id))
events = []
for comp in cal.subcomponents:
if comp.name != 'VEVENT':
continue
if comp.get('STATUS') != 'CONFIRMED':
continue
event = {}
place = str(comp.get('LOCATION', ''))
if place:
campus_room = place.split('(')[-1].split(')')[0]
# info: sql no test cover
dbplace = query('SELECT name FROM places WHERE campus_room = ?', campus_room)
if dbplace:
event['place'] = dbplace[0]['name']
else:
event['place'] = 'Unbekannter Ort ('+place+')'
else:
event['place'] = ''
event['time'] = comp['DTSTART'].dt.astimezone(localtz).replace(tzinfo=None)
event['duration'] = int((comp['DTEND'].dt - comp['DTSTART'].dt).seconds/60)
event['title'] = item['type']
events.append(event)
return events
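For reference, a hypothetical import_campus row as consumed by the fetchers above (columns taken from the INSERT statement earlier in this diff; the URL and IDs are made up):
item = {'course_id': 42, 'type': 'Vorlesung',
        'url': 'https://online.rwth-aachen.de/RWTHonline/ee/ui/ca2/app/desktop/#/slc.tm.cp/student/courses/123456'}
events = fetch_ro_course_events(item)
# -> [{'title': 'Vorlesung', 'time': datetime(...), 'duration': 90, 'place': '...'}, ...]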
@app.route('/internal/import/<int:id>/now', methods=['GET', 'POST'])
@mod_required
def import_from(id):
# pylint: disable=too-many-branches
courses = query('SELECT * FROM courses WHERE id = ?', id)[0]
lectures = query('SELECT * FROM lectures WHERE course_id = ?', courses['id'])
import_campus = query('SELECT * FROM import_campus WHERE course_id = ?', id)
events = []
try:
# if you have to port this to anything new, may God be with you.
for i in import_campus:
if 'www.campus.rwth-aachen.de' in i['url']:
events += fetch_co_course_events(i)
else:
events += fetch_ro_course_events(i)
except ImportError:
flash('python-lxml not found, campus import will not work.')
flash('python-lxml or python-pytz not found, campus and ro import will not work!')
# events to add
newevents = []
......
from datetime import datetime, timedelta
import traceback
import json
from server import modify, query, date_json_handler, sched_func, notify_admins
job_handlers = {} #pylint: disable=invalid-name
def job_handler(*types, state='finished'):
def wrapper(func):
for jobtype in types:
if jobtype not in job_handlers:
job_handlers[jobtype] = {}
if state not in job_handlers[jobtype]:
job_handlers[jobtype][state] = []
job_handlers[jobtype][state].append(func)
return func
return wrapper
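A minimal registration sketch (job type and handler body are illustrative; the five-argument signature mirrors the call in job_handler_handle below):
@job_handler('probe', state='finished')
def on_probe_finished(job_id, jobtype, data, state, status):
    # 'data' and 'status' arrive as already-parsed JSON dicts
    print('probe', job_id, 'finished with', status)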
def job_handler_handle(id, state):
job = query('SELECT * FROM jobs WHERE id = ?', id, nlfix=False)[0]
type = job['type']
for func in job_handlers.get(type, {}).get(state, []):
try:
func(id, job['type'], json.loads(job['data']), state, json.loads(job['status']))
except Exception: #pylint: disable=broad-except
notify_admins('scheduler_exception', name=func.__name__, traceback=traceback.format_exc())
traceback.print_exc()
@sched_func(10)
def job_catch_broken():
# scheduled but never pinged
modify("BEGIN")
query('UPDATE jobs SET state=\'ready\' WHERE state=\'scheduled\' and time_scheduled < ?', datetime.now() - timedelta(seconds=10))
try:
modify("COMMIT")
except: #pylint: disable=bare-except
pass
# no pings since 60s
modify("BEGIN")
query('UPDATE jobs SET state=\'failed\' WHERE state=\'running\' and last_ping < ?', datetime.now() - timedelta(seconds=60))
try:
modify("COMMIT")
except: #pylint: disable=bare-except
pass
def job_set_state(id, state):
query('UPDATE jobs SET state=? WHERE id=?', state, id)
def schedule_job(jobtype, data=None, priority=0, queue="default"):
if not data:
data = {}
return modify('INSERT INTO jobs (type, priority, queue, data, time_created) VALUES (?, ?, ?, ?, ?)',
jobtype, priority, queue, json.dumps(data, default=date_json_handler), datetime.now(),
get_id=True)
def cancel_job(job_id):
query('UPDATE jobs SET state = \'deleted\' WHERE id = ? AND state = \'ready\'', job_id)
query('UPDATE jobs SET canceled = true WHERE id = ?', job_id)
def restart_job(job_id, canceled=False):
# info: sql no test cover
if canceled:
query('UPDATE jobs SET state = \'ready\', canceled = false WHERE id = ? AND state = \'failed\'', job_id)
else:
query('UPDATE jobs SET state = \'ready\' WHERE id = ? AND state = \'failed\' AND NOT canceled', job_id)
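Taken together, a hedged sketch of the job lifecycle from a caller's perspective (job type and payload hypothetical):
job_id = schedule_job('transcode', {'lecture_id': 1234}, priority=5)
cancel_job(job_id)                   # only flips state while the job is still 'ready'
restart_job(job_id, canceled=True)   # re-arms a failed job even if it was canceled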
from server import *
import traceback
import json
import random
import math
from time import sleep
job_handlers = {}
def job_handler(*types, state='finished'):
def wrapper(func):
for jobtype in types:
if jobtype not in job_handlers:
job_handlers[jobtype] = {}
if state not in job_handlers[jobtype]:
job_handlers[jobtype][state] = []
job_handlers[jobtype][state].append(func)
return func
return wrapper
def schedule_job(jobtype, data=None, priority=0, queue="default"):
if not data:
data = {}
return modify('INSERT INTO jobs (type, priority, queue, data, time_created) VALUES (?, ?, ?, ?, ?)',
jobtype, priority, queue, json.dumps(data, default=date_json_handler), datetime.now())
def cancel_job(job_id):
modify('UPDATE jobs SET state = "deleted" WHERE id = ? AND state = "ready"', job_id)
modify('UPDATE jobs SET canceled = 1 WHERE id = ?', job_id)
def restart_job(job_id, canceled=False):
if canceled:
modify('UPDATE jobs SET state = "ready", canceled = 0 WHERE id = ? AND state = "failed"', job_id)
else:
modify('UPDATE jobs SET state = "ready" WHERE id = ? AND state = "failed" AND NOT canceled', job_id)
from server import *
@app.route('/internal/jobs/overview')
@register_navbar('Jobs', iconlib='fa', icon='suitcase', group='weitere')
@mod_required
def jobs_overview():
if 'page' in request.args:
page = max(0, int(request.args['page']))
else:
page = 0
if 'pagesize' in request.args:
pagesize = min(500, int(request.args['pagesize']))
else:
pagesize = 50
page = max(0, int(request.args.get('page', 0)))
pagesize = min(500, int(request.args.get('pagesize', 50)))
worker = query('SELECT * FROM worker ORDER BY last_ping DESC')
# get filter options
......@@ -59,9 +25,36 @@ def jobs_overview():
'state': request.args.get('state', 'failed'),
'worker': request.args.get('worker', '%')}
pagecount = math.ceil(query('SELECT count(id) as count FROM jobs WHERE (type like ?) AND (worker like ? OR (worker IS NULL AND ? = "%")) AND (state like ?)', filter['type'], filter['worker'], filter['worker'], filter['state'])[0]['count']/pagesize)
jobs = query('SELECT * FROM jobs WHERE (type like ?) AND (worker like ? OR (worker IS NULL AND ? = "%")) AND (state like ?) ORDER BY `time_created` DESC LIMIT ? OFFSET ?', filter['type'], filter['worker'], filter['worker'], filter['state'], pagesize, page*pagesize)
return render_template('jobs_overview.html',worker=worker,jobs=jobs, filter_values=filter_values, filter=filter, page=page, pagesize=pagesize, pagecount=pagecount)
condition_values = []
if filter['worker'] == '%':
condition = 'WHERE (type like ?) AND (state like ?)'
condition_values.extend([filter['type'], filter['state']])
else:
condition = 'WHERE (type like ?) AND (worker like ?) AND (state like ?)'
condition_values.extend([filter['type'], filter['worker'], filter['state']])
pagecount = math.ceil(query(f'SELECT count(id) as count FROM jobs {condition}',
*condition_values)[0]['count']/pagesize)
jobs = query(f'SELECT * FROM jobs \
{condition} \
ORDER BY "time_created" DESC LIMIT ? OFFSET ?',
*[*condition_values, pagesize, page*pagesize])
active_streams = query('SELECT lectures.*, \'course\' AS sep, courses.*, \'job\' AS sep, jobs.* FROM lectures \
JOIN courses ON (courses.id = lectures.course_id) \
JOIN jobs ON (jobs.id = lectures.stream_job) WHERE lectures.stream_job IS NOT NULL')
for stream in active_streams:
try:
stream['destbase'] = json.loads(stream['job']['data'] or '{}').get('destbase')
except: #pylint: disable=bare-except
pass
return render_template('jobs_overview.html',
worker=worker,
jobs=jobs,
filter_values=filter_values,
filter=filter,
page=page,
pagesize=pagesize,
pagecount=pagecount,
active_streams=active_streams)
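The filter handling above builds the WHERE clause and its parameter list in lockstep, so the f-string only ever interpolates the fixed condition string, never user input; schematically:
condition = 'WHERE (type like ?) AND (state like ?)'
condition_values = ['%', 'failed']
rows = query(f'SELECT * FROM jobs {condition} LIMIT ?', *condition_values, 50)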
@app.route('/internal/jobs/action/<action>', methods=['GET', 'POST'])
@app.route('/internal/jobs/action/<action>/<jobid>', methods=['GET', 'POST'])
......@@ -69,93 +62,65 @@ def jobs_overview():
@csrf_protect
def jobs_action(action, jobid=None):
if action == 'clear_failed':
query('UPDATE jobs SET state = "deleted" WHERE state = "failed" AND (id = ? OR ? IS NULL)', jobid, jobid)
if jobid:
query('UPDATE jobs SET state = \'deleted\' WHERE state = \'failed\' AND id = ?', jobid)
else:
query('UPDATE jobs SET state = \'deleted\' WHERE state = \'failed\'')
elif action == 'retry_failed':
query('UPDATE jobs SET state = "ready", canceled = 0 WHERE state = "failed" AND (id = ? OR ? IS NULL)', jobid, jobid)
if jobid:
query('UPDATE jobs SET state = \'ready\', canceled = false WHERE state = \'failed\' AND id = ?', jobid)
else:
query('UPDATE jobs SET state = \'ready\', canceled = false WHERE state = \'failed\'')
elif action == 'copy' and jobid:
query("INSERT INTO jobs (type, priority, queue, state, data, time_created) SELECT type, priority, queue, 'ready', data, ? FROM jobs where id = ?", datetime.now(), jobid)
query("INSERT INTO jobs (type, priority, queue, state, data, time_created) \
SELECT type, priority, queue, 'ready', data, ? FROM jobs where id = ?",
datetime.now(), jobid)
elif action == 'delete' and jobid:
query('UPDATE jobs SET state = "deleted" WHERE id = ?', jobid)
query('UPDATE jobs SET state = \'deleted\' WHERE id = ?', jobid)
elif action == 'cancel' and jobid:
cancel_job(jobid)
return redirect(request.values.get('ref', url_for('jobs_overview')))
def jobs_api_token_required(func):
@wraps(func)
def decorator(*args, **kwargs):
if 'apikey' in request.values:
token = request.values['apikey']
elif request.get_json() and ('apikey' in request.get_json()):
token = request.get_json()['apikey']
else:
token = None
if not token == config.get('JOBS_API_KEY', [None]):
return 'Permission denied', 403
else:
return func(*args, **kwargs)
return decorator
@sched_func(10)
def jobs_catch_broken():
# scheduled but never pinged
query('BEGIN')
query('UPDATE jobs SET state="ready" WHERE state="scheduled" and time_scheduled < ?', datetime.now() - timedelta(seconds=10))
try:
query('COMMIT')
except:
pass
# no pings since 60s
query('BEGIN')
query('UPDATE jobs SET state="failed" WHERE state="running" and last_ping < ?', datetime.now() - timedelta(seconds=60))
try:
query('COMMIT')
except:
pass
@app.route('/internal/jobs/api/job/<int:id>/ping', methods=['GET', 'POST'])
@jobs_api_token_required
@api_token_required('JOBS_API_KEY')
def jobs_ping(id):
hostname = request.values['host']
status = json.dumps(json.loads(request.values['status']), default=date_json_handler)
state = request.values['state']
if state == 'finished':
query('UPDATE jobs SET time_finished = ?, status = ?, state = "finished" where id = ?', datetime.now(), status, id)
query('UPDATE jobs SET time_finished = ?, status = ?, state = \'finished\' where id = ?', datetime.now(), status, id)
else:
query('UPDATE jobs SET worker = ?, last_ping = ?, status = ?, state = ? where id = ?', hostname, datetime.now(), status, state, id)
job_handler_handle(id, state)
job = query('SELECT * FROM jobs WHERE id = ?', id, nlfix=False)[0]
for func in job_handlers.get(job['type'], {}).get(state, []):
try:
func(id, job['type'], json.loads(job['data']), state, json.loads(job['status']))
except Exception:
traceback.print_exc()
if job['canceled']:
return 'Job canceled', 205
else:
return 'OK', 200
@app.route('/internal/jobs/api/worker/<hostname>/schedule', methods=['POST'])
@jobs_api_token_required
@api_token_required('JOBS_API_KEY')
def jobs_schedule(hostname):
query('REPLACE INTO worker (hostname, last_ping) values (?, ?)', hostname, datetime.now())
if query("SELECT hostname FROM worker WHERE hostname = ?", hostname):
query("UPDATE worker SET last_ping = ? WHERE hostname = ?", datetime.now(), hostname)
else:
query("INSERT INTO worker (hostname, last_ping) VALUES (?, ?)", hostname, datetime.now())
hostdata = request.get_json()
if not hostdata:
return 'no hostdata sent', 400
job = None
tries = 0
jobtypes = hostdata['jobtypes'] if 'jobtypes' in hostdata else []
while (not job):
while not job:
try:
query("BEGIN")
for i in query('SELECT * FROM jobs WHERE state = "ready" ORDER BY priority DESC'):
modify("BEGIN")
for i in query('SELECT * FROM jobs WHERE state = \'ready\' ORDER BY priority DESC'):
if i['type'] in jobtypes and i['queue'] in hostdata['queues']:
job = i
break
if not job:
return 'no jobs', 503
modify('UPDATE jobs SET state="scheduled", worker = ?, time_scheduled = ? WHERE id = ?', hostname, datetime.now(), job['id'])
query("COMMIT")
except:
modify('UPDATE jobs SET state=\'scheduled\', worker = ?, time_scheduled = ? WHERE id = ?', hostname, datetime.now(), job['id'])
modify("COMMIT")
except: #pylint: disable=bare-except
tries += 1
job = None
sleep(random.random())
......@@ -163,3 +128,11 @@ def jobs_schedule(hostname):
return 'no jobs', 503
return Response(json.dumps(job, default=date_json_handler), mimetype='application/json')
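For completeness, a hedged sketch of what a worker might POST to claim a job (hostname, URL, and API-key placement are assumptions; the payload keys match the hostdata lookups above, and the key handling follows the old jobs_api_token_required decorator shown in this diff):
import requests
resp = requests.post('https://videoag.example/internal/jobs/api/worker/encoder1/schedule',
                     json={'apikey': '<JOBS_API_KEY>',
                           'jobtypes': ['transcode', 'remux'],
                           'queues': ['default']})
job = resp.json() if resp.status_code == 200 else None  # 503 means 'no jobs'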
@app.route('/internal/jobs/add/forward', methods=['GET', 'POST'])
@mod_required
@csrf_protect
def add_forward_job():
schedule_job('live_forward', {'src': request.values['src'],
'dest': request.values['dest'], 'format': 'flv'}, priority=9)
return redirect(request.values.get('ref', url_for('jobs_overview')))
from server import *
import requests
L2P_BASE = 'https://www3.elearning.rwth-aachen.de/_vti_bin/l2pservices/api.svc/v1/'
from server import *
OAUTH_BASE = 'https://oauth.campus.rwth-aachen.de/oauth2waitress/oauth2.svc/'
MOODLE_BASE = 'https://moped.ecampus.rwth-aachen.de/proxy/api/v2/eLearning/Moodle/'
def l2pget(endpoint, token, **args):
args['accessToken'] = token
r = requests.request('GET', L2P_BASE+endpoint, params=args)
def moodleget(endpoint, token, **args):
args['token'] = token
r = requests.request('GET', MOODLE_BASE+endpoint, params=args)
return r.json()
def oauthget(endpoint, **args):
......@@ -16,13 +17,21 @@ def oauthget(endpoint, **args):
@app.route('/internal/l2pauth')
def start_l2pauth():
return "L2P is no longer available."
@app.route('/internal/moodleauth')
def start_moodleauth():
if 'L2P_APIKEY' not in config:
return render_template("500.html"), 500
code = oauthget('code', scope='l2p2013.rwth')
code = oauthget('code', scope='moodle.rwth')
session['oauthcode'] = code['device_code']
session['oauthscope'] = 'l2p'
session['oauthscope'] = 'moodle'
return redirect(code['verification_url']+'?q=verify&d='+code['user_code'])
@app.route('/internal/moodlel2pauth')
def start_moodlel2pauth():
return start_moodleauth()
@app.route('/internal/rwthauth')
def start_rwthauth():
if 'L2P_APIKEY' not in config:
......@@ -42,12 +51,17 @@ def finish_oauth():
if token.get('status') != 'ok':
return
del session['oauthcode']
if session['oauthscope'] not in ['l2p', 'rwth']:
if session['oauthscope'] not in ['l2p', 'rwth', 'moodle', 'l2pandmoodle']:
return
session['rwthintern'] = True
if session['oauthscope'] == 'l2p':
session['l2p_courses'] = []
for course in l2pget('viewAllCourseInfo', token['access_token'])['dataSet']:
session['l2p_courses'].append(course['uniqueid'])
if session['oauthscope'] == 'moodle' or session['oauthscope'] == 'l2pandmoodle':
data = moodleget('getmyenrolledcourses', token['access_token'])
if data and data.get('Data'):
session['moodle_courses'] = []
for course in data['Data']:
session['moodle_courses'].append(str(course['id']))
else:
notify_admins('endpoint_exception', traceback="finish_oauth failed while getting moodle courses, data={}".format(str(data)))
del session['oauthscope']
oauthget('token', refresh_token=token['refresh_token'], grant_type='invalidate')