Compare revisions

Commits on Source (157)
.gitignore
@@ -12,3 +12,4 @@ nginx.conf
 uwsgi.sock
 .coverage
 htmlcov/
+.idea/
.gitlab-ci.yml
-unittest:
-  image: debian:stretch
+linter:
+  image: registry.git.fsmpi.rwth-aachen.de/videoaginfra/testenvs/bullseye
   stage: test
   script:
-    - apt update
-    - apt install -y python3
+    - uname -a
     - python3 -V
+    - pylint --version
+    - pylint --rcfile=.pylintrc *.py | tee pylint.txt
+  artifacts:
+    paths:
+      - pylint.txt
+
+unittest: &unittest
+  image: registry.git.fsmpi.rwth-aachen.de/videoaginfra/testenvs/bullseye
+  stage: test
+  script:
     - uname -a
-    - apt install -y sqlite3 locales-all git python3-flask python3-ldap3 python3-requests python3-lxml python3-icalendar python3-mysql.connector python3-requests python3-coverage
-    - python3 -m coverage run runTests.py
+    - python3 -V
+    - python3 -m coverage run run_tests.py
     - python3 -m coverage report --include "./*"
     - python3 -m coverage report -m --include "./*" > report.txt
     - python3 -m coverage html --include "./*"
@@ -16,21 +25,3 @@ unittest:
       - htmlcov/*
       - .coverage
       - report.txt
-livetest:
-  image: debian:stretch
-  stage: test
-  script:
-    - apt update
-    - apt install -y python3
-    - python3 -V
-    - uname -a
-    - apt install -y python3-requests
-#    - ./tests/urlcheck_sinf.py
-deploy_staging:
-  image: archlinux/base
-  stage: deploy
-  script:
-    - pacman --noconfirm -Sy ansible git
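The new `linter` job boils down to two pylint invocations, so the same check can be run locally before pushing. A minimal sketch (assuming pylint is installed, e.g. via `pip3 install pylint` as the Dockerfile below does):

```
pylint --version
pylint --rcfile=.pylintrc *.py | tee pylint.txt
```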
.pylintrc
[MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-whitelist=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use.
jobs=1
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Specify a configuration file.
#rcfile=
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=consider-using-dict-items,
consider-using-f-string,
consider-using-with,
cyclic-import, # remove me later, should be fixed but needs major refactoring
function-redefined, # remove me later
implicit-str-concat,
invalid-name,
line-too-long,
missing-function-docstring,
missing-module-docstring,
no-else-return,
pointless-string-statement, # docstrings were misdetected
redefined-builtin,
redefined-outer-name, # remove me later
too-many-return-statements,
trailing-whitespace,
unneeded-not,
unspecified-encoding,
unused-variable,
unused-wildcard-import,
use-implicit-booleaness-not-comparison,
use-maxsplit-arg,
wildcard-import
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[REPORTS]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'error', 'warning', 'refactor', and 'convention'
# which contain the number of messages in each category, as well as 'statement'
# which is the total number of statements analyzed. This score is used by the
# global evaluation report (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=yes
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit
[SIMILARITIES]
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
# Minimum lines number of a similarity.
min-similarity-lines=4
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
# List of decorators that change the signature of a decorated function.
signature-mutators=
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style.
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style.
#class-attribute-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
j,
e,
k,
f,
r,
ex,
Run,
_,
id,
db,
ip,
app,
config,
cur
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style.
#variable-rgx=
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
XXX,
TODO
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
_cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored. Default to name
# with leading underscore.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
[STRING]
# This flag controls whether the implicit-str-concat-in-sequence should
# generate a warning on implicit string concatenation in sequences defined over
# several lines.
check-str-concat-over-line-jumps=no
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. Available dictionaries: none. To make it work,
# install the python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=1
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=\t
# Maximum number of characters on a single line.
max-line-length=160
# Maximum number of lines in a module.
max-module-lines=1000
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[LOGGING]
# Format style used to check logging format string. `old` means using %
# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings.
logging-format-style=old
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[IMPORTS]
# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=optparse,tkinter.tix
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled).
ext-import-graph=
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled).
import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
__new__,
setUp,
__post_init__
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=cls
[DESIGN]
# Maximum number of arguments for function / method.
max-args=5
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5
# Maximum number of branch for function / method body.
max-branches=12
# Maximum number of locals for function / method body.
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body.
max-returns=6
# Maximum number of statements in function / method body.
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "BaseException, Exception".
overgeneral-exceptions=BaseException,
Exception
Dockerfile
FROM ubuntu
RUN mkdir -p /code
COPY requirements.txt /code
WORKDIR /code
RUN apt update && apt install python3 python3-flask sqlite python3-requests python3-lxml python3-ldap3 python3-icalendar python3-mysql.connector locales -y
RUN locale-gen de_DE.utf8
RUN apt install git -y
# Install uwsgi
RUN apt update && apt install python3-pip -y
RUN pip3 install uwsgi
# Install pylint
RUN pip3 install pylint
# Install nginx
RUN apt install nginx -y
COPY . /code
CMD ["bash", "/code/docker_start.sh"]
README.md
@@ -6,7 +6,7 @@ Note: this variant starts a local test version of the website; not ...
 1. Clone the repo
 2. Enter the directory
-3. (optional) Edit config.py.example and save it as config.py
+3. (optional) Edit config.py.example and save it as config.py (e.g. DEBUG = True)
 4. Check that all dependencies are met (see below)
 5. Run `./run.py`
 6. The website is now available at [http://localhost:5000](http://localhost:5000)
@@ -15,9 +15,9 @@ Note: this variant starts a local test version of the website; not ...
 Alternatively, in particular for testing the access restrictions: see `nginx.example.conf`.

 ### Unit tests
-Tests can be run with `./runTests.py`.
+Tests can be run with `./run_tests.py`.

-Coverage tests can be run with `rm .coverage; python -m coverage run runTests.py; python -m coverage html`. This creates a folder `htmlcov` containing the HTML output.
+Coverage tests can be run with `rm .coverage; python -m coverage run run_tests.py; python -m coverage html`. This creates a folder `htmlcov` containing the HTML output.

 ### Contributing:
 1. Fork the repo for your own user, using the "Fork" button on the website
@@ -35,17 +35,41 @@ Here, origin is your own user and upstream the original of the videoagwebsite group ...
 ### Dependencies
 Required:
 * python (version 3)
-* sqlite
+* sqlite3 (Python builtin)
 * python-flask
 * python-requests (used by the L2P and the calendar import, cannot be made optional)
 * git (for displaying the current version)

 Optional (needed for individual features):
-* python-lxml (campus import)
-* python-ldap (login with Fachschaft account)
-* python-icalendar (SoGo calendar import for meeting announcements)
+* python-lxml (campus and RO import)
+* python-pytz (RO import)
+* python-ldap3 (login with Fachschaft account)
+* python-icalendar (RO import, calendar import for meeting announcements)
 * python-mysql-connector (if MySQL is to be used as the database)
 * python-coverage (needed for coverage tests)

 Short form on Ubuntu:
 `sudo apt install python3 python3-flask sqlite python3-requests python3-lxml python3-ldap3 python3-icalendar python3-mysql.connector`
+
+Using Python's own package manager:
+`pip install -r requirements.txt`
+
+---
+
+### Alternative: Docker image
+As an alternative to the setup above, Docker can be used to run the local test version:
+1. Build the image locally with `docker build -t videoag .` in this directory.
+2. Start a corresponding container, e.g.: `docker run --rm --name=videoag -p 5000:5000 videoag`
+   - `--rm` removes the container once it terminates
+   - `-p 5000:5000` maps the port so the host can reach the website. Do not change the local port, otherwise thumbnails or videos may fail to load.
+   - Additionally, `-v /local/path/:/code` can be used to mount the source code from the host into the container.
+3. Visit the website at `localhost:5000`.
+
+For this variant, the following should be set in `config.py`:
+```
+SERVER_IP = 'localhost'
+VIDEOPREFIX = '/files'
+```
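The contributing workflow above relies on two remotes: origin (your fork) and upstream (the videoagwebsite group's original). A minimal sketch of that setup, with hypothetical clone URLs (adjust host and user to your fork):

```
git clone git@git.fsmpi.rwth-aachen.de:<your-user>/website.git
cd website
git remote add upstream git@git.fsmpi.rwth-aachen.de:videoagwebsite/website.git
git fetch upstream
```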
@@ -2,7 +2,7 @@ import json
 from server import *

 @job_handler('probe', 'probe-raw')
-def import_xmp_chapters(jobid, jobtype, data, state, status):
+def import_xmp_chapters(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
     if 'lecture_id' not in data or not data.get('import-chapters', False):
         return
     times = set()
@@ -14,19 +14,19 @@ def import_xmp_chapters(jobid, jobtype, data, state, status):
     for chapter in status.get('xmp_chapters', []):
         if int(chapter['time']) in times:
             continue
-        modify('INSERT INTO chapters (lecture_id, time, text, visible, time_created, time_updated) VALUES (?, ?, ?, 0, ?, ?)',
+        modify(
+            'INSERT INTO chapters (lecture_id, time, text, visible, time_created, time_updated) VALUES (?, ?, ?, false, ?, ?)',
             data['lecture_id'], int(chapter['time']), chapter['text'],
-            datetime.now(), datetime.now())
+            datetime.now(), datetime.now()
+        )

 @app.route('/internal/newchapter/<int:lectureid>', methods=['POST', 'GET'])
 def suggest_chapter(lectureid):
-    time = request.values['time']
     text = request.values['text']
-    assert(time and text)
+    assert 'time' in request.values and 'text' in request.values
     try:
-        x = datetime.strptime(time,'%H:%M:%S')
-        time= timedelta(hours=x.hour,minutes=x.minute,seconds=x.second).total_seconds()
-        time = int(time)
+        parsed_datetime = datetime.strptime(request.values['time'], '%H:%M:%S')
+        chapter_start = int(timedelta(hours=parsed_datetime.hour, minutes=parsed_datetime.minute, seconds=parsed_datetime.second).total_seconds())
     except ValueError:
         if 'ref' in request.values:
             flash('Falsches Zeitformat, "%H:%M:%S" wird erwartet. Z.B. "01:39:42" für eine Kapitel bei Stunde 1, Minute 39, Sekunde 42')
@@ -38,9 +38,13 @@ def suggest_chapter(lectureid):
     submitter = request.environ['REMOTE_ADDR']
     lecture = query('SELECT * FROM lectures WHERE id = ?', lectureid)[0]
     course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0]
-    id = modify('INSERT INTO chapters (lecture_id, time, text, time_created, time_updated, created_by, submitted_by) VALUES (?, ?, ?, ?, ?, ?, ?)',
-        lectureid, time, text, datetime.now(), datetime.now(), session.get('user', {'dbid':None})['dbid'], submitter)
+    id = modify(
+        'INSERT INTO chapters (lecture_id, time, text, time_created, time_updated, created_by, submitted_by) VALUES (?, ?, ?, ?, ?, ?, ?)',
+        lectureid, chapter_start, text, datetime.now(), datetime.now(), session.get('user', {'dbid':None})['dbid'], submitter,
+        get_id=True
+    )
     chapter = query('SELECT * FROM chapters WHERE id = ?', id)[0]
-    notify_mods('chapter_submitted', course['id'], course=course, lecture=lecture, chapter=chapter)
+    if not ismod():
+        notify_mods('chapter_submitted', course['id'], course=course, lecture=lecture, chapter=chapter)
     if 'ref' in request.values:
         return redirect(request.values['ref'])
@@ -52,10 +56,10 @@ def chapters(lectureid):
     if not chapters:
         return 'No chapters found', 404
     last = None
-    for c in chapters:
-        c['start'] = c['time']
-        c['end'] = last['start'] if last else 9999
-        last = c
+    for chapter in chapters:
+        chapter['start'] = chapter['time']
+        chapter['end'] = last['start'] if last else 9999 #pylint: disable=unsubscriptable-object
+        last = chapter
     if 'json' in request.values:
         return Response(json.dumps([{'time': c['time'], 'text': c['text']} for c in chapters]), mimetype='application/json')
     return Response(render_template('chapters.srt', chapters=chapters), 200, {'Content-Type':'text/vtt'})
config.py.example
 # Defaults for development ,do not use in production!
 DEBUG = False
 SERVER_IP = 'localhost'
-VIDEOPREFIX = 'https://videoag.fsmpi.rwth-aachen.de'
+VIDEOPREFIX = '/files'
 VIDEOMOUNT = [{'mountpoint': 'files/protected/', 'prefix':'protected/'},{'mountpoint':'files/pub/','prefix':'pub/' }, {'mountpoint':'files/vpnonline/','prefix':'vpnonline/' }]
 #SECRET_KEY = 'something random'
@@ -16,6 +16,13 @@ DB_DATA = 'db_example.sql'
 #MYSQL_PASSWD = 'somuchsecret'
 #MYSQL_DB = 'videos'

+#DB_ENGINE = 'postgres'
+POSTGRES_HOST = '10.0.0.101'
+POSTGRES_PORT = 5432
+POSTGRES_USER = 'videoag'
+POSTGRES_PASSWORD = ''
+POSTGRES_DATABASE = 'videoag'
+
 DB_ENGINE = 'sqlite'
 SQLITE_DB = 'db.sqlite'
 SQLITE_INIT_SCHEMA = True
@@ -38,3 +45,5 @@ MAIL_FROM = 'Video AG-Website <videoag-it@lists.fsmpi.rwth-aachen.de>'
 MAIL_SUFFIX = 'fsmpi.rwth-aachen.de'
 MAIL_DEFAULT = 'Video AG <videoag@fsmpi.rwth-aachen.de>'
 MAIL_ADMINS = 'videoag-it@lists.fsmpi.rwth-aachen.de'
+STREAMING_SERVER = 'rtmp://video-web-0.fsmpi.rwth-aachen.de/src/'
+BACKUP_STREAMING_SERVER = 'rtmp://video-web-1.fsmpi.rwth-aachen.de/src/'
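Given the newly added block above, pointing a local `config.py` at the Postgres backend is just a matter of flipping `DB_ENGINE` and filling in the `POSTGRES_*` values; a minimal sketch (host and credentials are placeholders):

```
DB_ENGINE = 'postgres'
POSTGRES_HOST = 'localhost'
POSTGRES_PORT = 5432
POSTGRES_USER = 'videoag'
POSTGRES_PASSWORD = 'changeme'
POSTGRES_DATABASE = 'videoag'
```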
 from server import *
-from datetime import time

 @register_navbar('Schnittfortschritt User', icon='spinner', iconlib='fa', userendpoint=True, endpoint='cutprogress_user')
 @register_navbar('Schnittfortschritt', icon='spinner', iconlib='fa')
@@ -7,22 +6,85 @@ from datetime import time
 @app.route('/internal/user/<int:user>/cutprogress', endpoint='cutprogress_user')
 @mod_required
 def cutprogress(user=None):
-    allsemester = query('SELECT DISTINCT semester from courses ORDER BY semester DESC');
+    allsemester = query('SELECT DISTINCT semester from courses ORDER BY semester DESC')
     semester = request.values.get('semester', allsemester[0]['semester'])
-    coursesraw = query('SELECT courses.id, courses.handle, courses.short FROM courses WHERE semester = ?', semester)
-    courses = []
-    maxlecturecount = 0
-    for course in coursesraw:
-        course['lectures'] = query('SELECT lectures.title, lectures.time, lectures.id FROM lectures WHERE lectures.course_id= ? AND NOT lectures.deleted AND NOT lectures.norecording ORDER BY lectures.time', course['id'])
-        for lecture in course['lectures']:
-            lecture['videos'] = query('SELECT videos.path, formats.description as formatdesc, videos.visible FROM videos JOIN formats ON (videos.video_format = formats.id) WHERE videos.lecture_id = ? AND NOT videos.deleted', lecture['id'])
-        course['responsible'] = query('''SELECT users.*
-            FROM responsible
-            JOIN users ON (responsible.user_id = users.id AND responsible.course_id = ?)
-            ORDER BY users.realname ASC''', course['id'])
-        if len(course['responsible']) == 0:
-            course['responsible'] = [{"realname": "Niemand", "id": -1}]
-        if not user or user in [ r['id'] for r in course['responsible'] ]:
-            courses.append(course)
-            maxlecturecount = max(len(course['lectures']),maxlecturecount)
-    return render_template('cutprogress.html', allsemester=allsemester, semester=semester, courses=courses, maxlecturecount=maxlecturecount, user=query('SELECT * FROM users WHERE id = ?', user)[0] if user else None)
+    courses = query('''
+        SELECT courses.id, courses.handle, courses.short
+        FROM courses
+        WHERE semester = ?
+        ORDER by id DESC
+    ''', semester)
+    # Fetch list of people responsible for every course
+    for course in courses:
+        people = query('''
+            SELECT users.*
+            FROM users
+            JOIN responsible ON responsible.user_id = users.id
+            WHERE responsible.course_id = ?
+            ORDER BY users.realname ASC
+        ''', course['id'])
+        if not people:
+            people = [{'realname': 'Niemand', 'id': -1}]
+        course['responsible'] = people
+    if user is not None:
+        courses = [
+            c for c in courses
+            if user in (r['id'] for r in c['responsible'])
+        ]
+    # Fetch lectures for all courses
+    lectures = []
+    for course in courses:
+        lectures += query('''
+            SELECT
+                lectures.id,
+                lectures.course_id,
+                lectures.time,
+                lectures.title,
+                COALESCE(video_counts.videos_total, 0),
+                COALESCE(video_counts.videos_visible, 0)
+            FROM lectures
+            JOIN courses ON ( courses.id = lectures.course_id )
+            LEFT JOIN (
+                SELECT
+                    videos.lecture_id,
+                    COUNT(videos.id) as videos_total,
+                    COUNT(videos.visible) as videos_visible
+                FROM videos
+                GROUP BY videos.lecture_id
+            ) AS video_counts ON ( video_counts.lecture_id = lectures.id )
+            WHERE courses.id = ?
+            AND lectures.time <= ?
+            AND NOT lectures.norecording
+            ORDER BY lectures.time ASC, lectures.id ASC
+        ''', course['id'], datetime.now())
+    # Generate list of days, figure out when weeks change
+    dates = sorted({row['time'].date() for row in lectures}, reverse=True)
+    is_new_weeks = [
+        False if (i == 0) else thisdate.isocalendar()[1] != dates[i-1].isocalendar()[1]
+        for i, thisdate in enumerate(dates)
+    ]
+    # Sort into cells
+    tablebody = [
+        {
+            'date': date, # row header
+            'is_new_week': is_new_week,
+            'cells': [ # this is the body of the row
+                [ # this list is a cell
+                    lecture
+                    for lecture in lectures
+                    if lecture['course_id'] == course['id'] and lecture['time'].date() == date
+                ]
+                for course in courses
+            ]
+        }
+        for date, is_new_week in zip(dates, is_new_weeks)
+    ]
+    return render_template('cutprogress.html',
+        # dropdown selection
+        allsemester=allsemester, # options
+        semester=semester, # choice
+        user=query('SELECT * FROM users WHERE id = ?', user)[0] if user else None,
+        # content
+        courses=courses,
+        tablebody=tablebody
+    )
+import sqlite3
+from flask import g
 from server import *

 if config['DB_ENGINE'] == 'sqlite':
-    import sqlite3
     # From sqlite3 module, but with error catching
     def convert_timestamp(val):
         try:
@@ -19,13 +20,13 @@ if config['DB_ENGINE'] == 'sqlite':
     sqlite3.register_converter('timestamp', convert_timestamp)

 if config['DB_ENGINE'] == 'sqlite':
-    created = not os.path.exists(config['SQLITE_DB'])
+    DBCREATED = not os.path.exists(config['SQLITE_DB'])
     db = sqlite3.connect(config['SQLITE_DB'])
     cur = db.cursor()
     if config['SQLITE_INIT_SCHEMA']:
         print('Init db schema')
         cur.executescript(open(config['DB_SCHEMA']).read())
-    if config['SQLITE_INIT_DATA'] and created:
+    if config['SQLITE_INIT_DATA'] and DBCREATED:
         print('Init db data')
         cur.executescript(open(config['DB_DATA']).read())
     db.commit()
@@ -43,15 +44,21 @@ if config['DB_ENGINE'] == 'sqlite':
         params = [(p.replace(microsecond=0) if isinstance(p, datetime) else p) for p in params]
         return operation, params

-    def show(operation, host=None):
+    def show(operation, host=None): #pylint: disable=unused-argument
         return {}

 elif config['DB_ENGINE'] == 'mysql':
     import mysql.connector
     def get_dbcursor():
         if 'db' not in g or not g.db.is_connected():
-            g.db = mysql.connector.connect(user=config['MYSQL_USER'], password=config['MYSQL_PASSWD'], host=config.get('MYSQL_HOST', None), port=config.get('MYSQL_PORT', 3306), unix_socket=config.get('MYSQL_UNIX', None), database=config['MYSQL_DB'])
+            g.db = mysql.connector.connect(
+                user=config['MYSQL_USER'],
+                password=config['MYSQL_PASSWD'],
+                host=config.get('MYSQL_HOST', None),
+                port=config.get('MYSQL_PORT', 3306),
+                unix_socket=config.get('MYSQL_UNIX', None),
+                database=config['MYSQL_DB'])
+            g.db.cmd_query("SET SESSION sql_mode = 'ANSI_QUOTES'")
         if not hasattr(request, 'db'):
             request.db = g.db.cursor()
         return request.db
@@ -71,8 +78,8 @@ elif config['DB_ENGINE'] == 'mysql':
         rows = []
         try:
             rows = cur.fetchall()
-        except mysql.connector.errors.InterfaceError as ie:
-            if ie.msg == 'No result set to fetch from.':
+        except mysql.connector.errors.InterfaceError as e:
+            if e.msg == 'No result set to fetch from.':
                 # no problem, we were just at the end of the result set
                 pass
             else:
@@ -83,26 +90,50 @@ elif config['DB_ENGINE'] == 'mysql':
         cur.close()
         db.close()
         return res

+elif config['DB_ENGINE'] == 'postgres':
+    import psycopg2 # pylint: disable=import-error
+    def get_dbcursor():
+        if 'db' not in g or g.db.closed:
+            g.db = psycopg2.connect(
+                host=config["POSTGRES_HOST"],
+                port=config["POSTGRES_PORT"],
+                user=config["POSTGRES_USER"],
+                password=config["POSTGRES_PASSWORD"],
+                dbname=config["POSTGRES_DATABASE"]
+            )
+        if not hasattr(request, 'db'):
+            request.db = g.db.cursor()
+        return request.db
+
+    def fix_query(operation, params):
+        operation = operation.replace('?', '%s')
+        params = [(p.replace(microsecond=0) if isinstance(p, datetime) else p) for p in params]
+        return operation, params
+
+    def show(operation, host=None): #pylint: disable=unused-argument
+        return {}

 def query(operation, *params, delim="sep", nlfix=True):
     operation, params = fix_query(operation, params)
     tries = 0
-    while (tries < 10):
+    retry = True
+    while tries < 10 and retry:
+        retry = False
         try:
             cur = get_dbcursor()
             cur.execute(operation, params)
-        except mysql.connector.errors.InternalError as e:
-            if e.msg == 'Deadlock found when trying to get lock; try restarting transaction':
+        except Exception as e: # pylint: disable=broad-except
+            if str(e) == 'Deadlock found when trying to get lock; try restarting transaction':
                 tries += 1
-                continue
+                retry = True
             else:
                 raise
-        break
     rows = []
     try:
         rows = cur.fetchall()
-    except mysql.connector.errors.InterfaceError as ie:
-        if ie.msg == 'No result set to fetch from.':
+    except Exception as e: # pylint: disable=broad-except
+        if str(e) == 'no results to fetch' or str(e) == "the last operation didn't produce a result":
             # no problem, we were just at the end of the result set
             pass
         else:
@@ -116,42 +147,34 @@ def query(operation, *params, delim="sep", nlfix=True):
         if name == delim:
             ptr = res[-1][col] = {}
             continue
-        if type(col) == str and nlfix:
+        if isinstance(col, str) and nlfix:
             col = col.replace('\\n', '\n').replace('\\r', '\r')
         ptr[name] = col
     return res

-def modify(operation, *params):
+def modify(operation, *params, get_id=False):
     operation, params = fix_query(operation, params)
+    if get_id and config["DB_ENGINE"] == "postgres":
+        operation += " RETURNING id" # Not nice, but works for now
     cur = get_dbcursor()
     cur.execute(operation, params)
-    return cur.lastrowid
+    if not get_id:
+        return None
+    if config["DB_ENGINE"] != "postgres":
+        return cur.lastrowid
+    all_res = cur.fetchall()
+    if len(all_res) <= 0:
+        raise ValueError("Got no id")
+    return int(all_res[0][0])

 @app.teardown_request
-def commit_db(*args):
+def commit_db(*args): #pylint: disable=unused-argument
     if hasattr(request, 'db'):
         request.db.close()
         g.db.commit()

 @app.teardown_appcontext
-def close_db(*args):
+def close_db(*args): #pylint: disable=unused-argument
     if 'db' in g:
         g.db.close()
         del g.db

-def searchquery(text, columns, match, tables, suffix, *suffixparams):
-    params = []
-    subexprs = []
-    words = text.split(' ')
-    prio = len(words)+1
-    for word in words:
-        if word == '' or word.isspace():
-            continue
-        matchexpr = ' OR '.join(['%s LIKE ?'%column for column in match])
-        subexprs.append('SELECT %s, %s AS _prio FROM %s WHERE %s'%(columns, str(prio), tables, matchexpr))
-        params += ['%'+word+'%']*len(match)
-        prio -= 1
-    if subexprs == []:
-        return []
-    expr = 'SELECT *,SUM(_prio) AS _score FROM (%s) AS _tmp %s'%(' UNION '.join(subexprs), suffix)
-    return query(expr, *(list(params)+list(suffixparams)))
db_example.sql
@@ -8611,6 +8611,7 @@ INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,
 INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,`title`,`comment`,`internal`,`path`,`file_modified`,`time_created`,`time_updated`,`created_by`,`file_size`,`video_format`,`hash`) VALUES (9681,7012,1,0,1,'','','','pub/16ss-dsal/16ss-dsal-160715-1080p_1.mp4','2016-08-07 22:54:46','2016-08-07 21:02:37','2016-08-07 21:02:43',46,1402602183,4,'e036f7cbd51afd3ab7be10cf77747c00');
 INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,`title`,`comment`,`internal`,`path`,`file_modified`,`time_created`,`time_updated`,`created_by`,`file_size`,`video_format`,`hash`) VALUES (9682,7012,1,0,1,'','','','pub/16ss-dsal/16ss-dsal-160715-360p_1.mp4','2016-08-07 22:45:34','2016-08-07 21:02:38','2016-08-07 21:02:45',46,368611109,10,'fae2bda2da55a3005aa6329a2d0227c3');
 INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,`title`,`comment`,`internal`,`path`,`file_modified`,`time_created`,`time_updated`,`created_by`,`file_size`,`video_format`,`hash`) VALUES (9683,7012,1,0,1,'','','','pub/16ss-dsal/16ss-dsal-160715-720p_1.mp4','2016-08-07 22:46:00','2016-08-07 21:02:40','2016-08-07 21:02:44',46,721141077,5,'083c0b7693c82078c513707d1402096b');
+INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,`title`,`comment`,`internal`,`path`,`file_modified`,`time_created`,`time_updated`,`created_by`,`file_size`,`video_format`,`hash`, `source`) VALUES (16080,7012,1,0,1,'','','','pub/17ws-cgbp/17ws-cgbp-171114-720p.mp4','2018-01-12 04:36:44','2018-01-12 04:36:44','2018-01-12 04:36:44',-1,607257928,5,"8fa956b14162ec42c1dabc11d53671c5",89);
 INSERT INTO `users` (`id`,`name`,`realname`,`level`,`fsacc`,`last_login`,`calendar_key`,`rfc6238`) VALUES (1,'gustav1','Gustav Geier',0,'',NULL,'','');
 INSERT INTO `users` (`id`,`name`,`realname`,`level`,`fsacc`,`last_login`,`calendar_key`,`rfc6238`) VALUES (2,'gustav2','Gustav Geier',0,'',NULL,'','');
 INSERT INTO `users` (`id`,`name`,`realname`,`level`,`fsacc`,`last_login`,`calendar_key`,`rfc6238`) VALUES (4,'gustav4','Gustav Geier',0,'',NULL,'','');
@@ -14646,4 +14647,5 @@ INSERT INTO `areas` (`area`,`abbreviation`,`default`,`rank`,`coordinates`) VALUE
 INSERT INTO `profiles` (`name`,`format`) VALUES ('default',4);
 INSERT INTO `profiles` (`name`,`format`) VALUES ('default',5);
 INSERT INTO `profiles` (`name`,`format`) VALUES ('default',10);
+INSERT INTO `sources` (`id`, `lecture_id`, `path`, `type`, `hash`, `time_created`) VALUES (89, 7012, 'autoencode/something', 'plain', '000000000', '2024-01-01 00:00:00');
 COMMIT;
@@ -47,7 +47,7 @@ CREATE TABLE IF NOT EXISTS `courses_data` (
     `deleted` INTEGER NOT NULL DEFAULT '0',
     `title` text NOT NULL DEFAULT '',
     `short` varchar(32) NOT NULL DEFAULT '',
-    `handle` varchar(32) NOT NULL DEFAULT '',
+    `handle` varchar(32) NOT NULL,
     `organizer` text NOT NULL DEFAULT '',
     `subject` varchar(32) NOT NULL DEFAULT '',
     `credits` INTEGER NOT NULL DEFAULT '0',
@@ -66,7 +66,8 @@ CREATE TABLE IF NOT EXISTS `courses_data` (
     `coursechapters` INTEGER NOT NULL DEFAULT 0,
     `autopublish` INTEGER NOT NULL DEFAULT 0,
     `autovisible` INTEGER NOT NULL DEFAULT 0,
-    `profile` varchar(64) NOT NULL DEFAULT 'default'
+    `profile` varchar(64) NOT NULL DEFAULT 'default',
+    `login_info` text NOT NULL DEFAULT ''
 );
 CREATE TABLE IF NOT EXISTS `filesizes` (
     `path` varchar(255) NOT NULL PRIMARY KEY,
@@ -107,7 +108,9 @@ CREATE TABLE IF NOT EXISTS `lectures_data` (
     `titlefile` varchar(255) NOT NULL DEFAULT '',
     `live` INTEGER NOT NULL DEFAULT 0,
     `norecording` INTEGER NOT NULL DEFAULT 0,
-    `profile` varchar(64)
+    `profile` varchar(64),
+    `stream_settings` text NOT NULL DEFAULT '',
+    `stream_job` INTEGER
 );
 CREATE TABLE IF NOT EXISTS `places` (
     `place` varchar(20) NOT NULL PRIMARY KEY,
@@ -153,6 +156,7 @@ CREATE TABLE IF NOT EXISTS `log` (
 CREATE TABLE IF NOT EXISTS `hlslog` (
     `id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
     `time` datetime NOT NULL,
+    `segment` INTEGER,
     `source` INTEGER,
     `lecture` INTEGER,
     `handle` varchar(32),
@@ -256,8 +260,8 @@ CREATE TABLE IF NOT EXISTS `announcements` (
     `level` INTEGER NOT NULL DEFAULT 0,
     `visible` INTEGER NOT NULL DEFAULT 0,
     `deleted` INTEGER NOT NULL DEFAULT 0,
-    `time_publish` datetime DEFAULT '',
-    `time_expire` datetime DEFAULT '',
+    `time_publish` datetime,
+    `time_expire` datetime,
     `time_created` datetime NOT NULL,
     `time_updated` datetime NOT NULL,
     `created_by` INTEGER NOT NULL
@@ -267,7 +271,7 @@ CREATE TABLE IF NOT EXISTS `featured` (
     `title` text NOT NULL DEFAULT '',
     `text` text NOT NULL DEFAULT '',
     `internal` text NOT NULL DEFAULT '',
-    `type` varchar(32) NOT NULL DEFAULT '',
+    `type` varchar(32) NOT NULL,
     `param` text NOT NULL DEFAULT '',
     `param2` text NOT NULL DEFAULT '',
     `order` INTEGER DEFAULT NULL,
...
docker_start.sh
#!/bin/bash
# This file is executed when the docker container starts!
cd /code;
nginx -c nginx.conf.example -p . &
# Use -C argument to tell uwsgi to chmod 666 /uswgi.sock
exec uwsgi -C -s uwsgi.sock --manage-script-name --mount /=server:app --plugin python --enable-threads
import math
from server import * from server import *
# field types: # field types:
...@@ -7,7 +9,8 @@ from server import * ...@@ -7,7 +9,8 @@ from server import *
# datetime # datetime
# duration # duration
# videotime # videotime
editable_tables = {
editable_tables = { #pylint: disable=invalid-name
'courses': { 'courses': {
'table': 'courses_data', 'table': 'courses_data',
'idcolumn': 'id', 'idcolumn': 'id',
...@@ -28,7 +31,9 @@ editable_tables = { ...@@ -28,7 +31,9 @@ editable_tables = {
'external': {'type': 'boolean', 'description': 'Soll die Veranstaltung nicht im Drehplan angezeigt werden?'}, 'external': {'type': 'boolean', 'description': 'Soll die Veranstaltung nicht im Drehplan angezeigt werden?'},
'coursechapters': {'type': 'boolean', 'description': 'Sollen auf der Kursseite die Kapitelmarker der Videos angezeigt werden?'}, 'coursechapters': {'type': 'boolean', 'description': 'Sollen auf der Kursseite die Kapitelmarker der Videos angezeigt werden?'},
'autopublish': {'type': 'boolean', 'description': 'Sollen encodete Videos automatisch verschoben werden?'}, 'autopublish': {'type': 'boolean', 'description': 'Sollen encodete Videos automatisch verschoben werden?'},
'autovisible': {'type': 'boolean', 'description': 'Sollen neue Videos automatisch sichtbar sein?'}}, 'autovisible': {'type': 'boolean', 'description': 'Sollen neue Videos automatisch sichtbar sein?'},
'login_info': {'type': 'text', 'description': 'Zusätliche Informationen, die dem Nutzer angezeigt werden, wenn er sich anmelden muss.'}
},
'creationtime_fields': ['created_by', 'time_created', 'time_updated']}, 'creationtime_fields': ['created_by', 'time_created', 'time_updated']},
'lectures': { 'lectures': {
'table': 'lectures_data', 'table': 'lectures_data',
...@@ -45,7 +50,9 @@ editable_tables = { ...@@ -45,7 +50,9 @@ editable_tables = {
'jumplist': {'type': ''}, 'jumplist': {'type': ''},
'deleted': {'type': 'boolean'}, 'deleted': {'type': 'boolean'},
'live': {'type': 'boolean', 'description': 'Ist ein Livestream geplant? Muss gesetzt sein damit der RTMP Stream zugeordnet wird.'}, 'live': {'type': 'boolean', 'description': 'Ist ein Livestream geplant? Muss gesetzt sein damit der RTMP Stream zugeordnet wird.'},
'norecording': {'type': 'boolean', 'description:': 'Führt dazu, dass der Termin ausgegraut wird.'}}, 'norecording': {'type': 'boolean', 'description': 'Führt dazu, dass der Termin ausgegraut wird.'},
'stream_settings': {'type': 'text'}
},
'creationtime_fields': ['course_id', 'time_created', 'time_updated']}, 'creationtime_fields': ['course_id', 'time_created', 'time_updated']},
'videos': { 'videos': {
'table': 'videos_data', 'table': 'videos_data',
...@@ -118,6 +125,7 @@ editable_tables = { ...@@ -118,6 +125,7 @@ editable_tables = {
'editable_fields': { 'editable_fields': {
'name': {'type': 'shortstring'}, 'name': {'type': 'shortstring'},
'description': {'type': 'text'}, 'description': {'type': 'text'},
'deleted': {'type': 'boolean'}
}, },
'creationtime_fields': ['created_by', 'time_created', 'time_updated']} 'creationtime_fields': ['created_by', 'time_created', 'time_updated']}
} }
...@@ -132,25 +140,31 @@ def parseeditpath(path): ...@@ -132,25 +140,31 @@ def parseeditpath(path):
return {'table': table, 'id': id, 'column': column, 'type': type, 'tableinfo': editable_tables[table]} return {'table': table, 'id': id, 'column': column, 'type': type, 'tableinfo': editable_tables[table]}
@app.template_filter(name='getfielddescription') @app.template_filter(name='getfielddescription')
def getfielddescription(path): def getfielddescription(inputpath):
p = parseeditpath(path) path = parseeditpath(inputpath)
desc = p['tableinfo']['editable_fields'][p['column']].get('description', '') desc = path['tableinfo']['editable_fields'][path['column']].get('description', '')
if desc != '': if desc != '':
desc = '<br>'+desc desc = '<br>'+desc
return desc return desc
@app.template_filter(name='getfieldchangelog') @app.template_filter(name='getfieldchangelog')
def getfieldchangelog(path): def getfieldchangelog(inputpath):
p = parseeditpath(path) path = parseeditpath(inputpath)
changelog = query('SELECT * FROM changelog LEFT JOIN users ON (changelog.who = users.id) WHERE `table` = ? AND `id_value` = ? and `field` = ? ORDER BY `when` DESC LIMIT 5', p['table'], p['id'], p['column']) changelog = query('SELECT * FROM changelog \
LEFT JOIN users ON (changelog.who = users.id) WHERE "table" = ? AND "id_value" = ? and "field" = ? \
ORDER BY "when" DESC LIMIT 5', path['table'], path['id'], path['column'])
for entry in changelog: for entry in changelog:
entry['id_value'] = str(entry['id_value'])
entry['value_new'] = str(entry['value_new'])
entry['path'] = '.'.join([entry['table'], entry['id_value'], entry['field']]) entry['path'] = '.'.join([entry['table'], entry['id_value'], entry['field']])
return changelog return changelog
@app.route('/internal/edit', methods=['GET', 'POST']) @app.route('/internal/edit', methods=['GET', 'POST'])
@mod_required @mod_required
@csrf_protect @csrf_protect
def edit(prefix='', ignore=[]): def edit(prefix='', ignore=None):
if not ignore:
ignore = []
# All editable tables are expected to have a 'time_updated' field # All editable tables are expected to have a 'time_updated' field
ignore.append('ref') ignore.append('ref')
ignore.append('prefix') ignore.append('prefix')
...@@ -165,10 +179,24 @@ def edit(prefix='', ignore=[]): ...@@ -165,10 +179,24 @@ def edit(prefix='', ignore=[]):
continue continue
key = prefix+key key = prefix+key
path = parseeditpath(key) path = parseeditpath(key)
modify('INSERT INTO changelog (`table`,id_value, id_key, field, value_new, value_old, `when`, who, executed) \ modify('INSERT INTO changelog \
VALUES (?,?,?,?,?,(SELECT `%s` FROM %s WHERE %s = ?),?,?,1)'%(path['column'], path['tableinfo']['table'], path['tableinfo']['idcolumn']), ("table",id_value, id_key, field, value_new, value_old, "when", who, executed) \
path['table'], path['id'], path['tableinfo']['idcolumn'], path['column'], val, path['id'], datetime.now(), session['user']['dbid']) VALUES (?,?,?,?,?, \
modify('UPDATE %s SET `%s` = ?, time_updated = ? WHERE `%s` = ?'%(path['tableinfo']['table'], path['column'], path['tableinfo']['idcolumn']), val, datetime.now(),path['id']) (SELECT "%s" FROM %s WHERE %s = ?),?,?,true)'%(
path['column'],
path['tableinfo']['table'],
path['tableinfo']['idcolumn']
),
path['table'],
path['id'],
path['tableinfo']['idcolumn'],
path['column'],
val,
path['id'],
datetime.now(),
session['user']['dbid'])
modify('UPDATE %s SET "%s" = ?, time_updated = ? WHERE "%s" = ?'%(path['tableinfo']['table'], path['column'], path['tableinfo']['idcolumn']),
val, datetime.now(), path['id'])
for func in edit_handlers.get(path['table'], {}).get(None, []): for func in edit_handlers.get(path['table'], {}).get(None, []):
func(path['table'], path['column'], val, path['id'], session['user']['dbid']) func(path['table'], path['column'], val, path['id'], session['user']['dbid'])
for func in edit_handlers.get(path['table'], {}).get(path['column'], []): for func in edit_handlers.get(path['table'], {}).get(path['column'], []):
...@@ -193,14 +221,16 @@ def create(table): ...@@ -193,14 +221,16 @@ def create(table):
if (request.method == 'POST') and (request.get_json()): if (request.method == 'POST') and (request.get_json()):
args = request.get_json().items() args = request.get_json().items()
for column, val in args: for column, val in args:
if (column == 'ref') or (column == '_csrf_token'): if column in ['ref', '_csrf_token']:
continue continue
assert column in list(editable_tables[table]['editable_fields'].keys())+editable_tables[table]['creationtime_fields'] assert column in list(editable_tables[table]['editable_fields'].keys())+editable_tables[table]['creationtime_fields']
assert column not in defaults assert column not in defaults
columns.append('`'+column+'`') columns.append('"'+column+'"')
values.append(val) values.append(val)
assert editable_tables[table]['idcolumn'] == 'id'
id = modify('INSERT INTO %s (%s) VALUES (%s)'%(editable_tables[table]['table'], id = modify('INSERT INTO %s (%s) VALUES (%s)'%(editable_tables[table]['table'],
','.join(columns), ','.join(['?']*len(values))), *values) ','.join(columns), ','.join(['?']*len(values))), *values,
get_id=True)
if table == 'courses': if table == 'courses':
set_responsible(id, session['user']['dbid'], 1) set_responsible(id, session['user']['dbid'], 1)
if 'ref' in request.values: if 'ref' in request.values:
...@@ -211,15 +241,9 @@ def create(table): ...@@ -211,15 +241,9 @@ def create(table):
@register_navbar('Changelog', icon='book', group='weitere') @register_navbar('Changelog', icon='book', group='weitere')
@mod_required @mod_required
def changelog(): def changelog():
if 'page' in request.args: page = max(0, int(request.args.get('page', 0)))
page = max(0, int(request.args['page'])) pagesize = min(500, int(request.args.get('pagesize', 50)))
else: changelog = query('SELECT * FROM changelog LEFT JOIN users ON (changelog.who = users.id) ORDER BY "when" DESC LIMIT ? OFFSET ?', pagesize, page*pagesize)
page = 0
if 'pagesize' in request.args:
pagesize = min(500, int(request.args['pagesize']))
else:
pagesize = 50
changelog = query('SELECT * FROM changelog LEFT JOIN users ON (changelog.who = users.id) ORDER BY `when` DESC LIMIT ? OFFSET ?', pagesize, page*pagesize)
pagecount = math.ceil(query('SELECT count(id) as count FROM changelog')[0]['count']/pagesize) pagecount = math.ceil(query('SELECT count(id) as count FROM changelog')[0]['count']/pagesize)
for entry in changelog: for entry in changelog:
entry['path'] = '.'.join([entry['table'], entry['id_value'], entry['field']]) entry['path'] = '.'.join([entry['table'], entry['id_value'], entry['field']])
...@@ -231,12 +255,13 @@ def changelog(): ...@@ -231,12 +255,13 @@ def changelog():
@csrf_protect @csrf_protect
def set_responsible(course_id, user_id, value): def set_responsible(course_id, user_id, value):
if value: if value:
modify('REPLACE INTO responsible (course_id, user_id) values (?, ?)', course_id, user_id); if not query('SELECT id FROM responsible WHERE course_id = ? AND user_id = ?', course_id, user_id):
modify('INSERT INTO responsible (course_id, user_id) VALUES (?, ?)', course_id, user_id)
else: else:
modify('DELETE FROM responsible WHERE course_id = ? AND user_id = ?', course_id, user_id); modify('DELETE FROM responsible WHERE course_id = ? AND user_id = ?', course_id, user_id)
return "OK", 200 return "OK", 200
edit_handlers = {} edit_handlers = {} #pylint: disable=invalid-name
def edit_handler(*tables, field=None): def edit_handler(*tables, field=None):
def wrapper(func): def wrapper(func):
for table in tables: for table in tables:
......
from server import *
from sorter import insert_video
import os.path import os.path
import json import json
from server import *
from sorter import insert_video
from edit import edit_handler
def set_metadata(dest, course, lecture): def set_metadata(dest, course, lecture):
chapters = query('SELECT text, time FROM chapters WHERE lecture_id = ? AND visible ORDER BY time', lecture['id']) chapters = query('SELECT text, time FROM chapters WHERE lecture_id = ? AND visible ORDER BY time', lecture['id'])
metadata = {'title': lecture['title'], 'album': course['title'], metadata = {'title': lecture['title'], 'album': course['title'],
...@@ -12,6 +14,14 @@ def set_metadata(dest, course, lecture): ...@@ -12,6 +14,14 @@ def set_metadata(dest, course, lecture):
dest['metadata'] = metadata dest['metadata'] = metadata
dest['chapters'] = chapters dest['chapters'] = chapters
# Incomplete and not enabled currently
#def schedule_intro(lectureid):
# lecture = query('SELECT * FROM lectures where id = ?', lectureid)
# course = query('SELECT * FROM course where id = ?', lecture['course_id'])
# data = {'path': path, 'lecture_id': lectureid}
# set_metadata(data, course, lecture)
# schedule_job('intro', data)
def schedule_remux(lectureid, videoid=None): def schedule_remux(lectureid, videoid=None):
lecture = query('SELECT * FROM lectures WHERE id = ?', lectureid)[0] lecture = query('SELECT * FROM lectures WHERE id = ?', lectureid)[0]
course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0] course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0]
...@@ -48,8 +58,8 @@ def add_remux_job(): ...@@ -48,8 +58,8 @@ def add_remux_job():
def schedule_transcode(source, fmt_id=None, video=None): def schedule_transcode(source, fmt_id=None, video=None):
if video: if video:
fmt_id = video['video_format'] fmt_id = video['video_format']
assert(video['lecture_id'] == source['lecture_id']) assert video['lecture_id'] == source['lecture_id']
assert(fmt_id != None) assert fmt_id is not None
fmt = query('SELECT * FROM formats WHERE id = ?', fmt_id)[0] fmt = query('SELECT * FROM formats WHERE id = ?', fmt_id)[0]
lecture = query('SELECT * FROM lectures WHERE id = ?', source['lecture_id'])[0] lecture = query('SELECT * FROM lectures WHERE id = ?', source['lecture_id'])[0]
course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0] course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0]
...@@ -60,7 +70,7 @@ def schedule_transcode(source, fmt_id=None, video=None): ...@@ -60,7 +70,7 @@ def schedule_transcode(source, fmt_id=None, video=None):
stream = {'name': 'audio', 'type': 'audio'} stream = {'name': 'audio', 'type': 'audio'}
data['input']['streams'].append(stream) data['input']['streams'].append(stream)
else: else:
assert(False) assert False
set_metadata(data['output'], course, lecture) set_metadata(data['output'], course, lecture)
basename = os.path.basename(source['path']).rsplit('.', 1)[0] basename = os.path.basename(source['path']).rsplit('.', 1)[0]
data['output']['path'] = 'pub/'+course['handle']+'/'+basename+fmt['suffix'] data['output']['path'] = 'pub/'+course['handle']+'/'+basename+fmt['suffix']
...@@ -77,12 +87,19 @@ def schedule_transcode(source, fmt_id=None, video=None): ...@@ -77,12 +87,19 @@ def schedule_transcode(source, fmt_id=None, video=None):
return schedule_job('transcode', data, queue="background") return schedule_job('transcode', data, queue="background")
@job_handler('transcode') @job_handler('transcode')
def insert_transcoded_video(jobid, jobtype, data, state, status): def insert_transcoded_video(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
if 'lecture_id' not in data or 'source_id' not in data or 'format_id' not in data: if 'lecture_id' not in data or 'source_id' not in data or 'format_id' not in data:
return return
if 'video_id' in data: if 'video_id' in data:
return return
video_id = insert_video(data['lecture_id'], data['output']['path'], data['format_id'], status['hash'], status['filesize'], status['duration'], data['source_id']) video_id = insert_video(
data['lecture_id'],
data['output']['path'],
data['format_id'],
status['hash'],
status['filesize'],
status['duration'],
data['source_id'])
schedule_remux(data['lecture_id'], video_id) schedule_remux(data['lecture_id'], video_id)
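The guards above mean the handler only runs for transcode jobs whose data was filled in by schedule_transcode; a sketch of a matching payload (all values hypothetical):

data = {'lecture_id': 123, 'source_id': 45, 'format_id': 4,  # hypothetical ids
    'output': {'path': 'pub/examplecourse/lecture.mp4'}}  # hypothetical path
status = {'hash': 'abc123', 'filesize': 1048576, 'duration': 5400}  # reported by the worker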
@app.route('/internal/jobs/add/reencode', methods=['GET', 'POST']) @app.route('/internal/jobs/add/reencode', methods=['GET', 'POST'])
...@@ -98,10 +115,12 @@ def add_reencode_job(): ...@@ -98,10 +115,12 @@ def add_reencode_job():
schedule_transcode(source, video=video) schedule_transcode(source, video=video)
return redirect(request.values.get('ref', url_for('jobs_overview'))) return redirect(request.values.get('ref', url_for('jobs_overview')))
@job_handler('probe-raw') @job_handler('probe-raw', 'intro')
def update_lecture_videos(jobid, jobtype, data, state, status): def update_lecture_videos(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
# info: sql no test cover
if 'lecture_id' not in data: if 'lecture_id' not in data:
return return
if jobtype == 'probe-raw':
if 'source_id' not in data: if 'source_id' not in data:
modify('INSERT INTO sources (lecture_id, path, type, hash, time_created) VALUES (?, ?, ?, ?, ?)', modify('INSERT INTO sources (lecture_id, path, type, hash, time_created) VALUES (?, ?, ?, ?, ?)',
data['lecture_id'], data['path'], 'plain', status['hash'], datetime.now()) data['lecture_id'], data['path'], 'plain', status['hash'], datetime.now())
...@@ -109,6 +128,9 @@ def update_lecture_videos(jobid, jobtype, data, state, status): ...@@ -109,6 +128,9 @@ def update_lecture_videos(jobid, jobtype, data, state, status):
if not sources: if not sources:
return return
latest = sources[-1] latest = sources[-1]
# Incomplete and not enabled currently
#if False and jobtype == 'probe-raw':
# schedule_intro(data['lecture_id'])
videos = query('SELECT * FROM videos WHERE videos.lecture_id = ?', data['lecture_id']) videos = query('SELECT * FROM videos WHERE videos.lecture_id = ?', data['lecture_id'])
current_fmts = [v['video_format'] for v in videos] current_fmts = [v['video_format'] for v in videos]
formats = query('''SELECT formats.* FROM formats formats = query('''SELECT formats.* FROM formats
...@@ -124,7 +146,7 @@ def update_lecture_videos(jobid, jobtype, data, state, status): ...@@ -124,7 +146,7 @@ def update_lecture_videos(jobid, jobtype, data, state, status):
schedule_transcode(latest, video=video) schedule_transcode(latest, video=video)
@edit_handler('chapters') @edit_handler('chapters')
def chapter_changed(table, column, value, id, user): def chapter_changed(table, column, value, id, user): #pylint: disable=unused-argument
chapters = query('SELECT * FROM chapters WHERE id = ?', id) chapters = query('SELECT * FROM chapters WHERE id = ?', id)
if not chapters: if not chapters:
return return
...@@ -133,7 +155,7 @@ def chapter_changed(table, column, value, id, user): ...@@ -133,7 +155,7 @@ def chapter_changed(table, column, value, id, user):
schedule_remux(chapter['lecture_id']) schedule_remux(chapter['lecture_id'])
@edit_handler('courses') @edit_handler('courses')
def course_changed(table, column, value, id, user): def course_changed(table, column, value, id, user): #pylint: disable=unused-argument
if column not in ['title', 'organizer']: if column not in ['title', 'organizer']:
return return
lectures = query('SELECT * FROM lectures WHERE course_id = ?', id) lectures = query('SELECT * FROM lectures WHERE course_id = ?', id)
...@@ -141,7 +163,6 @@ def course_changed(table, column, value, id, user): ...@@ -141,7 +163,6 @@ def course_changed(table, column, value, id, user):
schedule_remux(lecture['id']) schedule_remux(lecture['id'])
@edit_handler('lectures') @edit_handler('lectures')
def lecture_changed(table, column, value, id, user): def lecture_changed(table, column, value, id, user): #pylint: disable=unused-argument
if column in ['title', 'comment', 'time', 'speaker']: if column in ['title', 'comment', 'time', 'speaker']:
schedule_remux(id) schedule_remux(id)
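edit_handler also accepts a field= filter (see its definition above), so a handler can be scoped to a single column; a hypothetical sketch, not part of this revision:

@edit_handler('videos', field='visible')
def video_visibility_changed(table, column, value, id, user): #pylint: disable=unused-argument
    # hypothetical handler: remux so the published file reflects the new visibility
    videos = query('SELECT * FROM videos WHERE id = ?', id)
    if videos:
        schedule_remux(videos[0]['lecture_id'])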
import hashlib
from datetime import MINYEAR
from server import * from server import *
def gen_atomid(s): def gen_atomid(value):
return 'urn:md5:'+hashlib.md5(s.encode('utf-8')).hexdigest().upper() return 'urn:md5:'+hashlib.md5(value.encode('utf-8')).hexdigest().upper()
def fixdate(d): def fixdate(value):
if not isinstance(d, datetime): if not isinstance(value, datetime):
return datetime(MINYEAR, 1, 1) return datetime(MINYEAR, 1, 1)
return d return value
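fixdate exists so max() can rank rows whose timestamps may be NULL; a one-line illustration:

max(None, datetime(2020, 1, 1), key=fixdate)  # -> datetime(2020, 1, 1); None is ranked as MINYEAR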
@app.route('/feed') @app.route('/feed')
@app.route('/<handle>/feed') @app.route('/<handle>/feed')
@handle_errors(None, 'Diese Veranstaltung existiert nicht!', 404, IndexError) @handle_errors(None, 'Diese Veranstaltung existiert nicht!', 404, IndexError)
def feed(handle=None): def feed(handle=None):
id = None course_id = None
course = {'id': None, 'title': 'Neueste Videos', 'time_created': None, 'time_updated': None} course = {'id': None, 'title': 'Neueste Videos', 'time_created': None, 'time_updated': None}
course['atomid'] = gen_atomid('FROM videos SELECT *') course['atomid'] = gen_atomid('FROM videos SELECT *')
if handle: if handle:
course = query('SELECT * FROM courses WHERE handle = ? AND visible', handle)[0] course = query('SELECT * FROM courses WHERE handle = ? AND visible', handle)[0]
course['atomid'] = gen_atomid('Video AG, courses['+str(course['id'])+']: '+course['handle']) course['atomid'] = gen_atomid('Video AG, courses['+str(course['id'])+']: '+course['handle'])
id = course['id'] course_id = course['id']
entries = query(''' entries = query(f'''
SELECT lectures.*, "video" AS sep, videos.*, formats.description AS format_description, formats.prio, "course" AS sep, courses.* SELECT lectures.*, 'video' AS sep, videos.*, formats.description AS format_description, formats.prio, \'course\' AS sep, courses.*
FROM lectures FROM lectures
JOIN courses ON (courses.id = lectures.course_id) JOIN courses ON (courses.id = lectures.course_id)
JOIN videos ON (lectures.id = videos.lecture_id) JOIN videos ON (lectures.id = videos.lecture_id)
JOIN formats ON (formats.id = videos.video_format) JOIN formats ON (formats.id = videos.video_format)
WHERE ((? IS NULL AND courses.listed) OR course_id = ?) AND courses.visible AND lectures.visible AND videos.visible WHERE {"courses.listed" if course_id is None else "course_id = ?"} AND courses.visible AND lectures.visible AND videos.visible
ORDER BY videos.time_created DESC, prio ASC ORDER BY videos.time_created DESC, prio ASC
LIMIT 100''', LIMIT 100''',
course['id'], course['id']) *([] if course_id is None else [course_id]))
updated = max(course['time_updated'], course['time_created'], key=fixdate) updated = max(course['time_updated'], course['time_created'], key=fixdate)
for entry in entries: for entry in entries:
entry['updated'] = max(entry['video']['time_created'], entry['video']['time_updated'], entry['time_created'], entry['time_updated'], key=fixdate) entry['updated'] = max(entry['video']['time_created'], entry['video']['time_updated'], entry['time_created'], entry['time_updated'], key=fixdate)
...@@ -51,21 +53,28 @@ def rss_feed(handle): ...@@ -51,21 +53,28 @@ def rss_feed(handle):
GROUP BY formats.id GROUP BY formats.id
ORDER BY formats.player_prio DESC''', course['id']) ORDER BY formats.player_prio DESC''', course['id'])
if not formats: if not formats:
# info: sql no test cover
formats = query('SELECT * FROM formats WHERE id = 4 OR id = 5 OR id = 10') # 360p, 720p, 1080p formats = query('SELECT * FROM formats WHERE id = 4 OR id = 5 OR id = 10') # 360p, 720p, 1080p
if 'format_id' not in request.values: if 'format_id' not in request.values:
return redirect(url_for('rss_feed', handle=handle, format_id=formats[0]['id'])) return redirect(url_for('rss_feed', handle=handle, format_id=formats[0]['id']))
fmt = query('SELECT * FROM formats WHERE id = ?', request.values['format_id'])[0] fmt = query('SELECT * FROM formats WHERE id = ?', request.values['format_id'])[0]
items = query('''SELECT lectures.*, "video" AS sep, videos.* items = query('''SELECT lectures.*, 'video' AS sep, videos.*
FROM lectures FROM lectures
JOIN courses ON courses.id = lectures.course_id JOIN courses ON courses.id = lectures.course_id
JOIN videos ON lectures.id = videos.lecture_id JOIN videos ON lectures.id = videos.lecture_id
WHERE courses.id = ? AND videos.video_format = ? AND courses.visible AND lectures.visible AND videos.visible WHERE courses.id = ? AND videos.video_format = ? AND courses.visible AND lectures.visible AND videos.visible
ORDER BY lectures.time DESC ORDER BY lectures.time DESC
LIMIT 100''', course['id'], fmt['id']) LIMIT 100''', course['id'], fmt['id'])
chapters = query('SELECT chapters.* FROM chapters JOIN lectures ON lectures.id = chapters.lecture_id WHERE lectures.course_id = ? AND NOT chapters.deleted AND chapters.visible ORDER BY time ASC', course['id']) chapters = query('SELECT chapters.* FROM chapters \
JOIN lectures ON lectures.id = chapters.lecture_id \
WHERE lectures.course_id = ? AND NOT chapters.deleted AND chapters.visible \
ORDER BY time ASC', course['id'])
for item in items: for item in items:
item['updated'] = max(item['video']['time_created'], item['video']['time_updated'], item['time_created'], item['time_updated'], key=fixdate) item['updated'] = max(item['video']['time_created'], item['video']['time_updated'], item['time_created'], item['time_updated'], key=fixdate)
return Response(render_template('feed.rss', course=course, format=fmt, formats=formats, items=items, chapters=chapters), 200, {'Content-Type': 'application/rss+xml; charset=UTF-8'}) return Response(
render_template('feed.rss', course=course, format=fmt, formats=formats, items=items, chapters=chapters),
200,
{'Content-Type': 'application/rss+xml; charset=UTF-8'})
@app.route('/courses/feed') @app.route('/courses/feed')
def courses_feed(): def courses_feed():
......
from server import * from datetime import timedelta, datetime
from ipaddress import ip_address, ip_network
import icalendar import icalendar
from werkzeug.datastructures import Headers from werkzeug.datastructures import Headers
from datetime import timedelta, datetime
from server import *
def export_lectures(lectures, responsible, name): def export_lectures(lectures, responsible, name):
cal = icalendar.Calendar() cal = icalendar.Calendar()
cal.add('prodid', '-//Video AG//rwth.video//') cal.add('prodid', '-//Video AG//rwth.video//')
cal.add('version', '1.0') cal.add('version', '1.0')
for l in lectures: for lecture in lectures:
resp = [] resp = []
for r in responsible: for r in responsible: #pylint: disable=invalid-name
if r['course_id'] == l['course_id']: if r['course_id'] == lecture['course_id']:
resp.append(r['realname']) resp.append(r['realname'])
event = icalendar.Event() event = icalendar.Event()
event.add('summary', l['course']['short']+': '+l['title']) event.add('summary', lecture['course']['short']+': '+lecture['title'])
event.add('description', '\n\n'.join([s for s in [ event.add('description', '\n\n'.join([s for s in [
l['comment'], lecture['comment'],
l['internal'], lecture['internal'],
'Zuständig: '+', '.join(resp) if resp else '' 'Zuständig: '+', '.join(resp) if resp else ''
] if s])) ] if s]))
event.add('uid', '%i@rwth.video'%l['id']) event.add('uid', '%i@rwth.video'%lecture['id'])
event.add('dtstamp', datetime.utcnow()) event.add('dtstamp', datetime.utcnow())
event.add('categories', l['course']['short']) event.add('categories', lecture['course']['short'])
event.add('dtstart', l['time']) event.add('dtstart', lecture['time'])
event.add('location', l['place']) event.add('location', lecture['place'])
event.add('dtend', l['time'] + timedelta(minutes=l['duration'])) event.add('dtend', lecture['time'] + timedelta(minutes=lecture['duration']))
cal.add_component(event) cal.add_component(event)
h = Headers() headers = Headers()
h.add_header("Content-Disposition", "inline", filename=name) headers.add_header("Content-Disposition", "inline", filename=name)
return Response(cal.to_ical(), mimetype="text/calendar", headers=h) return Response(cal.to_ical(), mimetype="text/calendar", headers=headers)
def calperm(func): def calperm(func):
@wraps(func) @wraps(func)
...@@ -45,7 +47,6 @@ def calperm(func): ...@@ -45,7 +47,6 @@ def calperm(func):
permission = True permission = True
if permission: if permission:
return func(*args, **kwargs) return func(*args, **kwargs)
else:
return Response("Login required", 401, {'WWW-Authenticate': 'Basic realm="FS-Login required"'}) return Response("Login required", 401, {'WWW-Authenticate': 'Basic realm="FS-Login required"'})
return decorator return decorator
...@@ -56,7 +57,7 @@ def get_responsible(): ...@@ -56,7 +57,7 @@ def get_responsible():
@app.route('/internal/ical/all') @app.route('/internal/ical/all')
@calperm @calperm
def ical_all(): def ical_all():
return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.* return export_lectures(query('''SELECT lectures.*, 'course' AS sep, courses.*
FROM lectures JOIN courses ON courses.id = lectures.course_id FROM lectures JOIN courses ON courses.id = lectures.course_id
WHERE NOT norecording AND NOT external WHERE NOT norecording AND NOT external
ORDER BY time DESC LIMIT ?''', request.values.get('limit', 1000)), ORDER BY time DESC LIMIT ?''', request.values.get('limit', 1000)),
...@@ -66,7 +67,7 @@ def ical_all(): ...@@ -66,7 +67,7 @@ def ical_all():
@calperm @calperm
def ical_user(user): def ical_user(user):
username = query('SELECT name FROM users WHERE users.id = ?', user)[0]['name'] username = query('SELECT name FROM users WHERE users.id = ?', user)[0]['name']
return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.* return export_lectures(query('''SELECT lectures.*, 'course' AS sep, courses.*
FROM lectures FROM lectures
JOIN courses ON courses.id = lectures.course_id JOIN courses ON courses.id = lectures.course_id
JOIN responsible ON responsible.course_id = courses.id JOIN responsible ON responsible.course_id = courses.id
...@@ -78,7 +79,7 @@ def ical_user(user): ...@@ -78,7 +79,7 @@ def ical_user(user):
@calperm @calperm
def ical_notuser(user): def ical_notuser(user):
username = query('SELECT name FROM users WHERE users.id = ?', user)[0]['name'] username = query('SELECT name FROM users WHERE users.id = ?', user)[0]['name']
return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.* return export_lectures(query('''SELECT lectures.*, 'course' AS sep, courses.*
FROM lectures FROM lectures
JOIN courses ON courses.id = lectures.course_id JOIN courses ON courses.id = lectures.course_id
LEFT JOIN responsible ON (responsible.course_id = courses.id AND responsible.user_id = ?) LEFT JOIN responsible ON (responsible.course_id = courses.id AND responsible.user_id = ?)
...@@ -89,7 +90,7 @@ def ical_notuser(user): ...@@ -89,7 +90,7 @@ def ical_notuser(user):
@app.route('/internal/ical/course/<course>') @app.route('/internal/ical/course/<course>')
@calperm @calperm
def ical_course(course): def ical_course(course):
return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.* return export_lectures(query('''SELECT lectures.*, 'course' AS sep, courses.*
FROM lectures JOIN courses ON courses.id = lectures.course_id FROM lectures JOIN courses ON courses.id = lectures.course_id
WHERE courses.handle = ? AND NOT norecording AND NOT external ORDER BY time DESC''', course), WHERE courses.handle = ? AND NOT norecording AND NOT external ORDER BY time DESC''', course),
get_responsible(), 'videoag_%s.ics'%course) get_responsible(), 'videoag_%s.ics'%course)
import urllib.request
import urllib.parse
from server import * from server import *
@app.route('/internal/import/<int:id>', methods=['GET', 'POST']) @app.route('/internal/import/<int:id>', methods=['GET', 'POST'])
...@@ -15,39 +18,28 @@ def list_import_sources(id): ...@@ -15,39 +18,28 @@ def list_import_sources(id):
for i in campus: for i in campus:
if i.startswith('new'): if i.startswith('new'):
if campus[i]['url'] != '': if campus[i]['url'] != '':
modify('INSERT INTO import_campus (url, type, course_id, last_checked, changed) VALUES (?, ?, ?, ?, 1)',campus[i]['url'],campus[i]['type'],id,datetime.now()) modify('INSERT INTO import_campus (url, type, course_id, last_checked, changed) VALUES (?, ?, ?, ?, 1)',
campus[i]['url'], campus[i]['type'], id, datetime.now())
else: else:
# info: sql no test cover
if campus[i]['url'] != '': if campus[i]['url'] != '':
query('UPDATE import_campus SET url = ?, `type` = ? WHERE (course_id = ?) AND (id = ?)', campus[i]['url'],campus[i]['type'],id,int(i)) query('UPDATE import_campus SET url = ?, "type" = ? WHERE (course_id = ?) AND (id = ?)', campus[i]['url'], campus[i]['type'], id, int(i))
else: else:
query('DELETE FROM import_campus WHERE (id = ?) AND (course_id = ?)', int(i), id) query('DELETE FROM import_campus WHERE (id = ?) AND (course_id = ?)', int(i), id)
import_campus = query('SELECT * FROM import_campus WHERE course_id = ?', id) import_campus = query('SELECT * FROM import_campus WHERE course_id = ?', id)
return render_template('import_campus.html', course=courses, import_campus=import_campus, events=[]) return render_template('import_campus.html', course=courses, import_campus=import_campus, events=[])
@app.route('/internal/import/<int:id>/now', methods=['GET', 'POST']) def fetch_co_course_events(i):
@mod_required # pylint: disable=too-many-locals,too-many-branches,too-many-statements,invalid-name,bare-except
def import_from(id): from lxml import html # pylint: disable=import-outside-toplevel
courses = query('SELECT * FROM courses WHERE id = ?', id)[0]
lectures = query('SELECT * FROM lectures WHERE course_id = ?', courses['id'])
import_campus = query('SELECT * FROM import_campus WHERE course_id = ?',id)
events = [] events = []
try:
from lxml import html
from lxml import etree
import urllib.request
# if u have to port this to anything new, god be with you.
for i in import_campus:
try: try:
remote_html = urllib.request.urlopen(i['url']).read() remote_html = urllib.request.urlopen(i['url']).read()
except: except:
flash("Ungültige URL: '"+i['url']+"'") flash("Ungültige URL: '"+i['url']+"'")
tablexpath = "//td[text()='Termine und Ort']/following::table[1]" tablexpath = "//td[text()='Termine und Ort']/following::table[1]"
basetable = html.fromstring(remote_html).xpath(tablexpath)[0] basetable = html.fromstring(remote_html).xpath(tablexpath)[0]
parsebase = html.tostring(basetable);
#parse recurring events #parse recurring events
toparse = [i['url']] toparse = [i['url']]
...@@ -108,7 +100,9 @@ def import_from(id): ...@@ -108,7 +100,9 @@ def import_from(id):
e['duration'] = int((datetime.strptime("%s %s"%(k, j['end']), fmt) - e['time']).seconds/60) e['duration'] = int((datetime.strptime("%s %s"%(k, j['end']), fmt) - e['time']).seconds/60)
j['place'] = str(j['place']) j['place'] = str(j['place'])
if j['place'] != '': if j['place'] != '':
dbplace = query("SELECT name FROM places WHERE (campus_room = ?) OR (campus_name = ?) OR ((NOT campus_name) AND name = ?)",j['place'],j['place'],j['place']) # info: sql no test cover
dbplace = query("SELECT name FROM places WHERE (campus_room = ?) OR (campus_name = ?) OR (campus_name = '' AND name = ?)",
j['place'], j['place'], j['place'])
if dbplace: if dbplace:
e['place'] = dbplace[0]['name'] e['place'] = dbplace[0]['name']
else: else:
...@@ -118,11 +112,85 @@ def import_from(id): ...@@ -118,11 +112,85 @@ def import_from(id):
e['title'] = i['type'] e['title'] = i['type']
events.append(e) events.append(e)
# it is parsed. # it is parsed.
return events
def fetch_ro_event_ical(ids):
data = {'pMode': 'T', 'pInclPruef': 'N', 'pInclPruefGepl': 'N', 'pOutputFormat': '99', 'pCharset': 'UTF8', 'pMaskAction': 'DOWNLOAD'}
data = list(data.items())
for id in ids:
data.append(('pTerminNr', id))
data = urllib.parse.urlencode(data).encode('utf-8')
req = urllib.request.Request('https://online.rwth-aachen.de/RWTHonline/pl/ui/%24ctx/wbKalender.wbExport',
data=data, method='POST')
with urllib.request.urlopen(req) as f:
return f.read().decode('utf-8')
def fetch_ro_course_ical(id):
# pylint: disable=import-outside-toplevel
from lxml import html
url = 'https://online.rwth-aachen.de/RWTHonline/pl/ui/%24ctx/wbTermin_List.wbLehrveranstaltung?pStpSpNr='+'%i'%(int(id))
req = urllib.request.urlopen(url)
dom = html.fromstring(req.read())
event_ids = [x.value for x in dom.xpath('//input[@name="pTerminNr"]')]
return fetch_ro_event_ical(event_ids)
def fetch_ro_course_events(item):
# pylint: disable=import-outside-toplevel
import icalendar
import pytz
localtz = pytz.timezone('Europe/Berlin')
# First strip the JavaScript '#/' fragment from the URL
url = urllib.parse.urlparse(item['url'].replace('#/', ''))
args = urllib.parse.parse_qs(url.query)
if 'pStpSpNr' in args: # Legacy URLs
id = args['pStpSpNr'][0]
elif len(url.path.split('/')) > 1 and url.path.split('/')[-2] == 'courses': # New URLs
id = url.path.split('/')[-1]
else:
flash("Ungültige URL: '"+url.geturl()+"'")
return [] # can't get events from an invalid URL, so return an empty list
cal = icalendar.Calendar().from_ical(fetch_ro_course_ical(id))
events = []
for comp in cal.subcomponents:
if comp.name != 'VEVENT':
continue
if comp.get('STATUS') != 'CONFIRMED':
continue
event = {}
place = str(comp.get('LOCATION', ''))
if place:
campus_room = place.split('(')[-1].split(')')[0]
# info: sql no test cover
dbplace = query('SELECT name FROM places WHERE campus_room = ?', campus_room)
if dbplace:
event['place'] = dbplace[0]['name']
else:
event['place'] = 'Unbekannter Ort ('+place+')'
else:
event['place'] = ''
event['time'] = comp['DTSTART'].dt.astimezone(localtz).replace(tzinfo=None)
event['duration'] = int((comp['DTEND'].dt - comp['DTSTART'].dt).seconds/60)
event['title'] = item['type']
events.append(event)
return events
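Both accepted URL shapes resolve to the same course id; with a hypothetical id 12345:

# legacy shape: ...wbTermin_List.wbLehrveranstaltung?pStpSpNr=12345 -> args['pStpSpNr'][0] == '12345'
# new shape: .../courses/12345 (once the '#/' fragment is stripped) -> url.path.split('/')[-1] == '12345'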
@app.route('/internal/import/<int:id>/now', methods=['GET', 'POST'])
@mod_required
def import_from(id):
# pylint: disable=too-many-branches
courses = query('SELECT * FROM courses WHERE id = ?', id)[0]
lectures = query('SELECT * FROM lectures WHERE course_id = ?', courses['id'])
import_campus = query('SELECT * FROM import_campus WHERE course_id = ?', id)
events = []
try:
# If you have to port this to anything new, god be with you.
for i in import_campus:
if 'www.campus.rwth-aachen.de' in i['url']:
events += fetch_co_course_events(i)
else:
events += fetch_ro_course_events(i)
except ImportError: except ImportError:
flash('python-lxml not found, campus import will not work.') flash('python-lxml or python-pytz not found, campus and ro import will not work!')
# events to add # events to add
newevents = [] newevents = []
......
from server import modify, query, date_json_handler, sched_func, notify_admins
from datetime import datetime, timedelta from datetime import datetime, timedelta
import traceback import traceback
import json import json
job_handlers = {} from server import modify, query, date_json_handler, sched_func, notify_admins
job_handlers = {} #pylint: disable=invalid-name
def job_handler(*types, state='finished'): def job_handler(*types, state='finished'):
def wrapper(func): def wrapper(func):
for jobtype in types: for jobtype in types:
...@@ -21,25 +22,25 @@ def job_handler_handle(id, state): ...@@ -21,25 +22,25 @@ def job_handler_handle(id, state):
for func in job_handlers.get(type, {}).get(state, []): for func in job_handlers.get(type, {}).get(state, []):
try: try:
func(id, job['type'], json.loads(job['data']), state, json.loads(job['status'])) func(id, job['type'], json.loads(job['data']), state, json.loads(job['status']))
except Exception: except Exception: #pylint: disable=broad-except
notify_admins('scheduler_exception', name=func.__name__, traceback=traceback.format_exc()) notify_admins('scheduler_exception', name=func.__name__, traceback=traceback.format_exc())
traceback.print_exc() traceback.print_exc()
@sched_func(10) @sched_func(10)
def job_catch_broken(): def job_catch_broken():
# scheduled but never pinged # scheduled but never pinged
query('BEGIN') modify("BEGIN")
query('UPDATE jobs SET state="ready" WHERE state="scheduled" and time_scheduled < ?', datetime.now() - timedelta(seconds=10)) query('UPDATE jobs SET state=\'ready\' WHERE state=\'scheduled\' and time_scheduled < ?', datetime.now() - timedelta(seconds=10))
try: try:
query('COMMIT') modify("COMMIT")
except: except: #pylint: disable=bare-except
pass pass
# no pings for 60 seconds # no pings for 60 seconds
query('BEGIN') modify("BEGIN")
query('UPDATE jobs SET state="failed" WHERE state="running" and last_ping < ?', datetime.now() - timedelta(seconds=60)) query('UPDATE jobs SET state=\'failed\' WHERE state=\'running\' and last_ping < ?', datetime.now() - timedelta(seconds=60))
try: try:
query('COMMIT') modify("COMMIT")
except: except: #pylint: disable=bare-except
pass pass
def job_set_state(id, state): def job_set_state(id, state):
...@@ -49,14 +50,16 @@ def schedule_job(jobtype, data=None, priority=0, queue="default"): ...@@ -49,14 +50,16 @@ def schedule_job(jobtype, data=None, priority=0, queue="default"):
if not data: if not data:
data = {} data = {}
return modify('INSERT INTO jobs (type, priority, queue, data, time_created) VALUES (?, ?, ?, ?, ?)', return modify('INSERT INTO jobs (type, priority, queue, data, time_created) VALUES (?, ?, ?, ?, ?)',
jobtype, priority, queue, json.dumps(data, default=date_json_handler), datetime.now()) jobtype, priority, queue, json.dumps(data, default=date_json_handler), datetime.now(),
get_id=True)
def cancel_job(job_id): def cancel_job(job_id):
query('UPDATE jobs SET state = "deleted" WHERE id = ? AND state = "ready"', job_id) query('UPDATE jobs SET state = \'deleted\' WHERE id = ? AND state = \'ready\'', job_id)
query('UPDATE jobs SET canceled = 1 WHERE id = ?', job_id) query('UPDATE jobs SET canceled = true WHERE id = ?', job_id)
def restart_job(job_id, canceled=False): def restart_job(job_id, canceled=False):
# info: sql no test cover
if canceled: if canceled:
query('UPDATE jobs SET state = "ready", canceled = 0 WHERE id = ? AND state = "failed"', job_id) query('UPDATE jobs SET state = \'ready\', canceled = false WHERE id = ? AND state = \'failed\'', job_id)
else: else:
query('UPDATE jobs SET state = "ready" WHERE id = ? AND state = "failed" AND NOT canceled', job_id) query('UPDATE jobs SET state = \'ready\' WHERE id = ? AND state = \'failed\' AND NOT canceled', job_id)
from server import *
import json import json
import random import random
import math
from time import sleep from time import sleep
from server import *
@app.route('/internal/jobs/overview') @app.route('/internal/jobs/overview')
@register_navbar('Jobs', iconlib='fa', icon='suitcase', group='weitere') @register_navbar('Jobs', iconlib='fa', icon='suitcase', group='weitere')
@mod_required @mod_required
def jobs_overview(): def jobs_overview():
if 'page' in request.args: page = max(0, int(request.args.get('page', 0)))
page = max(0, int(request.args['page'])) pagesize = min(500, int(request.args.get('pagesize', 50)))
else:
page = 0
if 'pagesize' in request.args:
pagesize = min(500, int(request.args['pagesize']))
else:
pagesize = 50
worker = query('SELECT * FROM worker ORDER BY last_ping DESC') worker = query('SELECT * FROM worker ORDER BY last_ping DESC')
# get filter options # get filter options
...@@ -30,9 +25,36 @@ def jobs_overview(): ...@@ -30,9 +25,36 @@ def jobs_overview():
'state': request.args.get('state', 'failed'), 'state': request.args.get('state', 'failed'),
'worker': request.args.get('worker', '%')} 'worker': request.args.get('worker', '%')}
pagecount = math.ceil(query('SELECT count(id) as count FROM jobs WHERE (type like ?) AND (worker like ? OR (worker IS NULL AND ? = "%")) AND (state like ?)', filter['type'], filter['worker'], filter['worker'], filter['state'])[0]['count']/pagesize) condition_values = []
jobs = query('SELECT * FROM jobs WHERE (type like ?) AND (worker like ? OR (worker IS NULL AND ? = "%")) AND (state like ?) ORDER BY `time_created` DESC LIMIT ? OFFSET ?', filter['type'], filter['worker'], filter['worker'], filter['state'], pagesize, page*pagesize) if filter['worker'] == '%':
return render_template('jobs_overview.html',worker=worker,jobs=jobs, filter_values=filter_values, filter=filter, page=page, pagesize=pagesize, pagecount=pagecount) condition = 'WHERE (type like ?) AND (state like ?)'
condition_values.extend([filter['type'], filter['state']])
else:
condition = 'WHERE (type like ?) AND (worker like ?) AND (state like ?)'
condition_values.extend([filter['type'], filter['worker'], filter['state']])
pagecount = math.ceil(query(f'SELECT count(id) as count FROM jobs {condition}',
*condition_values)[0]['count']/pagesize)
jobs = query(f'SELECT * FROM jobs \
{condition} \
ORDER BY "time_created" DESC LIMIT ? OFFSET ?',
*[*condition_values, pagesize, page*pagesize])
active_streams = query('SELECT lectures.*, \'course\' AS sep, courses.*, \'job\' AS sep, jobs.* FROM lectures \
JOIN courses ON (courses.id = lectures.course_id) \
JOIN jobs ON (jobs.id = lectures.stream_job) WHERE lectures.stream_job IS NOT NULL')
for stream in active_streams:
try:
stream['destbase'] = json.loads((stream['job']['data'] or '{}')).get('destbase')
except: #pylint: disable=bare-except
pass
return render_template('jobs_overview.html',
worker=worker,
jobs=jobs,
filter_values=filter_values,
filter=filter,
page=page,
pagesize=pagesize,
pagecount=pagecount,
active_streams=active_streams)
@app.route('/internal/jobs/action/<action>', methods=['GET', 'POST']) @app.route('/internal/jobs/action/<action>', methods=['GET', 'POST'])
@app.route('/internal/jobs/action/<action>/<jobid>', methods=['GET', 'POST']) @app.route('/internal/jobs/action/<action>/<jobid>', methods=['GET', 'POST'])
...@@ -40,72 +62,65 @@ def jobs_overview(): ...@@ -40,72 +62,65 @@ def jobs_overview():
@csrf_protect @csrf_protect
def jobs_action(action, jobid=None): def jobs_action(action, jobid=None):
if action == 'clear_failed': if action == 'clear_failed':
query('UPDATE jobs SET state = "deleted" WHERE state = "failed" AND (id = ? OR ? IS NULL)', jobid, jobid) if jobid:
query('UPDATE jobs SET state = \'deleted\' WHERE state = \'failed\' AND id = ?', jobid)
else:
query('UPDATE jobs SET state = \'deleted\' WHERE state = \'failed\'')
elif action == 'retry_failed': elif action == 'retry_failed':
query('UPDATE jobs SET state = "ready", canceled = 0 WHERE state = "failed" AND (id = ? OR ? IS NULL)', jobid, jobid) if jobid:
query('UPDATE jobs SET state = \'ready\', canceled = false WHERE state = \'failed\' AND id = ?', jobid)
else:
query('UPDATE jobs SET state = \'ready\', canceled = false WHERE state = \'failed\'')
elif action == 'copy' and jobid: elif action == 'copy' and jobid:
query("INSERT INTO jobs (type, priority, queue, state, data, time_created) SELECT type, priority, queue, 'ready', data, ? FROM jobs where id = ?", datetime.now(), jobid) query("INSERT INTO jobs (type, priority, queue, state, data, time_created) \
SELECT type, priority, queue, 'ready', data, ? FROM jobs where id = ?",
datetime.now(), jobid)
elif action == 'delete' and jobid: elif action == 'delete' and jobid:
query('UPDATE jobs SET state = "deleted" WHERE id = ?', jobid) query('UPDATE jobs SET state = \'deleted\' WHERE id = ?', jobid)
elif action == 'cancel' and jobid: elif action == 'cancel' and jobid:
cancel_job(jobid) cancel_job(jobid)
return redirect(request.values.get('ref', url_for('jobs_overview'))) return redirect(request.values.get('ref', url_for('jobs_overview')))
def jobs_api_token_required(func):
@wraps(func)
def decorator(*args, **kwargs):
if 'apikey' in request.values:
token = request.values['apikey']
elif request.get_json() and ('apikey' in request.get_json()):
token = request.get_json()['apikey']
else:
token = None
if not token == config.get('JOBS_API_KEY', [None]):
return 'Permission denied', 403
else:
return func(*args, **kwargs)
return decorator
@app.route('/internal/jobs/api/job/<int:id>/ping', methods=['GET', 'POST']) @app.route('/internal/jobs/api/job/<int:id>/ping', methods=['GET', 'POST'])
@jobs_api_token_required @api_token_required('JOBS_API_KEY')
def jobs_ping(id): def jobs_ping(id):
hostname = request.values['host'] hostname = request.values['host']
status = json.dumps(json.loads(request.values['status']), default=date_json_handler) status = json.dumps(json.loads(request.values['status']), default=date_json_handler)
state = request.values['state'] state = request.values['state']
if state == 'finished': if state == 'finished':
query('UPDATE jobs SET time_finished = ?, status = ?, state = "finished" where id = ?', datetime.now(), status, id) query('UPDATE jobs SET time_finished = ?, status = ?, state = \'finished\' where id = ?', datetime.now(), status, id)
else: else:
query('UPDATE jobs SET worker = ?, last_ping = ?, status = ?, state = ? where id = ?', hostname, datetime.now(), status, state, id) query('UPDATE jobs SET worker = ?, last_ping = ?, status = ?, state = ? where id = ?', hostname, datetime.now(), status, state, id)
job_handler_handle(id, state) job_handler_handle(id, state)
job = query('SELECT * FROM jobs WHERE id = ?', id, nlfix=False)[0] job = query('SELECT * FROM jobs WHERE id = ?', id, nlfix=False)[0]
if job['canceled']: if job['canceled']:
return 'Job canceled', 205 return 'Job canceled', 205
else:
return 'OK', 200 return 'OK', 200
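A worker drives this endpoint by POSTing its state; a minimal client-side sketch (URL, job id, hostname and key are hypothetical, and the apikey parameter name is assumed from the replaced jobs_api_token_required decorator):

import json
import requests
requests.post('https://videoag.example/internal/jobs/api/job/42/ping',
    data={'apikey': '...',  # hypothetical token matching JOBS_API_KEY
        'host': 'encoder1',  # hypothetical worker hostname
        'state': 'running',
        'status': json.dumps({'progress': 0.4})})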
@app.route('/internal/jobs/api/worker/<hostname>/schedule', methods=['POST']) @app.route('/internal/jobs/api/worker/<hostname>/schedule', methods=['POST'])
@jobs_api_token_required @api_token_required('JOBS_API_KEY')
def jobs_schedule(hostname): def jobs_schedule(hostname):
query('REPLACE INTO worker (hostname, last_ping) values (?, ?)', hostname, datetime.now()) if query("SELECT hostname FROM worker WHERE hostname = ?", hostname):
query("UPDATE worker SET last_ping = ? WHERE hostname = ?", datetime.now(), hostname)
else:
query("INSERT INTO worker (hostname, last_ping) VALUES (?, ?)", hostname, datetime.now())
hostdata = request.get_json() hostdata = request.get_json()
if not hostdata: if not hostdata:
return 'no hostdata sent', 400 return 'no hostdata sent', 400
job = None job = None
tries = 0 tries = 0
jobtypes = hostdata['jobtypes'] if 'jobtypes' in hostdata else [] while not job:
while (not job):
try: try:
query("BEGIN") modify("BEGIN")
for i in query('SELECT * FROM jobs WHERE state = "ready" ORDER BY priority DESC'): for i in query('SELECT * FROM jobs WHERE state = \'ready\' ORDER BY priority DESC'):
if i['type'] in hostdata['jobtypes'] and i['queue'] in hostdata['queues']: if i['type'] in hostdata['jobtypes'] and i['queue'] in hostdata['queues']:
job = i job = i
break break
if not job: if not job:
return 'no jobs', 503 return 'no jobs', 503
modify('UPDATE jobs SET state="scheduled", worker = ?, time_scheduled = ? WHERE id = ?', hostname, datetime.now(), job['id']) modify('UPDATE jobs SET state=\'scheduled\', worker = ?, time_scheduled = ? WHERE id = ?', hostname, datetime.now(), job['id'])
query("COMMIT") modify("COMMIT")
except: except: #pylint: disable=bare-except
tries += 1 tries += 1
job = None job = None
sleep(random.random()) sleep(random.random())
...@@ -121,4 +136,3 @@ def add_forward_job(): ...@@ -121,4 +136,3 @@ def add_forward_job():
schedule_job('live_forward', {'src': request.values['src'], schedule_job('live_forward', {'src': request.values['src'],
'dest': request.values['dest'], 'format': 'flv'}, priority=9) 'dest': request.values['dest'], 'format': 'flv'}, priority=9)
return redirect(request.values.get('ref', url_for('jobs_overview'))) return redirect(request.values.get('ref', url_for('jobs_overview')))
from server import *
import requests import requests
L2P_BASE = 'https://www3.elearning.rwth-aachen.de/_vti_bin/l2pservices/api.svc/v1/' from server import *
OAUTH_BASE = 'https://oauth.campus.rwth-aachen.de/oauth2waitress/oauth2.svc/' OAUTH_BASE = 'https://oauth.campus.rwth-aachen.de/oauth2waitress/oauth2.svc/'
MOODLE_BASE = 'https://moped.ecampus.rwth-aachen.de/proxy/api/v2/eLearning/Moodle/'
def l2pget(endpoint, token, **args): def moodleget(endpoint, token, **args):
args['accessToken'] = token args['token'] = token
r = requests.request('GET', L2P_BASE+endpoint, params=args) r = requests.request('GET', MOODLE_BASE+endpoint, params=args)
return r.json() return r.json()
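moodleget wraps the Moped Moodle proxy; finish_oauth below consumes it like this (sketch, assuming a valid OAuth access token):

data = moodleget('getmyenrolledcourses', token['access_token'])
course_ids = [str(course['id']) for course in data.get('Data', [])]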
def oauthget(endpoint, **args): def oauthget(endpoint, **args):
...@@ -16,13 +17,21 @@ def oauthget(endpoint, **args): ...@@ -16,13 +17,21 @@ def oauthget(endpoint, **args):
@app.route('/internal/l2pauth') @app.route('/internal/l2pauth')
def start_l2pauth(): def start_l2pauth():
return "L2P is no longer available."
@app.route('/internal/moodleauth')
def start_moodleauth():
if 'L2P_APIKEY' not in config: if 'L2P_APIKEY' not in config:
return render_template("500.html"), 500 return render_template("500.html"), 500
code = oauthget('code', scope='l2p2013.rwth') code = oauthget('code', scope='moodle.rwth')
session['oauthcode'] = code['device_code'] session['oauthcode'] = code['device_code']
session['oauthscope'] = 'l2p' session['oauthscope'] = 'moodle'
return redirect(code['verification_url']+'?q=verify&d='+code['user_code']) return redirect(code['verification_url']+'?q=verify&d='+code['user_code'])
@app.route('/internal/moodlel2pauth')
def start_moodlel2pauth():
return start_moodleauth()
@app.route('/internal/rwthauth') @app.route('/internal/rwthauth')
def start_rwthauth(): def start_rwthauth():
if 'L2P_APIKEY' not in config: if 'L2P_APIKEY' not in config:
...@@ -42,12 +51,17 @@ def finish_oauth(): ...@@ -42,12 +51,17 @@ def finish_oauth():
if token.get('status') != 'ok': if token.get('status') != 'ok':
return return
del session['oauthcode'] del session['oauthcode']
if session['oauthscope'] not in ['l2p', 'rwth']:
if session['oauthscope'] not in ['l2p', 'rwth', 'moodle', 'l2pandmoodle']:
return return
session['rwthintern'] = True session['rwthintern'] = True
if session['oauthscope'] == 'l2p': if session['oauthscope'] in ('moodle', 'l2pandmoodle'):
session['l2p_courses'] = [] data = moodleget('getmyenrolledcourses', token['access_token'])
for course in l2pget('viewAllCourseInfo', token['access_token'])['dataSet']: if data and data.get('Data'):
session['l2p_courses'].append(course['uniqueid']) session['moodle_courses'] = []
for course in data['Data']:
session['moodle_courses'].append(str(course['id']))
else:
notify_admins('endpoint_exception', traceback="finish_oauth failed while getting moodle courses, data={}".format(str(data)))
del session['oauthscope'] del session['oauthscope']
oauthget('token', refresh_token=token['refresh_token'], grant_type='invalidate') oauthget('token', refresh_token=token['refresh_token'], grant_type='invalidate')