Skip to content
Snippets Groups Projects
Commit 3708917f authored by Dorian Koch's avatar Dorian Koch
Browse files

Move source files into ./src, use config file for uwsgi

parent 21f3bf91
No related branches found
No related tags found
No related merge requests found
import json
from server import *
@job_handler('probe', 'probe-raw')
def import_xmp_chapters(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
    """Insert chapter markers found in a probe job's XMP metadata.

    Does nothing unless the job carries a lecture id and has chapter
    import enabled.  Markers within +/-4 seconds of an already stored
    chapter (including deleted ones) are skipped.  New chapters are
    created invisible (visible = 0).
    """
    if 'lecture_id' not in data or not data.get('import-chapters', False):
        return
    # Collect a +/-4 second window around every existing chapter time so
    # near-duplicate XMP markers are not inserted again.
    blocked_times = set()
    for existing in query('SELECT * FROM chapters WHERE lecture_id = ?', data['lecture_id']):
        for delta in range(-4, 5):
            blocked_times.add(existing['time'] + delta)
    for marker in status.get('xmp_chapters', []):
        if int(marker['time']) in blocked_times:
            continue
        modify(
            'INSERT INTO chapters (lecture_id, time, text, visible, time_created, time_updated) VALUES (?, ?, ?, 0, ?, ?)',
            data['lecture_id'], int(marker['time']), marker['text'],
            datetime.now(), datetime.now()
        )
@app.route('/internal/newchapter/<int:lectureid>', methods=['POST', 'GET'])
def suggest_chapter(lectureid):
    """Create a chapter marker for a lecture, possibly from a visitor.

    Expects 'time' (format "%H:%M:%S") and 'text' in the request values.
    'ref' optionally holds a redirect target for browser submissions.
    Non-moderator submissions record the submitter's address and notify
    the moderators.
    """
    # Bug fix: the presence check must run BEFORE the values are read;
    # previously request.values['text'] could raise KeyError first,
    # making the assert unreachable for the missing-'text' case.
    assert 'time' in request.values and 'text' in request.values
    text = request.values['text']
    try:
        parsed_datetime = datetime.strptime(request.values['time'], '%H:%M:%S')
        chapter_start = int(timedelta(hours=parsed_datetime.hour, minutes=parsed_datetime.minute, seconds=parsed_datetime.second).total_seconds())
    except ValueError:
        if 'ref' in request.values:
            flash('Falsches Zeitformat, "%H:%M:%S" wird erwartet. Z.B. "01:39:42" für eine Kapitel bei Stunde 1, Minute 39, Sekunde 42')
            return redirect(request.values['ref'])
        return 'Wrong time format, "%H:%M:%S" is expected', 400
    # Remember the submitting address for non-moderator submissions.
    submitter = None
    if not ismod():
        submitter = request.environ['REMOTE_ADDR']
    lecture = query('SELECT * FROM lectures WHERE id = ?', lectureid)[0]
    course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0]
    # Renamed from 'id' to avoid shadowing the builtin.
    chapter_id = modify(
        'INSERT INTO chapters (lecture_id, time, text, time_created, time_updated, created_by, submitted_by) VALUES (?, ?, ?, ?, ?, ?, ?)',
        lectureid, chapter_start, text, datetime.now(), datetime.now(), session.get('user', {'dbid':None})['dbid'], submitter
    )
    chapter = query('SELECT * FROM chapters WHERE id = ?', chapter_id)[0]
    if not ismod():
        notify_mods('chapter_submitted', course['id'], course=course, lecture=lecture, chapter=chapter)
    if 'ref' in request.values:
        return redirect(request.values['ref'])
    return 'OK', 200
@app.route('/internal/chapters/<int:lectureid>')
def chapters(lectureid):
    """Deliver a lecture's chapter list as WebVTT (default) or JSON."""
    rows = query("SELECT * FROM chapters WHERE lecture_id = ? AND NOT deleted AND (visible OR ?) ORDER BY time DESC", lectureid, ismod())
    if not rows:
        return 'No chapters found', 404
    # Rows are sorted newest-first, so each chapter ends where the
    # previously processed (i.e. chronologically following) one starts.
    previous = None
    for row in rows:
        row['start'] = row['time']
        row['end'] = previous['start'] if previous else 9999 #pylint: disable=unsubscriptable-object
        previous = row
    if 'json' in request.values:
        payload = json.dumps([{'time': r['time'], 'text': r['text']} for r in rows])
        return Response(payload, mimetype='application/json')
    return Response(render_template('chapters.srt', chapters=rows), 200, {'Content-Type':'text/vtt'})
# Defaults for development, do not use in production!
DEBUG = False
SERVER_IP = 'localhost'
# Base URL and mount points used to build links to video files.
VIDEOPREFIX = 'https://videoag.fsmpi.rwth-aachen.de'
VIDEOMOUNT = [{'mountpoint': 'files/protected/', 'prefix':'protected/'},{'mountpoint':'files/pub/','prefix':'pub/' }, {'mountpoint':'files/vpnonline/','prefix':'vpnonline/' }]
#SECRET_KEY = 'something random'
# Database: sqlite is the development default; the mysql settings below
# are commented out and can be enabled instead via DB_ENGINE = 'mysql'.
DB_SCHEMA = 'db_schema.sql'
DB_DATA = 'db_example.sql'
#DB_ENGINE = 'mysql'
#MYSQL_HOST = 'localhost'
#MYSQL_USER = 'videoag'
#MYSQL_PORT = 3306
#MYSQL_UNIX = '/var/run/mysqld/mysqld.sock'
#MYSQL_PASSWD = 'somuchsecret'
#MYSQL_DB = 'videos'
DB_ENGINE = 'sqlite'
SQLITE_DB = 'db.sqlite'
SQLITE_INIT_SCHEMA = True
SQLITE_INIT_DATA = True
#JOBS_API_KEY = 'something random'
#LDAP_HOST = 'auth.fsmpi.rwth-aachen.de'
LDAP_PORT = 636
LDAP_GROUPS = ['fachschaft']
#ICAL_URL = 'https://user:password@mail.fsmpi.rwth-aachen.de/SOGo/....ics'
ERROR_PAGE = 'static/500.html'
# IP ranges; presumably used for network-based access checks elsewhere
# in the server code -- confirm before changing.
RWTH_IP_RANGES = ['134.130.0.0/16', '137.226.0.0/16', '134.61.0.0/16', '192.35.229.0/24', '2a00:8a60::/32']
FSMPI_IP_RANGES = ['137.226.35.192/29', '137.226.75.0/27', '137.226.127.32/27', '137.226.231.192/26', '134.130.102.0/26', '127.0.0.1/32']
INTERNAL_IP_RANGES = ['127.0.0.0/8', '192.168.155.0/24', 'fd78:4d90:6fe4::/48']
DISABLE_SCHEDULER = False
# Outgoing mail settings.
#MAIL_SERVER = 'mail.fsmpi.rwth-aachen.de'
MAIL_FROM = 'Video AG-Website <videoag-it@lists.fsmpi.rwth-aachen.de>'
#MAIL_ADDRESS_OVERWRITE = 'videoag-it@lists.fsmpi.rwth-aachen.de'
MAIL_SUFFIX = 'fsmpi.rwth-aachen.de'
MAIL_DEFAULT = 'Video AG <videoag@fsmpi.rwth-aachen.de>'
MAIL_ADMINS = 'videoag-it@lists.fsmpi.rwth-aachen.de'
# RTMP ingest endpoints for live streaming.
STREAMING_SERVER = 'rtmp://video-web-0.fsmpi.rwth-aachen.de/src/'
BACKUP_STREAMING_SERVER = 'rtmp://video-web-1.fsmpi.rwth-aachen.de/src/'
from server import *
@register_navbar('Schnittfortschritt User', icon='spinner', iconlib='fa', userendpoint=True, endpoint='cutprogress_user')
@register_navbar('Schnittfortschritt', icon='spinner', iconlib='fa')
@app.route('/internal/cutprogress', endpoint="cutprogress")
@app.route('/internal/user/<int:user>/cutprogress', endpoint='cutprogress_user')
@mod_required
def cutprogress(user=None):
    """Render the cut-progress table for one semester.

    Columns are the semester's courses, rows are lecture dates; each cell
    lists that course's lectures on the row's date.  If *user* is given,
    only courses that user is responsible for are shown.
    """
    allsemester = query('SELECT DISTINCT semester from courses ORDER BY semester DESC')
    # Default to the most recent semester if none was selected.
    semester = request.values.get('semester', allsemester[0]['semester'])
    courses = query('''
        SELECT courses.id, courses.handle, courses.short
        FROM courses
        WHERE semester = ?
        ORDER by id DESC
        ''', semester)
    # Fetch list of people responsible for every course
    for course in courses:
        people = query('''
            SELECT users.*
            FROM users
            JOIN responsible ON responsible.user_id = users.id
            WHERE responsible.course_id = ?
            ORDER BY users.realname ASC
            ''', course['id'])
        if not people:
            # Placeholder entry so the template always has someone to show.
            people = [{'realname': 'Niemand', 'id': -1}]
        course['responsible'] = people
    if user is not None:
        courses = [
            c for c in courses
            if user in (r['id'] for r in c['responsible'])
        ]
    # Fetch lectures for all courses
    lectures = []
    for course in courses:
        lectures += query('''
            SELECT
            lectures.id,
            lectures.course_id,
            lectures.time,
            lectures.title,
            COUNT(videos.id) as videos_total,
            COUNT(videos.visible) as videos_visible
            FROM lectures
            JOIN courses ON courses.id = lectures.course_id
            LEFT JOIN videos ON lectures.id = videos.lecture_id
            WHERE courses.id = ?
            AND lectures.time <= ?
            AND NOT lectures.norecording
            GROUP BY lectures.id
            ORDER BY lectures.time ASC, lectures.id ASC
            ''', course['id'], datetime.now())
    # Generate list of days, figure out when weeks change
    dates = sorted({row['time'].date() for row in lectures}, reverse=True)
    # ISO calendar week comparison against the previous (newer) date.
    is_new_weeks = [
        False if (i == 0) else thisdate.isocalendar()[1] != dates[i-1].isocalendar()[1]
        for i, thisdate in enumerate(dates)
    ]
    # Sort into cells
    tablebody = [
        {
            'date': date, # row header
            'is_new_week': is_new_week,
            'cells': [ # this is the body of the row
                [ # this list is a cell
                    lecture
                    for lecture in lectures
                    if lecture['course_id'] == course['id'] and lecture['time'].date() == date
                ]
                for course in courses
            ]
        }
        for date, is_new_week in zip(dates, is_new_weeks)
    ]
    return render_template('cutprogress.html',
        # dropdown selection
        allsemester=allsemester, # options
        semester=semester, # choice
        # NOTE(review): 'if user' is falsy for user id 0 -- presumably ids
        # start at 1; confirm before relying on it.
        user=query('SELECT * FROM users WHERE id = ?', user)[0] if user else None,
        # content
        courses=courses,
        tablebody=tablebody
    )
import sqlite3
from flask import g
from server import *
if config['DB_ENGINE'] == 'sqlite':
    # Replacement for the sqlite3 module's timestamp converter that
    # tolerates malformed values instead of raising.
    def convert_timestamp(val):
        """Parse b'YYYY-MM-DD HH:MM:SS[.frac]' into a datetime.

        Falls back to the Unix epoch for values that do not parse.
        """
        try:
            date_bytes, time_bytes = val.split(b" ")
            year, month, day = (int(part) for part in date_bytes.split(b"-"))
            clock = time_bytes.split(b".")[0]
            hours, minutes, seconds = (int(part) for part in clock.split(b":"))
            return datetime(year, month, day, hours, minutes, seconds, 0)
        except ValueError:
            return datetime.fromtimestamp(0)

    sqlite3.register_converter('datetime', convert_timestamp)
    sqlite3.register_converter('timestamp', convert_timestamp)
if config['DB_ENGINE'] == 'sqlite':
    # Create and optionally seed the sqlite database once at import time.
    DBCREATED = not os.path.exists(config['SQLITE_DB'])
    db = sqlite3.connect(config['SQLITE_DB'])
    cur = db.cursor()
    if config['SQLITE_INIT_SCHEMA']:
        print('Init db schema')
        cur.executescript(open(config['DB_SCHEMA']).read())
    if config['SQLITE_INIT_DATA'] and DBCREATED:
        # Example data is only loaded into a freshly created database.
        print('Init db data')
        cur.executescript(open(config['DB_DATA']).read())
    db.commit()
    db.close()
    def get_dbcursor():
        # One connection per app context, one cursor per request.
        if 'db' not in g:
            g.db = sqlite3.connect(config['SQLITE_DB'], detect_types=sqlite3.PARSE_DECLTYPES)
            # isolation_level None = sqlite3 autocommit mode; the explicit
            # commit happens in the request teardown handler.
            g.db.isolation_level = None
        if not hasattr(request, 'db'):
            request.db = g.db.cursor()
        return request.db
    def fix_query(operation, params):
        # sqlite uses '?' placeholders natively; only strip sub-second
        # precision from datetime parameters.
        params = [(p.replace(microsecond=0) if isinstance(p, datetime) else p) for p in params]
        return operation, params
    def show(operation, host=None): #pylint: disable=unused-argument
        # Server status queries (SHOW ...) are a mysql-only feature.
        return {}
elif config['DB_ENGINE'] == 'mysql':
    import mysql.connector
    def get_dbcursor():
        # Reconnect when the cached connection was dropped by the server.
        if 'db' not in g or not g.db.is_connected():
            g.db = mysql.connector.connect(
                user=config['MYSQL_USER'],
                password=config['MYSQL_PASSWD'],
                host=config.get('MYSQL_HOST', None),
                port=config.get('MYSQL_PORT', 3306),
                unix_socket=config.get('MYSQL_UNIX', None),
                database=config['MYSQL_DB'])
        if not hasattr(request, 'db'):
            request.db = g.db.cursor()
        return request.db
    def fix_query(operation, params):
        # Translate the '?' placeholder style used throughout the code
        # base into mysql's '%s' style.
        operation = operation.replace('?', '%s')
        params = [(p.replace(microsecond=0) if isinstance(p, datetime) else p) for p in params]
        return operation, params
    def show(operation, host=config.get('MYSQL_HOST', None)):
        """Run a SHOW-style statement and return its rows as {col0: col1}."""
        if host:
            db = mysql.connector.connect(user=config['MYSQL_USER'], password=config['MYSQL_PASSWD'], host=host, port=config.get('MYSQL_PORT', 3306))
        else:
            db = mysql.connector.connect(user=config['MYSQL_USER'], password=config['MYSQL_PASSWD'], unix_socket=config.get('MYSQL_UNIX', None))
        cur = db.cursor()
        cur.execute(operation)
        rows = []
        try:
            rows = cur.fetchall()
        except mysql.connector.errors.InterfaceError as e:
            if e.msg == 'No result set to fetch from.':
                # no problem, we were just at the end of the result set
                pass
            else:
                raise
        res = {}
        for row in rows:
            res[row[0]] = row[1]
        cur.close()
        db.close()
        return res
def query(operation, *params, delim="sep", nlfix=True):
    """Execute a statement and return the result rows as list of dicts.

    Columns that appear after a literal marker column named *delim*
    (default "sep") are grouped into a nested dict keyed by the marker's
    value, e.g. SELECT lectures.*, "video" AS sep, videos.* yields rows
    with row['video'][...].  Deadlocked mysql statements are retried up
    to 10 times.  With *nlfix*, literal '\\n'/'\\r' escape sequences in
    string values are converted back to real newlines.
    """
    operation, params = fix_query(operation, params)
    tries = 0
    retry = True
    while tries < 10 and retry:
        retry = False
        try:
            cur = get_dbcursor()
            cur.execute(operation, params)
        # NOTE(review): 'mysql' is only imported in the mysql branch of the
        # engine setup; under sqlite this except clause would NameError if
        # an exception is ever raised here.
        except mysql.connector.errors.InternalError as e:
            if e.msg == 'Deadlock found when trying to get lock; try restarting transaction':
                tries += 1
                retry = True
            else:
                raise
    rows = []
    try:
        rows = cur.fetchall()
    except mysql.connector.errors.InterfaceError as e:
        if e.msg == 'No result set to fetch from.':
            # no problem, we were just at the end of the result set
            pass
        else:
            raise
    res = []
    for row in rows:
        res.append({})
        ptr = res[-1]
        for col, desc in zip(row, cur.description):
            # Strip table qualifier and type suffix from the column name.
            name = desc[0].split('.')[-1].split(':')[0]
            if name == delim:
                # Marker column: following columns go into a sub-dict.
                ptr = res[-1][col] = {}
                continue
            if isinstance(col, str) and nlfix:
                col = col.replace('\\n', '\n').replace('\\r', '\r')
            ptr[name] = col
    return res
def modify(operation, *params):
    """Run a data-modifying statement; return the last inserted row id."""
    statement, args = fix_query(operation, params)
    cursor = get_dbcursor()
    cursor.execute(statement, args)
    return cursor.lastrowid
@app.teardown_request
def commit_db(*args): #pylint: disable=unused-argument
    """Close the per-request cursor and commit the open transaction."""
    db_cursor = getattr(request, 'db', None)
    if db_cursor is not None:
        db_cursor.close()
        g.db.commit()
@app.teardown_appcontext
def close_db(*args): #pylint: disable=unused-argument
    """Drop the app-context database connection, if one was opened."""
    connection = g.pop('db', None)
    if connection is not None:
        connection.close()
def searchquery(text, columns, match, tables, suffix, *suffixparams):
    """Run a weighted LIKE search over *match* columns.

    Each word of *text* contributes one UNION branch; earlier words get
    a higher priority, summed into _score per result row.
    """
    params = []
    selects = []
    words = text.split(' ')
    # Priority counts all split items (including empty ones) so scores
    # stay stable regardless of extra spaces in the input.
    weight = len(words) + 1
    for word in words:
        if not word or word.isspace():
            continue
        like_clause = ' OR '.join('%s LIKE ?' % column for column in match)
        selects.append('SELECT %s, %s AS _prio FROM %s WHERE %s' % (columns, str(weight), tables, like_clause))
        params.extend(['%' + word + '%'] * len(match))
        weight -= 1
    if not selects:
        return []
    expr = 'SELECT *,SUM(_prio) AS _score FROM (%s) AS _tmp %s' % (' UNION '.join(selects), suffix)
    return query(expr, *(list(params) + list(suffixparams)))
......@@ -4,5 +4,5 @@
cd /code;
nginx -c nginx.conf.example -p . &
# Use -C argument to tell uwsgi to chmod 666 /uwsgi.sock
exec uwsgi -C -s uwsgi.sock --manage-script-name --mount /=server:app --plugin python --enable-threads
exec uwsgi --ini uwsgi_videoag.ini
import math
from server import *
# field types:
# boolean
# shortstring
# text
# datetime
# duration
# videotime
# Whitelist of tables the generic edit/create endpoints may touch.
# Per table:
#   'table'               - actual table name (some use *_data variants)
#   'idcolumn'            - primary key column used for lookups
#   'editable_fields'     - column -> {'type': ..., optional 'description'}
#   'creationtime_fields' - columns filled automatically on row creation
editable_tables = { #pylint: disable=invalid-name
    'courses': {
        'table': 'courses_data',
        'idcolumn': 'id',
        'editable_fields': {
            'visible': {'type': 'boolean', 'description': 'Wenn ein Kurs nicht sichtbar ist sind alle Videos davon nicht abrufbar.'},
            'listed': {'type': 'boolean', 'description': 'Soll die Veranstaltung auf der Hauptseite gelistet werden?'},
            'title': {'type': 'shortstring'},
            'short': {'type': 'shortstring', 'description': 'Abkürzung für die Veranstaltung, z.B. für den Drehplan'},
            'handle': {'type': 'shortstring'},
            'organizer': {'type': 'shortstring'},
            'subject': {'type': 'shortstring'},
            'semester': {'type': 'shortstring'},
            'downloadable': {'type': 'boolean', 'description': 'Hiermit kann der Download-Button disabled werden'},
            'internal': {'type': 'text'},
            'responsible': {'type': 'shortstring'},
            'deleted': {'type': 'boolean'},
            'description': {'type': 'text'},
            'external': {'type': 'boolean', 'description': 'Soll die Veranstaltung nicht im Drehplan angezeigt werden?'},
            'coursechapters': {'type': 'boolean', 'description': 'Sollen auf der Kursseite die Kapitelmarker der Videos angezeigt werden?'},
            'autopublish': {'type': 'boolean', 'description': 'Sollen encodete Videos automatisch verschoben werden?'},
            'autovisible': {'type': 'boolean', 'description': 'Sollen neue Videos automatisch sichtbar sein?'},
            'login_info': {'type': 'text', 'description': 'Zusätliche Informationen, die dem Nutzer angezeigt werden, wenn er sich anmelden muss.'}
        },
        'creationtime_fields': ['created_by', 'time_created', 'time_updated']},
    'lectures': {
        'table': 'lectures_data',
        'idcolumn': 'id',
        'editable_fields': {
            'visible': {'type': 'boolean', 'description': 'Wenn eine lecture nicht sichtbar ist sind alle Videos davon nicht abrufbar'},
            'title': {'type': 'shortstring'},
            'comment': {'type': 'text'},
            'internal': {'type': 'text'},
            'speaker': {'type': 'shortstring'},
            'place': {'type': 'shortstring'},
            'time': {'type': 'datetime'},
            'duration': {'type': 'duration'},
            'jumplist': {'type': ''},
            'deleted': {'type': 'boolean'},
            'live': {'type': 'boolean', 'description': 'Ist ein Livestream geplant? Muss gesetzt sein damit der RTMP Stream zugeordnet wird.'},
            'norecording': {'type': 'boolean', 'description': 'Führt dazu, dass der Termin ausgegraut wird.'},
            'stream_settings': {'type': 'text'}
        },
        'creationtime_fields': ['course_id', 'time_created', 'time_updated']},
    'videos': {
        'table': 'videos_data',
        'idcolumn': 'id',
        'editable_fields': {
            'visible': {'type': 'boolean', 'description': 'Ein nicht sichtbares Video kann nicht abgerufen werden.'},
            'deleted': {'type': 'boolean'}},
        'creationtime_fields': ['created_by', 'time_created', 'time_updated']},
    'chapters': {
        'table': 'chapters',
        'idcolumn': 'id',
        'editable_fields': {
            'time': {'type': 'videotime'},
            'text': {'type': 'shortstring'},
            'visible': {'type': 'boolean'},
            'deleted': {'type': 'boolean'}},
        'creationtime_fields': ['created_by', 'time_created', 'time_updated']},
    'announcements': {
        'table': 'announcements',
        'idcolumn': 'id',
        'editable_fields': {
            'text': {'type': 'text'},
            'level': {'type': 'integer'},
            'visible': {'type': 'boolean'},
            'deleted': {'type': 'boolean'},
            'time_publish': {'type': 'datetime'},
            'time_expire': {'type': 'datetime'}},
        'creationtime_fields': ['created_by', 'time_created', 'time_updated']},
    'featured': {
        'table': 'featured',
        'idcolumn': 'id',
        'editable_fields': {
            'title': {'type': 'shortstring'},
            'text': {'type': 'text'},
            'internal': {'type': 'text'},
            'visible': {'type': 'boolean'},
            'deleted': {'type': 'boolean'},
            'param': {'type': 'shortstring'},
            'param2': {'type': 'shortstring'},
            'order': {'type': 'integer'}},
        'creationtime_fields': ['created_by', 'time_created', 'time_updated', 'type']},
    'perm': {
        'table': 'perm',
        'idcolumn': 'id',
        'editable_fields': {
            'type': {'type': 'shortstring'},
            'param1': {'type': 'shortstring'},
            'param2': {'type': 'shortstring'},
            'deleted': {'type': 'boolean'}},
        'creationtime_fields': ['course_id', 'lecture_id', 'video_id', 'created_by', 'time_created', 'time_updated']},
    'sorterrorlog': {
        'table': 'sorterrorlog_data',
        'idcolumn': 'id',
        'editable_fields': {
            'deleted': {'type': 'boolean'}},
        'creationtime_fields': ['time_created', 'time_updated']},
    'users': {
        'table': 'users',
        'idcolumn': 'id',
        'editable_fields': {
            'mail_notifications': {'type': 'boolean'},
            'notify_chapter_submitted': {'type': 'boolean'},
            'notify_new_video': {'type': 'boolean'},
            'notify_edit': {'type': 'boolean'}
        },
        'creationtime_fields': []},
    'live_sources': {
        'table': 'live_sources',
        'idcolumn': 'id',
        'editable_fields': {
            'name': {'type': 'shortstring'},
            'description': {'type': 'text'},
            'deleted': {'type': 'boolean'}
        },
        'creationtime_fields': ['created_by', 'time_created', 'time_updated']}
}
# Parses an edit path into a dict containing table, id, field and field type.
@app.template_filter(name='parseeditpath')
def parseeditpath(path):
    """Split 'table.id.column' and resolve it against editable_tables."""
    table, row_id, column = path.split('.', 2)
    assert table in editable_tables
    assert column in editable_tables[table]['editable_fields']
    fieldtype = editable_tables[table]['editable_fields'][column]['type']
    return {'table': table, 'id': row_id, 'column': column, 'type': fieldtype, 'tableinfo': editable_tables[table]}
@app.template_filter(name='getfielddescription')
def getfielddescription(inputpath):
    """Return the configured help text for an edit path, prefixed with <br>."""
    parsed = parseeditpath(inputpath)
    description = parsed['tableinfo']['editable_fields'][parsed['column']].get('description', '')
    return '<br>' + description if description != '' else description
@app.template_filter(name='getfieldchangelog')
def getfieldchangelog(inputpath):
    """Fetch the five most recent changelog entries for an edit path."""
    parsed = parseeditpath(inputpath)
    entries = query('SELECT * FROM changelog \
        LEFT JOIN users ON (changelog.who = users.id) WHERE `table` = ? AND `id_value` = ? and `field` = ? \
        ORDER BY `when` DESC LIMIT 5', parsed['table'], parsed['id'], parsed['column'])
    for entry in entries:
        # Stringify so the template filter chain can re-build edit paths.
        entry['id_value'] = str(entry['id_value'])
        entry['value_new'] = str(entry['value_new'])
        entry['path'] = '.'.join([entry['table'], entry['id_value'], entry['field']])
    return entries
@app.route('/internal/edit', methods=['GET', 'POST'])
@mod_required
@csrf_protect
def edit(prefix='', ignore=None):
    """Apply field edits (from form values or a JSON body) to whitelisted
    tables.

    Each key is an edit path 'table.id.column' (optionally completed via
    *prefix*); every change is recorded in the changelog and forwarded to
    the registered edit handlers.
    """
    if not ignore:
        ignore = []
    # All editable tables are expected to have a 'time_updated' field
    ignore.append('ref')
    ignore.append('prefix')
    ignore.append('_csrf_token')
    if not prefix and 'prefix' in request.args:
        prefix = request.args['prefix']
    changes = request.values.items()
    if (request.method == 'POST') and (request.get_json()):
        changes = request.get_json().items()
    for key, val in changes:
        if key in ignore:
            continue
        key = prefix+key
        path = parseeditpath(key)
        # The %-interpolated identifiers are safe: parseeditpath asserts
        # they come from the editable_tables whitelist.
        modify('INSERT INTO changelog \
            (`table`,id_value, id_key, field, value_new, value_old, `when`, who, executed) \
            VALUES (?,?,?,?,?, \
            (SELECT `%s` FROM %s WHERE %s = ?),?,?,1)'%(
                path['column'],
                path['tableinfo']['table'],
                path['tableinfo']['idcolumn']
            ),
            path['table'],
            path['id'],
            path['tableinfo']['idcolumn'],
            path['column'],
            val,
            path['id'],
            datetime.now(),
            session['user']['dbid'])
        modify('UPDATE %s SET `%s` = ?, time_updated = ? WHERE `%s` = ?'%(path['tableinfo']['table'], path['column'], path['tableinfo']['idcolumn']),
            val, datetime.now(), path['id'])
        # Run the table-wide (field=None) and column-specific edit handlers.
        for func in edit_handlers.get(path['table'], {}).get(None, []):
            func(path['table'], path['column'], val, path['id'], session['user']['dbid'])
        for func in edit_handlers.get(path['table'], {}).get(path['column'], []):
            func(path['table'], path['column'], val, path['id'], session['user']['dbid'])
    if 'ref' in request.values:
        return redirect(request.values['ref'])
    return "OK", 200
@app.route('/internal/new/<table>', methods=['GET', 'POST'])
@mod_required
@csrf_protect
def create(table):
    """Insert a new row into an editable table from form values or JSON.

    Only whitelisted columns are accepted; creator and timestamp columns
    are filled automatically where the table declares them.
    """
    assert table in editable_tables
    defaults = {'created_by': session['user']['dbid'], 'time_created': datetime.now(), 'time_updated': datetime.now()}
    columns = []
    values = []
    for column, val in defaults.items():
        if column in editable_tables[table]['creationtime_fields']:
            columns.append(column)
            values.append(val)
    args = request.values.items()
    if (request.method == 'POST') and (request.get_json()):
        args = request.get_json().items()
    for column, val in args:
        if column in ['ref', '_csrf_token']:
            continue
        # Reject columns that are neither editable nor creation-time fields,
        # and creation-time fields already filled from defaults.
        assert column in list(editable_tables[table]['editable_fields'].keys())+editable_tables[table]['creationtime_fields']
        assert column not in defaults
        columns.append('`'+column+'`')
        values.append(val)
    # The table name comes from the whitelist above, so %-interpolation is safe.
    id = modify('INSERT INTO %s (%s) VALUES (%s)'%(editable_tables[table]['table'],
        ','.join(columns), ','.join(['?']*len(values))), *values)
    if table == 'courses':
        # The creator automatically becomes responsible for a new course.
        set_responsible(id, session['user']['dbid'], 1)
    if 'ref' in request.values:
        return redirect(request.values['ref'])
    return str(id), 200
@app.route('/internal/changelog')
@register_navbar('Changelog', icon='book', group='weitere')
@mod_required
def changelog():
    """Paginated listing of all changelog entries, newest first."""
    page = max(0, int(request.args.get('page', 0)))
    pagesize = min(500, int(request.args.get('pagesize', 50)))
    entries = query('SELECT * FROM changelog LEFT JOIN users ON (changelog.who = users.id) ORDER BY `when` DESC LIMIT ? OFFSET ?', pagesize, page*pagesize)
    total = query('SELECT count(id) as count FROM changelog')[0]['count']
    pagecount = math.ceil(total/pagesize)
    for entry in entries:
        entry['path'] = '.'.join([entry['table'], entry['id_value'], entry['field']])
    return render_template('changelog.html', changelog=entries, page=page, pagesize=pagesize, pagecount=pagecount)
@app.route('/internal/set/responsible/<int:course_id>/<int:user_id>', defaults={'value': True}, methods=['GET', 'POST'])
@app.route('/internal/unset/responsible/<int:course_id>/<int:user_id>', defaults={'value': False}, methods=['GET', 'POST'])
@mod_required
@csrf_protect
def set_responsible(course_id, user_id, value):
    """Add or remove a user from a course's list of responsible people."""
    if not value:
        modify('DELETE FROM responsible WHERE course_id = ? AND user_id = ?', course_id, user_id)
    else:
        modify('REPLACE INTO responsible (course_id, user_id) values (?, ?)', course_id, user_id)
    return "OK", 200
edit_handlers = {} #pylint: disable=invalid-name

def edit_handler(*tables, field=None):
    """Decorator factory: register a callback for edits on *tables*.

    With field=None the callback fires for every column of the table,
    otherwise only for the named column.  Handlers are stored in
    edit_handlers[table][field] in registration order.
    """
    def wrapper(func):
        for table in tables:
            edit_handlers.setdefault(table, {}).setdefault(field, []).append(func)
        return func
    return wrapper
@edit_handler('courses')
@edit_handler('lectures')
def notify_edit(table, column, value, id, user_id):
    """Notify moderators (except the editor) about a course/lecture edit."""
    lecture = None
    if table == 'courses':
        course_id = id
    elif table == 'lectures':
        lecture = query('SELECT * FROM lectures_data WHERE id = ?', id)[0]
        course_id = lecture['course_id']
    course = query('SELECT * FROM courses_data WHERE id = ?', course_id)[0]
    user = query('SELECT * FROM users WHERE id = ?', user_id)[0]
    notify_mods('edit', course_id, exclude_uids=[user_id], course=course, lecture=lecture, table=table, column=column, value=value, user=user)
import os.path
import json
from server import *
from sorter import insert_video
from edit import edit_handler
def set_metadata(dest, course, lecture):
    """Attach title/album/artist metadata and visible chapters to *dest*."""
    chapters = query('SELECT text, time FROM chapters WHERE lecture_id = ? AND visible ORDER BY time', lecture['id'])
    dest['metadata'] = {
        'title': lecture['title'],
        'album': course['title'],
        'description': lecture['comment'],
        'date': lecture['time'].strftime('%m/%d/%Y'),
        # Fall back to the course organizer when no speaker is set.
        'artist': lecture['speaker'] if lecture['speaker'] else course['organizer'],
    }
    dest['chapters'] = chapters
# Incomplete and not enabled currently
#def schedule_intro(lectureid):
# lecture = query('SELECT * FROM lectures where id = ?', lectureid)
# course = query('SELECT * FROM course where id = ?', lecture['course_id'])
# data = {'path': path, 'lecture_id': lectureid}
# set_metadata(data, course, lecture)
# schedule_job('intro', data)
def schedule_remux(lectureid, videoid=None):
    """Queue remux jobs for all (or one) of a lecture's source-based videos."""
    lecture = query('SELECT * FROM lectures WHERE id = ?', lectureid)[0]
    course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0]
    videos = query('''SELECT videos.*, sources.path AS srcpath, sources.hash AS srchash, formats.options AS fmtopts
        FROM videos
        JOIN sources ON videos.source = sources.id
        JOIN formats ON videos.video_format = formats.id
        WHERE videos.lecture_id = ?''', lectureid)
    for video in videos:
        # Skip manually created videos and, if requested, all but one id.
        if not video['source']:
            continue
        if videoid and video['id'] != videoid:
            continue
        data = {'path': video['path'], 'srcpath': video['srcpath'],
            'srchash': video['srchash'], 'video_id': video['id']}
        fmt = json.loads(video['fmtopts'])
        if 'format' in fmt:
            data['format'] = fmt['format']
        # NOTE(review): source formatting was ambiguous here; 'options' is
        # taken as unconditional since .get() already defaults it -- confirm.
        data['options'] = fmt.get('options', {})
        set_metadata(data, course, lecture)
        schedule_job('remux', data)
@app.route('/internal/jobs/add/remux', methods=['GET', 'POST'])
@mod_required
@csrf_protect
def add_remux_job():
    """Manually queue a remux for a lecture (or a single video of it)."""
    videoid = int(request.values.get('videoid', 0))
    lectureid = request.values.get('lectureid')
    if not lectureid:
        # Only a video id was given: derive the lecture from the video.
        lectureid = query('SELECT lecture_id FROM videos WHERE id = ?', videoid)[0]['lecture_id']
    schedule_remux(lectureid, videoid)
    return redirect(request.values.get('ref', url_for('jobs_overview')))
def schedule_transcode(source, fmt_id=None, video=None):
    """Queue a transcode job for *source*.

    Either *fmt_id* (fresh encode producing a new video) or *video*
    (re-encode of an existing video; format taken from it) must be
    given.  Returns the scheduled job's handle from schedule_job().
    """
    if video:
        fmt_id = video['video_format']
        assert video['lecture_id'] == source['lecture_id']
    assert fmt_id is not None
    fmt = query('SELECT * FROM formats WHERE id = ?', fmt_id)[0]
    lecture = query('SELECT * FROM lectures WHERE id = ?', source['lecture_id'])[0]
    course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0]
    data = {'input': {'path': source['path'], 'streams': []}, 'output': json.loads(fmt['options']), 'filters': []}
    if source['type'] == 'plain':
        # A plain source contributes exactly one video and one audio stream.
        stream = {'name': 'video', 'type': 'video'}
        data['input']['streams'].append(stream)
        stream = {'name': 'audio', 'type': 'audio'}
        data['input']['streams'].append(stream)
    else:
        # Other source types are not supported here (yet).
        assert False
    set_metadata(data['output'], course, lecture)
    basename = os.path.basename(source['path']).rsplit('.', 1)[0]
    data['output']['path'] = 'pub/'+course['handle']+'/'+basename+fmt['suffix']
    if video:
        # Re-encode: overwrite the existing file, keep the video row.
        old_source = query('SELECT * FROM sources WHERE id = ?', video['source'])[0]
        data['output']['path'] = video['path']
        data['video_id'] = video['id']
        data['srcpath'] = old_source['path']
        data['srchash'] = old_source['hash']
    else:
        # Fresh encode: carry what the completion handler needs to insert
        # the new video row (see insert_transcoded_video).
        data['lecture_id'] = lecture['id']
        data['format_id'] = fmt['id']
        data['source_id'] = source['id']
    return schedule_job('transcode', data, queue="background")
@job_handler('transcode')
def insert_transcoded_video(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
    """When a fresh transcode finishes, register the video and remux it."""
    if any(key not in data for key in ('lecture_id', 'source_id', 'format_id')):
        return
    if 'video_id' in data:
        # Re-encode of an existing video: nothing new to insert.
        return
    video_id = insert_video(
        data['lecture_id'],
        data['output']['path'],
        data['format_id'],
        status['hash'],
        status['filesize'],
        status['duration'],
        data['source_id'])
    schedule_remux(data['lecture_id'], video_id)
@app.route('/internal/jobs/add/reencode', methods=['GET', 'POST'])
@mod_required
@csrf_protect
@handle_errors('jobs_overview', 'Video nicht gefunden!', 404, IndexError)
def add_reencode_job():
    """Queue a re-encode of an existing video from its original source."""
    video = query('SELECT * FROM videos WHERE id = ?', request.values['videoid'])[0]
    if video['source']:
        source = query('''SELECT sources.* FROM sources WHERE sources.id = ? ORDER BY time_created''', video['source'])[-1]
        schedule_transcode(source, video=video)
    else:
        flash('Manuell erstellte Videos können nicht neukodiert werden!')
    return redirect(request.values.get('ref', url_for('jobs_overview')))
@job_handler('probe-raw', 'intro')
def update_lecture_videos(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
    """After probing raw material, (re)schedule transcodes for a lecture.

    Registers newly probed raw files as sources, then ensures every
    format of the course's profile exists and every existing video is
    based on the newest source.
    """
    if 'lecture_id' not in data:
        return
    if jobtype == 'probe-raw':
        if 'source_id' not in data:
            # A raw file without a source row yet: register it.
            modify('INSERT INTO sources (lecture_id, path, type, hash, time_created) VALUES (?, ?, ?, ?, ?)',
                data['lecture_id'], data['path'], 'plain', status['hash'], datetime.now())
    sources = query('SELECT * FROM sources WHERE sources.lecture_id = ? ORDER BY time_created', data['lecture_id'])
    if not sources:
        return
    latest = sources[-1]
    # Incomplete and not enabled currently
    #if False and jobtype == 'probe-raw':
    #	schedule_intro(data['lecture_id'])
    videos = query('SELECT * FROM videos WHERE videos.lecture_id = ?', data['lecture_id'])
    current_fmts = [v['video_format'] for v in videos]
    formats = query('''SELECT formats.* FROM formats
        JOIN profiles ON formats.id = profiles.format
        JOIN courses ON profiles.name = courses.profile
        JOIN lectures ON courses.id = lectures.course_id
        WHERE lectures.id = ?''', data['lecture_id'])
    for fmt in formats:
        # Encode profile formats that do not exist for this lecture yet.
        if fmt['id'] not in current_fmts:
            schedule_transcode(latest, fmt_id=fmt['id'])
    for video in videos:
        # Re-encode videos that were produced from an outdated source.
        if video['source'] != latest['id']:
            schedule_transcode(latest, video=video)
@edit_handler('chapters')
def chapter_changed(table, column, value, id, user): #pylint: disable=unused-argument
    """Remux the lecture when a chapter edit can affect published output."""
    rows = query('SELECT * FROM chapters WHERE id = ?', id)
    if not rows:
        return
    chapter = rows[0]
    # Visibility toggles always matter; other edits only for live chapters.
    if column in ['visible', 'deleted'] or (chapter['visible'] and not chapter['deleted']):
        schedule_remux(chapter['lecture_id'])
@edit_handler('courses')
def course_changed(table, column, value, id, user): #pylint: disable=unused-argument
    """Remux all lectures of a course when its embedded metadata changes."""
    if column in ('title', 'organizer'):
        for lecture in query('SELECT * FROM lectures WHERE course_id = ?', id):
            schedule_remux(lecture['id'])
@edit_handler('lectures')
def lecture_changed(table, column, value, id, user): #pylint: disable=unused-argument
    """Remux a lecture when one of its embedded metadata fields changes."""
    if column not in ('title', 'comment', 'time', 'speaker'):
        return
    schedule_remux(id)
import hashlib
from datetime import MINYEAR
from server import *
def gen_atomid(value):
    """Derive a stable Atom id ('urn:md5:...') from an arbitrary string."""
    digest = hashlib.md5(value.encode('utf-8')).hexdigest()
    return 'urn:md5:' + digest.upper()
def fixdate(value):
    """Coerce non-datetime values (e.g. None) to the minimal datetime.

    Used as a max() key so missing timestamps never win a comparison.
    """
    return value if isinstance(value, datetime) else datetime(MINYEAR, 1, 1)
@app.route('/feed')
@app.route('/<handle>/feed')
@handle_errors(None, 'Diese Veranstaltung existiert nicht!', 404, IndexError)
def feed(handle=None):
    """Atom feed of the newest videos, site-wide or for one course."""
    # Synthetic "course" for the site-wide feed; replaced below if a
    # handle was given.
    course = {'id': None, 'title': 'Neueste Videos', 'time_created': None, 'time_updated': None}
    course['atomid'] = gen_atomid('FROM videos SELECT *')
    if handle:
        course = query('SELECT * FROM courses WHERE handle = ? AND visible', handle)[0]
        course['atomid'] = gen_atomid('Video AG, courses['+str(course['id'])+']: '+course['handle'])
    entries = query('''
        SELECT lectures.*, "video" AS sep, videos.*, formats.description AS format_description, formats.prio, "course" AS sep, courses.*
        FROM lectures
        JOIN courses ON (courses.id = lectures.course_id)
        JOIN videos ON (lectures.id = videos.lecture_id)
        JOIN formats ON (formats.id = videos.video_format)
        WHERE ((? IS NULL AND courses.listed) OR course_id = ?) AND courses.visible AND lectures.visible AND videos.visible
        ORDER BY videos.time_created DESC, prio ASC
        LIMIT 100''',
        course['id'], course['id'])
    # Feed-level 'updated' is the maximum over all entry timestamps;
    # fixdate() maps missing values to the minimal datetime.
    updated = max(course['time_updated'], course['time_created'], key=fixdate)
    for entry in entries:
        entry['updated'] = max(entry['video']['time_created'], entry['video']['time_updated'], entry['time_created'], entry['time_updated'], key=fixdate)
        # Prefer the stored file hash as a stable id; fall back to the
        # database id when the hash is not a 32-char md5 hex digest.
        if len(entry['video']['hash']) != 32:
            entry['atomid'] = gen_atomid('Video AG, videos['+str(entry['video']['id'])+']')
        else:
            entry['atomid'] = 'urn:md5:'+(entry['video']['hash'].upper())
        updated = max(updated, entry['updated'], key=fixdate)
    course['updated'] = updated
    return Response(render_template('feed.atom', course=course, entries=entries), 200, {'Content-Type': 'application/atom+xml'})
@app.route('/<handle>/rss')
@handle_errors(None, 'Die Veranstaltung oder das Format existiert nicht!', 404, IndexError)
def rss_feed(handle):
    """RSS feed for one course in one selectable video format.

    Without a 'format_id' request parameter the client is redirected to
    the course's highest-priority (or a default) format.
    """
    course = query('SELECT * FROM courses WHERE handle = ? AND visible', handle)[0]
    formats = query('''SELECT formats.* FROM formats
        JOIN videos ON videos.video_format = formats.id
        JOIN lectures ON lectures.id = videos.lecture_id
        WHERE lectures.course_id = ?
        GROUP BY formats.id
        ORDER BY formats.player_prio DESC''', course['id'])
    if not formats:
        formats = query('SELECT * FROM formats WHERE id = 4 OR id = 5 OR id = 10') # 360p, 720p, 1080p
    if 'format_id' not in request.values:
        return redirect(url_for('rss_feed', handle=handle, format_id=formats[0]['id']))
    # Cleanup: was request.values.get('format_id', request.values['format_id'])
    # -- a self-defaulting lookup; the key is guaranteed present here.
    fmt = query('SELECT * FROM formats WHERE id = ?', request.values['format_id'])[0]
    items = query('''SELECT lectures.*, "video" AS sep, videos.*
        FROM lectures
        JOIN courses ON courses.id = lectures.course_id
        JOIN videos ON lectures.id = videos.lecture_id
        WHERE courses.id = ? AND videos.video_format = ? AND courses.visible AND lectures.visible AND videos.visible
        ORDER BY lectures.time DESC
        LIMIT 100''', course['id'], fmt['id'])
    chapters = query('SELECT chapters.* FROM chapters \
        JOIN lectures ON lectures.id = chapters.lecture_id \
        WHERE lectures.course_id = ? AND NOT chapters.deleted AND chapters.visible \
        ORDER BY time ASC', course['id'])
    for item in items:
        item['updated'] = max(item['video']['time_created'], item['video']['time_updated'], item['time_created'], item['time_updated'], key=fixdate)
    return Response(
        render_template('feed.rss', course=course, format=fmt, formats=formats, items=items, chapters=chapters),
        200,
        {'Content-Type': 'application/rss+xml; charset=UTF-8'})
@app.route('/courses/feed')
def courses_feed():
	"""Atom feed of the 100 most recently created visible, listed courses."""
	courses = query('SELECT * FROM courses WHERE visible AND listed ORDER BY time_created DESC LIMIT 100')
	atomid = gen_atomid('Video AG, courses')
	newest = None
	for course in courses:
		course['atomid'] = gen_atomid('Video AG, courses['+str(course['id'])+']: '+course['handle'])
		course['updated'] = max(course['time_created'], course['time_updated'], key=fixdate)
		# Track the newest timestamp over all courses for the feed header.
		newest = max(newest, course['updated'], key=fixdate)
	return Response(render_template('courses_feed.atom', updated=newest, atomid=atomid, courses=courses), 200, {'Content-Type': 'application/atom+xml'})
from datetime import timedelta, datetime
from ipaddress import ip_address, ip_network
import icalendar
from werkzeug.datastructures import Headers
from server import *
def export_lectures(lectures, responsible, name):
	"""Build an iCalendar download response for the given lecture rows.

	responsible: rows of users joined with their course_id; the realnames of
	everyone responsible for a lecture's course end up in the description.
	name: filename used in the Content-Disposition header.
	"""
	cal = icalendar.Calendar()
	cal.add('prodid', '-//Video AG//rwth.video//')
	cal.add('version', '1.0')
	for lecture in lectures:
		# Collect the realnames of everyone responsible for this lecture's course.
		resp = []
		for r in responsible: #pylint: disable=invalid-name
			if r['course_id'] == lecture['course_id']:
				resp.append(r['realname'])
		event = icalendar.Event()
		event.add('summary', lecture['course']['short']+': '+lecture['title'])
		# Description: comment, internal notes and responsibles, skipping empty parts.
		event.add('description', '\n\n'.join([s for s in [
			lecture['comment'],
			lecture['internal'],
			'Zuständig: '+', '.join(resp) if resp else ''
		] if s]))
		event.add('uid', '%i@rwth.video'%lecture['id'])
		# NOTE(review): utcnow() is naive; presumably intended as UTC for DTSTAMP — confirm.
		event.add('dtstamp', datetime.utcnow())
		event.add('categories', lecture['course']['short'])
		event.add('dtstart', lecture['time'])
		event.add('location', lecture['place'])
		event.add('dtend', lecture['time'] + timedelta(minutes=lecture['duration']))
		cal.add_component(event)
	headers = Headers()
	headers.add_header("Content-Disposition", "inline", filename=name)
	return Response(cal.to_ical(), mimetype="text/calendar", headers=headers)
def calperm(func):
	"""Decorator guarding the calendar endpoints.

	Access is granted to moderators, to requests from FSMPI-internal networks,
	or to valid HTTP Basic credentials belonging to a moderator (useful for
	calendar clients that cannot use the session login).
	"""
	@wraps(func)
	def decorator(*args, **kwargs):
		permission = ismod()
		# X-Real-IP is set by the reverse proxy; trust FSMPI-internal ranges.
		if 'X-Real-IP' in request.headers:
			ip = ip_address(request.headers['X-Real-IP'])
			for net in config['FSMPI_IP_RANGES']:
				if ip in ip_network(net):
					permission = True
		# HTTP Basic auth against LDAP as a fallback.
		if request.authorization:
			userinfo, groups = ldapauth(request.authorization.username, request.authorization.password)
			if check_mod(userinfo.get('uid'), groups):
				permission = True
		if permission:
			return func(*args, **kwargs)
		return Response("Login required", 401, {'WWW-Authenticate': 'Basic realm="FS-Login required"'})
	return decorator
def get_responsible():
	"""Return all (user, course_id) responsibility assignments as joined rows."""
	return query('''SELECT users.*, responsible.course_id FROM responsible
		JOIN users ON users.id = responsible.user_id''')
@app.route('/internal/ical/all')
@calperm
def ical_all():
	"""iCal export of all recordable (non-external) lectures; ?limit= caps the count (default 1000)."""
	return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.*
		FROM lectures JOIN courses ON courses.id = lectures.course_id
		WHERE NOT norecording AND NOT external
		ORDER BY time DESC LIMIT ?''', request.values.get('limit', 1000)),
		get_responsible(), 'videoag_all.ics')
@app.route('/internal/ical/user/<int:user>')
@calperm
def ical_user(user):
	"""iCal export of the lectures the given user is responsible for."""
	username = query('SELECT name FROM users WHERE users.id = ?', user)[0]['name']
	return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.*
		FROM lectures
		JOIN courses ON courses.id = lectures.course_id
		JOIN responsible ON responsible.course_id = courses.id
		WHERE NOT norecording AND NOT external AND responsible.user_id = ?
		ORDER BY time DESC LIMIT ?''', user, request.values.get('limit', 1000)),
		get_responsible(), 'videoag_%s.ics'%username)
@app.route('/internal/ical/notuser/<int:user>')
@calperm
def ical_notuser(user):
	"""iCal export of the lectures the given user is NOT responsible for.

	Uses a LEFT JOIN anti-join: rows where the user's responsible entry is
	NULL are exactly the courses without that user.
	"""
	username = query('SELECT name FROM users WHERE users.id = ?', user)[0]['name']
	return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.*
		FROM lectures
		JOIN courses ON courses.id = lectures.course_id
		LEFT JOIN responsible ON (responsible.course_id = courses.id AND responsible.user_id = ?)
		WHERE NOT norecording AND NOT external AND responsible.user_id IS NULL
		ORDER BY time DESC LIMIT ?''', user, request.values.get('limit', 1000)),
		get_responsible(), 'videoag_not_%s.ics'%username)
@app.route('/internal/ical/course/<course>')
@calperm
def ical_course(course):
	"""iCal export of all recordable lectures of a single course (by handle)."""
	return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.*
		FROM lectures JOIN courses ON courses.id = lectures.course_id
		WHERE courses.handle = ? AND NOT norecording AND NOT external ORDER BY time DESC''', course),
		get_responsible(), 'videoag_%s.ics'%course)
import urllib.request
import urllib.parse
from server import *
@app.route('/internal/import/<int:id>', methods=['GET', 'POST'])
@mod_required
def list_import_sources(id):
	"""Show and update the campus import sources of a course.

	Form fields are named 'campus.<importid>.<field>'. Import ids starting
	with 'new' create a row (if a URL was given); for existing ids an empty
	URL deletes the row, otherwise it is updated.
	"""
	courses = query('SELECT * FROM courses WHERE id = ?', id)[0]
	campus = {}
	# Regroup the flat form fields into {importid: {field: value}}.
	for i in request.values:
		group, importid, field = i.split('.', 2)
		if group == 'campus':
			if not importid in campus:
				campus[importid] = {}
			campus[importid][field] = request.values[i]
	for i in campus:
		if i.startswith('new'):
			if campus[i]['url'] != '':
				modify('INSERT INTO import_campus (url, type, course_id, last_checked, changed) VALUES (?, ?, ?, ?, 1)',
					campus[i]['url'], campus[i]['type'], id, datetime.now())
		else:
			if campus[i]['url'] != '':
				query('UPDATE import_campus SET url = ?, `type` = ? WHERE (course_id = ?) AND (id = ?)', campus[i]['url'], campus[i]['type'], id, int(i))
			else:
				query('DELETE FROM import_campus WHERE (id = ?) AND (course_id = ?)', int(i), id)
	import_campus = query('SELECT * FROM import_campus WHERE course_id = ?', id)
	return render_template('import_campus.html', course=courses, import_campus=import_campus, events=[])
def fetch_co_course_events(i):
	"""Scrape lecture events (time, duration, place, title) from a CAMPUS Office page.

	i: import_campus row with 'url' and 'type'. Returns a list of event dicts;
	returns an empty list (after flashing a message) if the URL is unreachable.
	"""
	# pylint: disable=too-many-locals,too-many-branches,too-many-statements,invalid-name,bare-except
	from lxml import html # pylint: disable=import-outside-toplevel
	events = []
	try:
		remote_html = urllib.request.urlopen(i['url']).read()
	except:
		flash("Ungültige URL: '"+i['url']+"'")
		# Bugfix: execution previously fell through here and crashed with an
		# UnboundLocalError on remote_html; bail out with no events instead.
		return events
	tablexpath = "//td[text()='Termine und Ort']/following::table[1]"
	basetable = html.fromstring(remote_html).xpath(tablexpath)[0]
	#parse recurring events
	# Every recurring-event group sits behind its own toggle link; collect those URLs.
	toparse = [i['url']]
	for j in basetable.xpath("//table[@cellpadding='5']//tr[@class='hierarchy4' and td[@name='togglePeriodApp']]"):
		url = str(j.xpath("td[@name='togglePeriodApp']/a/@href")[0])
		toparse.append(url)
	events_raw = []
	for j in toparse:
		if j.startswith('event'):
			url = 'https://www.campus.rwth-aachen.de/rwth/all/'+j
		else:
			url = j
		text = urllib.request.urlopen(url).read()
		dom = html.fromstring(text).xpath(tablexpath)[0]
		#we get the "heading" row, from it extract the room and time. best way to get it is to match on the picture -.-
		baserow = dom.xpath("//table[@cellpadding='5']//tr[@class='hierarchy4' and td[@name='togglePeriodApp']/*/img[@src='../../server/img/minus.gif']]")
		if not baserow:
			continue
		baserow = baserow[0]
		rowdata = {'dates': []}
		# "kein raum vergeben" is a special case, else use campus id
		if baserow.xpath("td[6]/text()")[0] == 'Kein Raum vergeben':
			rowdata['place'] = ''
		elif baserow.xpath("td[6]/a"):
			rowdata['place'] = baserow.xpath("td[6]/a")[0].text_content()
		else:
			rowdata['place'] = baserow.xpath("td[6]/text()")[0].split(' ', 1)[0]
		rowdata['start'] = baserow.xpath("td[3]/text()")[0]
		rowdata['end'] = baserow.xpath("td[5]/text()")[0]
		rowdata['dates'] = baserow.getparent().xpath("tr[@class='hierarchy5']//td[@colspan='3']/text()")
		events_raw.append(rowdata)
	# parse single appointments
	if basetable.xpath("//table[@cellpadding='3']/tr/td[text()='Einmalige Termine:']"):
		singletable = basetable.xpath("//table[@cellpadding='3']/tr/td[text()='Einmalige Termine:']")[0].getparent().getparent()
		for row in singletable.xpath("tr/td[2]"):
			rowdata = {}
			if row.xpath("text()[2]")[0] == 'Kein Raum vergeben':
				rowdata['place'] = ''
			elif row.xpath("a"):
				rowdata['place'] = row.xpath("a")[0].text_content()
			else:
				rowdata['place'] = row.xpath("text()[2]")[0].split(' ', 1)[0]
			# Fixed character offsets into the date/time text — assumes the
			# CAMPUS layout stays stable; TODO confirm against a live page.
			rowdata['dates'] = [row.xpath("text()[1]")[0][4:14]]
			rowdata['start'] = row.xpath("text()[1]")[0][17:22]
			rowdata['end'] = row.xpath("text()[1]")[0][27:32]
			events_raw.append(rowdata)
	#now we have to filter our data and do some lookups
	for j in events_raw:
		for k in j['dates']:
			e = {}
			fmt = "%d.%m.%Y %H:%M"
			e['time'] = datetime.strptime("%s %s"%(k, j['start']), fmt)
			e['duration'] = int((datetime.strptime("%s %s"%(k, j['end']), fmt) - e['time']).seconds/60)
			j['place'] = str(j['place'])
			if j['place'] != '':
				# Map the campus room name/id to our internal place name where possible.
				dbplace = query("SELECT name FROM places WHERE (campus_room = ?) OR (campus_name = ?) OR ((NOT campus_name) AND name = ?)",
					j['place'], j['place'], j['place'])
				if dbplace:
					e['place'] = dbplace[0]['name']
				else:
					e['place'] = 'Unbekannter Ort ('+j['place']+')'
			else:
				e['place'] = ''
			e['title'] = i['type']
			events.append(e)
	# it is parsed.
	return events
def fetch_ro_event_ical(ids):
	"""Download the iCal export for the given RWTHonline event ids (one POST)."""
	params = [('pMode', 'T'), ('pInclPruef', 'N'), ('pInclPruefGepl', 'N'),
			('pOutputFormat', '99'), ('pCharset', 'UTF8'), ('pMaskAction', 'DOWNLOAD')]
	# One pTerminNr parameter per requested event.
	params.extend(('pTerminNr', event_id) for event_id in ids)
	body = urllib.parse.urlencode(params).encode('utf-8')
	req = urllib.request.Request('https://online.rwth-aachen.de/RWTHonline/pl/ui/%24ctx/wbKalender.wbExport',
			data=body, method='POST')
	with urllib.request.urlopen(req) as response:
		return response.read().decode('utf-8')
def fetch_ro_course_ical(id):
	"""Scrape all event ids of an RWTHonline course page and fetch their iCal."""
	# pylint: disable=import-outside-toplevel
	from lxml import html
	url = 'https://online.rwth-aachen.de/RWTHonline/pl/ui/%24ctx/wbTermin_List.wbLehrveranstaltung?pStpSpNr='+'%i'%(int(id))
	page = urllib.request.urlopen(url)
	dom = html.fromstring(page.read())
	# Every appointment row carries a hidden pTerminNr input.
	event_ids = [field.value for field in dom.xpath('//input[@name="pTerminNr"]')]
	return fetch_ro_event_ical(event_ids)
def fetch_ro_course_events(item):
	"""Fetch the events of one RWTHonline course via its iCal export.

	item: import_campus row with 'url' and 'type'. Returns a list of dicts
	with time (naive local time), duration (minutes), place and title.
	Flashes and returns [] on an unparsable URL.
	"""
	# pylint: disable=import-outside-toplevel
	import icalendar
	import pytz
	localtz = pytz.timezone('Europe/Berlin')
	# First fix crappy javascript fragment-Paths
	url = urllib.parse.urlparse(item['url'].replace('#/', ''))
	args = urllib.parse.parse_qs(url.query)
	if 'pStpSpNr' in args: # Legacy URLs
		id = args['pStpSpNr'][0]
	elif len(url.path.split('/')) > 1 and url.path.split('/')[-2] == 'courses': # New URLs
		id = url.path.split('/')[-1]
	else:
		flash("Ungültige URL: '"+url.geturl()+"'")
		return [] #cant get events from wrong URL so just return empty list
	cal = icalendar.Calendar().from_ical(fetch_ro_course_ical(id))
	events = []
	for comp in cal.subcomponents:
		if comp.name != 'VEVENT':
			continue
		# Skip tentative/cancelled appointments.
		if comp.get('STATUS') != 'CONFIRMED':
			continue
		event = {}
		place = str(comp.get('LOCATION', ''))
		if place:
			# The campus room id is the last parenthesized part of the location string.
			campus_room = place.split('(')[-1].split(')')[0]
			dbplace = query('SELECT name FROM places WHERE campus_room = ?', campus_room)
			if dbplace:
				event['place'] = dbplace[0]['name']
			else:
				event['place'] = 'Unbekannter Ort ('+place+')'
		else:
			event['place'] = ''
		# Convert to naive Europe/Berlin time — presumably to match the naive
		# timestamps stored in the DB; verify against the lectures schema.
		event['time'] = comp['DTSTART'].dt.astimezone(localtz).replace(tzinfo=None)
		event['duration'] = int((comp['DTEND'].dt - comp['DTSTART'].dt).seconds/60)
		event['title'] = item['type']
		events.append(event)
	return events
@app.route('/internal/import/<int:id>/now', methods=['GET', 'POST'])
@mod_required
def import_from(id):
	"""Fetch all import sources of a course and diff them against existing lectures.

	Renders a preview of events to be added (deduplicated by place/time/
	duration) and of lectures that no longer appear upstream.
	"""
	# pylint: disable=too-many-branches
	courses = query('SELECT * FROM courses WHERE id = ?', id)[0]
	lectures = query('SELECT * FROM lectures WHERE course_id = ?', courses['id'])
	import_campus = query('SELECT * FROM import_campus WHERE course_id = ?', id)
	events = []
	try:
		# if u have to port this to anything new, god be with you.
		for i in import_campus:
			if 'www.campus.rwth-aachen.de' in i['url']:
				events += fetch_co_course_events(i)
			else:
				events += fetch_ro_course_events(i)
	except ImportError:
		flash('python-lxml or python-pytz not found, campus and ro import will not work!')
	# events to add
	newevents = []
	for i in events + lectures:
		# 'unique' is True when an equal event was already collected (i.e. a duplicate);
		# 'exists' is True when it matches a lecture already in the DB.
		unique = False
		exists = False
		for j in newevents:
			unique = (i['place'] == j['place']) and (i['time'] == j['time']) and (i['duration'] == j['duration'])
			if unique:
				break
		for j in lectures:
			exists = (i['place'] == j['place']) and (i['time'] == j['time']) and (i['duration'] == j['duration'])
			if exists:
				break
		if (not unique) and (not exists):
			newevents.append(i)
	# deleted events
	deletedlectures = []
	for i in lectures:
		# A lecture not found among the upstream events anymore is shown as deleted.
		incampus = False
		for j in events:
			incampus = (i['place'] == j['place']) and (i['time'] == j['time']) and (i['duration'] == j['duration'])
			if incampus:
				break
		if not incampus:
			deletedlectures.append(i)
	return render_template('import_campus.html', course=courses, import_campus=import_campus, newevents=newevents, deletedlectures=deletedlectures)
from datetime import datetime, timedelta
import traceback
import json
from server import modify, query, date_json_handler, sched_func, notify_admins
job_handlers = {} #pylint: disable=invalid-name
def job_handler(*types, state='finished'):
	"""Decorator: register a callback for the given job types entering `state`.

	Handlers are stored in job_handlers[type][state] as a list, in
	registration order.
	"""
	def wrapper(func):
		for jobtype in types:
			per_state = job_handlers.setdefault(jobtype, {})
			per_state.setdefault(state, []).append(func)
		return func
	return wrapper
def job_handler_handle(id, state):
	"""Run every handler registered for job `id`'s type entering `state`.

	Handler exceptions are reported to the admins but never propagate.
	"""
	job = query('SELECT * FROM jobs WHERE id = ?', id, nlfix=False)[0]
	handlers = job_handlers.get(job['type'], {}).get(state, [])
	for handler in handlers:
		try:
			handler(id, job['type'], json.loads(job['data']), state, json.loads(job['status']))
		except Exception: #pylint: disable=broad-except
			notify_admins('scheduler_exception', name=handler.__name__, traceback=traceback.format_exc())
			traceback.print_exc()
@sched_func(10)
def job_catch_broken():
	"""Recover jobs whose worker went away.

	Scheduled-but-never-pinged jobs become ready again; running jobs without
	a ping for over a minute are marked failed.
	"""
	def _recover(sql, cutoff):
		# COMMIT may race with other transactions; this is best-effort.
		modify("BEGIN")
		query(sql, cutoff)
		try:
			modify("COMMIT")
		except: #pylint: disable=bare-except
			pass
	# scheduled but never pinged
	_recover('UPDATE jobs SET state="ready" WHERE state="scheduled" and time_scheduled < ?',
			datetime.now() - timedelta(seconds=10))
	# no pings since 60s
	_recover('UPDATE jobs SET state="failed" WHERE state="running" and last_ping < ?',
			datetime.now() - timedelta(seconds=60))
def job_set_state(id, state):
	"""Unconditionally set the state column of job `id`."""
	query('UPDATE jobs SET state=? WHERE id=?', state, id)
def schedule_job(jobtype, data=None, priority=0, queue="default"):
	"""Insert a new job row and return its id.

	data is JSON-serialized (dates via date_json_handler); an empty payload
	is stored as '{}'.
	"""
	payload = json.dumps(data or {}, default=date_json_handler)
	return modify('INSERT INTO jobs (type, priority, queue, data, time_created) VALUES (?, ?, ?, ?, ?)',
			jobtype, priority, queue, payload, datetime.now())
def cancel_job(job_id):
	"""Cancel a job: delete it while still ready, and set the canceled flag so a running worker stops (see jobs_ping)."""
	query('UPDATE jobs SET state = "deleted" WHERE id = ? AND state = "ready"', job_id)
	query('UPDATE jobs SET canceled = 1 WHERE id = ?', job_id)
def restart_job(job_id, canceled=False):
	"""Put a failed job back into the ready state.

	With canceled=True, a canceled failed job is also revived (and its
	canceled flag cleared); otherwise canceled jobs are left alone.
	"""
	if canceled:
		sql = 'UPDATE jobs SET state = "ready", canceled = 0 WHERE id = ? AND state = "failed"'
	else:
		sql = 'UPDATE jobs SET state = "ready" WHERE id = ? AND state = "failed" AND NOT canceled'
	query(sql, job_id)
import json
import random
import math
from time import sleep
from server import *
@app.route('/internal/jobs/overview')
@register_navbar('Jobs', iconlib='fa', icon='suitcase', group='weitere')
@mod_required
def jobs_overview():
	"""Paginated job list with worker status and currently active streams.

	type/state/worker filters come from the query string and feed SQL LIKE
	patterns; '%' means "any". Failed jobs are shown by default.
	"""
	page = max(0, int(request.args.get('page', 0)))
	pagesize = min(500, int(request.args.get('pagesize', 50)))
	worker = query('SELECT * FROM worker ORDER BY last_ping DESC')
	# Distinct values for the filter dropdowns.
	filter_values = {
		'type': query('SELECT distinct type FROM jobs'),
		'state': query('SELECT distinct state FROM jobs'),
		'worker': query('SELECT distinct worker FROM jobs')}
	# Renamed from `filter` to avoid shadowing the builtin; the template
	# still receives it under the keyword `filter`.
	job_filter = {
		'type': request.args.get('type', '%'),
		'state': request.args.get('state', 'failed'),
		'worker': request.args.get('worker', '%')}
	pagecount = math.ceil(query('SELECT count(id) as count FROM jobs WHERE (type like ?) AND (worker like ? OR (worker IS NULL AND ? = "%")) AND (state like ?)',
		job_filter['type'], job_filter['worker'], job_filter['worker'], job_filter['state'])[0]['count']/pagesize)
	jobs = query('SELECT * FROM jobs \
		WHERE (type like ?) AND (worker like ? OR (worker IS NULL AND ? = "%")) AND (state like ?) \
		ORDER BY `time_created` DESC LIMIT ? OFFSET ?',
		job_filter['type'], job_filter['worker'], job_filter['worker'], job_filter['state'], pagesize, page*pagesize)
	active_streams = query('SELECT lectures.*, "course" AS sep, courses.*, "job" AS sep, jobs.* FROM lectures \
		JOIN courses ON (courses.id = lectures.course_id) \
		JOIN jobs ON (jobs.id = lectures.stream_job) WHERE lectures.stream_job')
	for stream in active_streams:
		try:
			# Best effort: job data may be missing or malformed JSON.
			stream['destbase'] = json.loads((stream['job']['data'] or '{}')).get('destbase')
		except: #pylint: disable=bare-except
			pass
	return render_template('jobs_overview.html',
			worker=worker,
			jobs=jobs,
			filter_values=filter_values,
			filter=job_filter,
			page=page,
			pagesize=pagesize,
			pagecount=pagecount,
			active_streams=active_streams)
@app.route('/internal/jobs/action/<action>', methods=['GET', 'POST'])
@app.route('/internal/jobs/action/<action>/<jobid>', methods=['GET', 'POST'])
@mod_required
@csrf_protect
def jobs_action(action, jobid=None):
	"""Perform a maintenance action on one job, or on all failed jobs for the *_failed actions."""
	if action == 'clear_failed':
		# jobid None means: apply to every failed job.
		query('UPDATE jobs SET state = "deleted" WHERE state = "failed" AND (id = ? OR ? IS NULL)', jobid, jobid)
	elif action == 'retry_failed':
		query('UPDATE jobs SET state = "ready", canceled = 0 WHERE state = "failed" AND (id = ? OR ? IS NULL)', jobid, jobid)
	elif jobid and action == 'copy':
		# Clone the job as a fresh ready job with a new creation time.
		query("INSERT INTO jobs (type, priority, queue, state, data, time_created) \
				SELECT type, priority, queue, 'ready', data, ? FROM jobs where id = ?",
			datetime.now(), jobid)
	elif jobid and action == 'delete':
		query('UPDATE jobs SET state = "deleted" WHERE id = ?', jobid)
	elif jobid and action == 'cancel':
		cancel_job(jobid)
	return redirect(request.values.get('ref', url_for('jobs_overview')))
@app.route('/internal/jobs/api/job/<int:id>/ping', methods=['GET', 'POST'])
@api_token_required('JOBS_API_KEY')
def jobs_ping(id):
	"""Worker heartbeat and state update for one job.

	Updates the job row, runs the registered job handlers for the new state,
	and answers 205 when the job has been canceled so the worker aborts.
	"""
	hostname = request.values['host']
	# Round-trip through json to normalize and apply date_json_handler.
	status = json.dumps(json.loads(request.values['status']), default=date_json_handler)
	state = request.values['state']
	if state == 'finished':
		query('UPDATE jobs SET time_finished = ?, status = ?, state = "finished" where id = ?', datetime.now(), status, id)
	else:
		query('UPDATE jobs SET worker = ?, last_ping = ?, status = ?, state = ? where id = ?', hostname, datetime.now(), status, state, id)
	job_handler_handle(id, state)
	job = query('SELECT * FROM jobs WHERE id = ?', id, nlfix=False)[0]
	if job['canceled']:
		# 205 tells the worker to stop working on this job.
		return 'Job canceled', 205
	return 'OK', 200
@app.route('/internal/jobs/api/worker/<hostname>/schedule', methods=['POST'])
@api_token_required('JOBS_API_KEY')
def jobs_schedule(hostname):
	"""Hand out the next matching ready job to a worker.

	The worker POSTs its capabilities as JSON ({'jobtypes': [...],
	'queues': [...]}); the highest-priority ready job it can handle is
	marked scheduled and returned. 503 when no suitable job exists.
	"""
	query('REPLACE INTO worker (hostname, last_ping) values (?, ?)', hostname, datetime.now())
	hostdata = request.get_json()
	if not hostdata:
		return 'no hostdata sent', 400
	job = None
	tries = 0
	# Optimistic concurrency: the BEGIN/UPDATE/COMMIT may fail when another
	# worker grabs the same job; back off randomly and retry up to 10 times.
	while not job:
		try:
			modify("BEGIN")
			for i in query('SELECT * FROM jobs WHERE state = "ready" ORDER BY priority DESC'):
				if i['type'] in hostdata['jobtypes'] and i['queue'] in hostdata['queues']:
					job = i
					break
			if not job:
				return 'no jobs', 503
			modify('UPDATE jobs SET state="scheduled", worker = ?, time_scheduled = ? WHERE id = ?', hostname, datetime.now(), job['id'])
			modify("COMMIT")
		except: #pylint: disable=bare-except
			tries += 1
			job = None
			sleep(random.random())
			if tries > 10:
				return 'no jobs', 503
	return Response(json.dumps(job, default=date_json_handler), mimetype='application/json')
@app.route('/internal/jobs/add/forward', methods=['GET', 'POST'])
@mod_required
@csrf_protect
def add_forward_job():
	"""Manually schedule a high-priority live_forward job from src to dest."""
	job_data = {'src': request.values['src'],
			'dest': request.values['dest'],
			'format': 'flv'}
	schedule_job('live_forward', job_data, priority=9)
	return redirect(request.values.get('ref', url_for('jobs_overview')))
import requests
from server import *
# Base URLs of the RWTH OAuth2 device-flow endpoint and the moped Moodle proxy API.
OAUTH_BASE = 'https://oauth.campus.rwth-aachen.de/oauth2waitress/oauth2.svc/'
MOODLE_BASE = 'https://moped.ecampus.rwth-aachen.de/proxy/api/v2/eLearning/Moodle/'
def moodleget(endpoint, token, **args):
	"""GET a moped Moodle API endpoint with the given OAuth token; returns parsed JSON."""
	args['token'] = token
	response = requests.get(MOODLE_BASE+endpoint, params=args)
	return response.json()
def oauthget(endpoint, **args):
	"""POST to the RWTH OAuth2 endpoint, adding our client id; returns parsed JSON."""
	args['client_id'] = config['L2P_APIKEY']
	response = requests.post(OAUTH_BASE+endpoint, data=args)
	return response.json()
@app.route('/internal/l2pauth')
def start_l2pauth():
	"""Former L2P OAuth entry point; the L2P platform has been shut down."""
	return "L2P is no longer available."
@app.route('/internal/moodleauth')
def start_moodleauth():
	"""Start the OAuth device flow for the moodle.rwth scope.

	Stores the device code in the session (completed by finish_oauth) and
	sends the user to the verification page.
	"""
	if 'L2P_APIKEY' not in config:
		return render_template("500.html"), 500
	code = oauthget('code', scope='moodle.rwth')
	session['oauthscope'] = 'moodle'
	session['oauthcode'] = code['device_code']
	return redirect(code['verification_url']+'?q=verify&d='+code['user_code'])
@app.route('/internal/moodlel2pauth')
def start_moodlel2pauth():
	"""Legacy combined L2P+Moodle entry point; now identical to the Moodle flow."""
	return start_moodleauth()
@app.route('/internal/rwthauth')
def start_rwthauth():
	"""Start the OAuth device flow for the userinfo.rwth scope (RWTH-intern check)."""
	if 'L2P_APIKEY' not in config:
		return render_template("500.html"), 500
	code = oauthget('code', scope='userinfo.rwth')
	session['oauthscope'] = 'rwth'
	session['oauthcode'] = code['device_code']
	return redirect(code['verification_url']+'?q=verify&d='+code['user_code'])
@app.before_request
def finish_oauth():
	"""Complete a pending OAuth device flow started by one of the *auth endpoints.

	Runs before every request and is a no-op unless a device code is stored
	in the session. On success marks the session as RWTH-internal and, for
	the moodle scope, stores the user's moodle course ids; finally the
	token is invalidated.
	"""
	if 'L2P_APIKEY' not in config:
		return
	if 'oauthcode' not in session or 'oauthscope' not in session:
		return
	token = oauthget('token', code=session['oauthcode'], grant_type='device')
	if token.get('status') != 'ok':
		# Device flow not confirmed yet; keep the code and retry on the next request.
		return
	del session['oauthcode']
	if session['oauthscope'] not in ['l2p', 'rwth', 'moodle', 'l2pandmoodle']:
		return
	session['rwthintern'] = True
	if session['oauthscope'] == 'moodle' or session['oauthscope'] == 'l2pandmoodle':
		data = moodleget('getmyenrolledcourses', token['access_token'])
		if data and data.get('Data'):
			session['moodle_courses'] = []
			for course in data['Data']:
				session['moodle_courses'].append(str(course['id']))
		else:
			notify_admins('endpoint_exception', traceback="finish_oauth failed while getting moodle courses, data={}".format(str(data)))
	del session['oauthscope']
	# One-time access only: invalidate the refresh token immediately.
	oauthget('token', refresh_token=token['refresh_token'], grant_type='invalidate')
import re
from server import *
# User names may only contain [a-z0-9]; anything else is rejected outright,
# which also prevents LDAP filter injection.
LDAP_USERRE = re.compile(r'[^a-z0-9]')
if 'LDAP_HOST' in config:
	import ldap3
	def ldapauth(user, password): # pylint: disable=function-redefined
		"""Authenticate against the FSMPI LDAP.

		Returns (info, groups) with info = {uid, givenName, sn}; on any
		failure (bad characters, bad credentials, empty password) returns
		({}, []).
		"""
		if LDAP_USERRE.search(user):
			return {}, []
		try:
			server = ldap3.Server(config['LDAP_HOST'], port=config['LDAP_PORT'], use_ssl=True)
			conn = ldap3.Connection(server, 'fsmpi\\%s'%user, password, auto_bind=True, check_names=False)
		except (ldap3.core.exceptions.LDAPBindError, ldap3.core.exceptions.LDAPPasswordIsMandatoryError):
			return {}, []
		conn.search("cn=users,dc=fsmpi,dc=rwth-aachen,dc=de", "(cn=%s)"%user, attributes=['memberOf', 'givenName', 'sn'])
		info = {'uid': user, 'givenName': conn.response[0]['attributes']['givenName'][0], 'sn': conn.response[0]['attributes']['sn'][0]}
		# Group names are the first RDN value of each memberOf DN.
		groups = [g.split(',')[0].split('=')[-1] for g in conn.response[0]['attributes']['memberOf']]
		conn.unbind()
		return info, groups
else:
	# No LDAP configured: static debug accounts, only usable with DEBUG enabled.
	NOTLDAP = {
		'videoag': ('videoag', ['fachschaft', 'videoag'], {'uid': 'videoag', 'givenName': 'Video', 'sn': 'Geier'}),
		'gustav': ('passwort', ['fachschaft'], {'uid': 'gustav', 'givenName': 'Gustav', 'sn': 'Geier'})
	}
	def ldapauth(user, password): # pylint: disable=function-redefined
		"""Debug fallback for ldapauth; same (info, groups) contract as the LDAP version."""
		if LDAP_USERRE.search(user):
			return {}, []
		if config.get('DEBUG') and user in NOTLDAP and password == NOTLDAP[user][0]:
			return NOTLDAP[user][2], NOTLDAP[user][1]
		return {}, []
from server import *
def legacy_index():
	"""Map legacy ?course=/?view= query URLs to their new locations.

	Returns a redirect (or 404) for recognized legacy parameters, otherwise
	None so the caller can render the normal index.
	"""
	args = request.args
	if 'course' in args:
		return redirect(url_for('course', handle=args['course']), code=302)
	if 'view' not in args:
		return None
	if args['view'] == 'player' and 'lectureid' in args:
		courses = query('SELECT courses.handle FROM courses JOIN lectures ON courses.id = lectures.course_id WHERE lectures.id = ?', args['lectureid'])
		if not courses:
			return "Not found", 404
		return redirect(url_for('lecture', course=courses[0]['handle'], id=args['lectureid']), code=302)
	if args['view'] == 'faq':
		return redirect(url_for('faq'), code=302)
	return None
@app.route('/site/')
@app.route('/site/<string:phpfile>')
def legacy(phpfile=None): #pylint: disable=too-many-return-statements
	"""Redirect old PHP-site URLs (embed.php, feed.php, ...) to their new endpoints."""
	# embed.php?lecture=<id>: embed by lecture id.
	if phpfile == 'embed.php' and ('lecture' in request.args):
		courses = query('SELECT courses.handle FROM courses JOIN lectures ON courses.id = lectures.course_id WHERE lectures.id = ?', request.args['lecture'])
		if not courses:
			return render_endpoint('index', 'Diese Seite existiert nicht!'), 404
		return redirect(url_for('embed', course=courses[0]['handle'], id=request.args['lecture']), code=302)
	# embed.php?vid=<id>: embed by video id, resolved to its lecture first.
	if phpfile == 'embed.php' and ('vid' in request.args):
		lectures = query('SELECT lecture_id FROM videos WHERE id = ?', request.args['vid'])
		if not lectures:
			return render_endpoint('index', 'Dieses Videos existiert nicht!'), 404
		courses = query('SELECT courses.handle FROM courses JOIN lectures ON courses.id = lectures.course_id WHERE lectures.id = ?', lectures[0]['lecture_id'])
		if not courses:
			return render_endpoint('index', 'Diese Seite existiert nicht!'), 404
		return redirect(url_for('embed', course=courses[0]['handle'], id=lectures[0]['lecture_id']), code=302)
	if phpfile == 'feed.php' and ('all' in request.args):
		return redirect(url_for('feed'), code=302)
	if phpfile == 'feed.php' and ('newcourses' in request.args):
		return redirect(url_for('courses_feed'), code=302)
	# feed.php?<handle>: the first query parameter name is the course handle.
	if phpfile == 'feed.php':
		return redirect(url_for('feed', handle=request.args.copy().popitem()[0]), code=302)
	print("Unknown legacy url:", request.url)
	return redirect(url_for('index'), code=302)
from xml.etree import ElementTree
import random
import string
from ipaddress import ip_address, ip_network
import json
import requests
from server import *
@sched_func(120)
def livestream_thumbnail():
	"""Every two minutes, schedule thumbnail jobs for all active livestreams."""
	livestreams = query('SELECT streams.lecture_id, streams.handle AS livehandle FROM streams WHERE streams.active')
	lectures = query('SELECT * FROM lectures WHERE stream_job IS NOT NULL')
	# Legacy streams and new-style stream jobs both produce thumbnail targets.
	streams = genlive(livestreams) + genlive_new(lectures)
	for stream in streams:
		schedule_job('thumbnail', {'src': stream['path'], 'filename': 'l_%i.jpg'%stream['lecture_id']})
@app.route('/internal/streaming/legacy_auth', methods=['GET', 'POST'])
@app.route('/internal/streaming/legacy_auth/<server>', methods=['GET', 'POST'])
def streamauth_legacy(server=None):
	"""nginx-rtmp on_publish/on_publish_done callback for the legacy 'live' app.

	Matches the stream handle to the lecture running right now (or an explicit
	?lecture= override), marks the stream active and, when a server is given,
	schedules a transcode job. Only reachable from FSMPI-internal addresses.
	"""
	# pylint: disable=too-many-branches,bare-except,chained-comparison
	internal = False
	if 'X-Real-IP' in request.headers:
		for net in config.get('FSMPI_IP_RANGES', []):
			if ip_address(request.headers['X-Real-IP']) in ip_network(net):
				internal = True
	if request.values['app'] != 'live':
		return 'Bad request', 400
	if not internal:
		return 'Forbidden', 403
	if request.values['call'] == 'publish':
		# NOTE(review): hard-coded publishing password; consider moving to config.
		if request.values['pass'] != 'caisoh8aht0wuSu':
			return 'Forbidden', 403
		matches = query('''SELECT lectures.*
			FROM lectures
			JOIN courses ON lectures.course_id = courses.id
			WHERE courses.handle = ?
			ORDER BY lectures.time DESC''', request.values['name'])
		now = datetime.now()
		match = {'id': -1}
		# Pick the lecture currently running (with 30 minutes of pre-roll).
		for lecture in matches:
			if lecture['time']-timedelta(minutes=30) <= now and \
					now <= lecture['time']+timedelta(minutes=lecture['duration']):
				match = lecture
				break
		if 'lecture' in request.values:
			match = {'id': request.values['lecture']}
		try:
			# Bugfix: the empty description/poster values must be SQL string
			# literals ''. The previous "" pairs terminated the Python string
			# (adjacent-literal concatenation), producing the invalid SQL
			# "VALUES (?, 0, 1, -1, , )" which the bare except swallowed, so
			# new stream handles were never inserted.
			modify("INSERT INTO streams (handle, active, visible, lecture_id, description, poster) VALUES (?, 0, 1, -1, '', '')", request.values['name'])
		except:
			# The handle may already exist; that is fine.
			pass
		if server:
			data = {'src': 'rtmp://%s/live/%s'%(server, request.values['name']),
					'destbase': 'rtmp://%s/hls/%s'%(server, request.values['name'])}
			job_id = schedule_job('simple_live_transcode', data, priority=10)
			modify("UPDATE streams SET active = 1, lecture_id = ?, job_id = ? WHERE handle = ?",
					match['id'], job_id, request.values['name'])
		else:
			modify("UPDATE streams SET active = 1, lecture_id = ? WHERE handle = ?",
					match['id'], request.values['name'])
	elif request.values['call'] == 'publish_done':
		job_id = query('SELECT job_id FROM streams WHERE handle = ?', request.values['name'])[0]['job_id']
		modify("UPDATE streams SET active = 0 WHERE handle = ?", request.values['name'])
		if job_id:
			cancel_job(job_id)
	else:
		return 'Bad request', 400
	return 'OK', 200
@job_handler('simple_live_transcode', state='failed')
def restart_failed_live_transcode(id, type, data, state, status): # pylint: disable=unused-argument
	"""Job handler: immediately re-queue a failed live transcode so the stream resumes."""
	restart_job(id)
@app.route('/internal/streaming')
@register_navbar('Streaming', icon='broadcast-tower', iconlib='fa')
@mod_required
def streaming():
	"""Overview page of live sources, enriched with client and codec stats
	scraped from the nginx-rtmp /stats XML of each source's server."""
	# pylint: disable=invalid-name
	sources = query('SELECT * FROM live_sources WHERE NOT deleted')
	for source in sources:
		if not source['clientid']:
			continue
		req = requests.get('http://%s:8080/stats'%source['server'])
		if req.status_code != 200:
			continue
		source['stat'] = {}
		tree = ElementTree.fromstring(req.text)
		# Use explicit "is None" checks: truth-testing Elements is deprecated,
		# and an existing element without children is falsy.
		if tree is None:
			continue
		s = tree.find("./server/application/[name='src']/live/stream/[name='%i']"%source['id'])
		if s is None:
			continue
		# Element.getchildren() was removed in Python 3.9 — iterate elements
		# directly instead; guard each find() result against None.
		client = s.find("client/[publishing='']")
		if client is not None:
			for e in client:
				source['stat'][e.tag] = e.text
		source['video'] = {}
		video_meta = s.find('meta/video')
		if video_meta is not None:
			for e in video_meta:
				source['video'][e.tag] = e.text
		source['audio'] = {}
		audio_meta = s.find('meta/audio')
		if audio_meta is not None:
			for e in audio_meta:
				source['audio'][e.tag] = e.text
	return render_template("streaming.html", sources=sources)
def gentoken():
	"""Return a 16-character random token over [A-Za-z0-9] from a CSPRNG."""
	rng = random.SystemRandom()
	alphabet = string.ascii_letters + string.digits
	return ''.join(rng.choice(alphabet) for _ in range(16))
@app.route('/internal/streaming/rekey/<int:id>')
@mod_required
def streamrekey(id):
	"""Generate a fresh stream key for a live source and flash it to the moderator."""
	modify('UPDATE live_sources SET `key` = ? WHERE id = ? AND NOT deleted', gentoken(), id)
	source = query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', id)[0]
	flash('''Der Streamkey von <strong>{name}</strong> wurde neu generiert:
		<span><input readonly type="text" style="width: 15em" value="{key}"></span><br>
		Trage diesen Streamkey zusammen mit einem der folgenden Streamingserver in die Streamingsoftware ein:
		<ul>
			<li>{server}</li>
			<li>{backup_server}</li>
		</ul>Insgesamt sollte die Streaming-URL z.B. so aussehen:
		<a href="{server}{key}">{server}{key}</a>'''.format(name=source['name'],
		key=source['key'], server=config['STREAMING_SERVER'],
		backup_server=config['BACKUP_STREAMING_SERVER']))
	return redirect(url_for('streaming'))
@app.route('/internal/streaming/drop/<int:id>')
@mod_required
def streamdrop(id):
	"""Force-disconnect the publisher of a live source via the nginx-rtmp control API."""
	rows = query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', id)
	if not rows:
		if 'ref' not in request.values:
			return 'Not found', 404
		flash('Streamquelle nicht gefunden')
		return redirect(request.values['ref'])
	source = rows[0]
	requests.get('http://%s:8080/control/drop/publisher?clientid=%i'%(source['server'], source['clientid']))
	if 'ref' in request.values:
		return redirect(request.values['ref'])
	return 'Ok', 200
@sched_func(120)
def live_source_thumbnail():
	"""Every two minutes, schedule a thumbnail job for each connected live source."""
	for src in query('SELECT * FROM live_sources WHERE clientid IS NOT NULL'):
		job_data = {'srcurl': 'rtmp://%s/src/%i'%(src['server'], src['id']),
				'filename': 's_%i.jpg'%src['id']}
		schedule_job('thumbnail', job_data)
def ip_in_networks(ip, networks):
	"""Return True if `ip` lies within any of the given networks (CIDR strings)."""
	address = ip_address(ip)
	return any(address in ip_network(net) for net in networks)
@app.route('/internal/streaming/auth/<server>', methods=['GET', 'POST'])
def streamauth(server):
    """Callback endpoint for the nginx-rtmp on_publish/on_play hooks.

    `server` is the hostname of the rtmp server issuing the callback; the
    event type arrives in the `call` parameter ('publish', 'publish_done'
    or 'play'). Only our own streaming servers (FSMPI_IP_RANGES) may call
    this endpoint.
    """
    # pylint: disable=too-many-return-statements
    if not ip_in_networks(request.headers['X-Real-IP'], config.get('FSMPI_IP_RANGES', [])):
        return 'Forbidden', 403
    # Sources publish their streams at rtmp://example.com/src/{key} and are
    # then redirected to rtmp://example.com/src/{id} to hide the secret stream key
    if request.values['call'] == 'publish':
        sources = query('SELECT * FROM live_sources WHERE NOT deleted AND `key` = ?', request.values['name'])
        if not sources:
            return 'Not found', 404
        # Record which server/client currently publishes this source and
        # issue a fresh preview key for external playback authorization
        modify('UPDATE live_sources SET server = ?, server_public = ?, clientid = ?, last_active = ?, preview_key = ? WHERE id = ?',
                server, request.args.get('public_ip', server), request.values['clientid'],
                datetime.now(), gentoken(), sources[0]['id'])
        # Generate a preview thumbnail immediately instead of waiting for
        # the next scheduled run
        live_source_thumbnail()
        # The Location header is the new (relative) stream name for
        # nginx-rtmp; Werkzeug must not rewrite it into an absolute URL
        ret = Response('Redirect', 301, {'Location': '%i'%sources[0]['id']})
        ret.autocorrect_location_header = False
        return ret
    elif request.values['call'] == 'publish_done':
        source = (query('SELECT * FROM live_sources WHERE server = ? AND clientid = ?', server, request.values['clientid']) or [None])[0]
        modify('UPDATE live_sources SET server = NULL, clientid = NULL, preview_key = NULL, last_active = ? WHERE server = ? AND clientid = ?',
                datetime.now(), server, request.values['clientid'])
        if not source:
            return 'Ok', 200
        # Cancel all running stream transcode jobs that used this source
        for lecture in query('SELECT * FROM lectures WHERE stream_job IS NOT NULL'):
            settings = json.loads(lecture['stream_settings'])
            if str(source['id']) in [str(settings.get('source1')), str(settings.get('source2'))]:
                cancel_job(lecture['stream_job'])
        return 'Ok', 200
    elif request.values['call'] == 'play':
        source = (query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', request.values['name']) or [None])[0]
        if not source:
            return 'Not found', 404
        # Internal consumers (e.g. the transcoders) may always play
        if ip_in_networks(request.values['addr'], config.get('INTERNAL_IP_RANGES', [])):
            return 'Ok', 200
        # External playback requires the per-session preview key
        if source['preview_key'] == request.values.get('preview_key'):
            return 'Ok', 200
        return 'Forbidden', 403
    return 'Bad request', 400
def schedule_livestream(lecture_id):
    """Build and schedule a 'complex_live_transcode' job for a lecture.

    Translates the lecture's `stream_settings` JSON (selected sources plus
    audio/video mixing options) into the job data consumed by ffworker.
    Returns the new job id, or None when a configured source is inactive or
    a stream is already running (a message is flashed in both cases).
    """
    # pylint: disable=too-many-branches,too-many-statements
    lecture = query('SELECT * FROM lectures WHERE id = ?', lecture_id)[0]
    settings = json.loads(lecture['stream_settings'])
    # Server that receives transcoded streams and generates HLS data, later
    # (hopefully) overwritten with one of the source's ingestion servers to
    # reduce the number of servers the stream' stability relies on
    dest_server = 'rwth.video'
    # Used by complex_live_transcode.c (ffworker) to open the sources and
    # construct a ffmpeg filter graph <https://ffmpeg.org/ffmpeg-filters.html>:
    #
    # Audio graph
    #   src1 -> {src1.afilter} \
    #                            amix -> {data.afilter} -> output
    #   src2 -> {src2.afilter} /
    # Video graph
    #   src1 -> {src1.vfilter} \
    #                            {vmix} -> scale=1920:1080 -> opt. logo overlay -> output
    #   src2 -> {src2.vfilter} /
    data = {
        'src1':
        {
            #'url': 'rtmp://...',
            'afilter': [],
            'vfilter': [],
        },
        'src2': {
            #'url': 'rtmp://...',
            'afilter': [],
            'vfilter': [],
        },
        'afilter': [],
        #'vmix': 'streamselect=map=0',
        'videoag_logo': int(bool(settings.get('video_showlogo'))),
        'lecture_id': lecture['id'],
        #'destbase': 'rtmp://...'
    }
    # afilter/vfilter are lists here to simplify the code below and must be
    # converted to a single filter expression afterwards.
    src1 = (query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', settings.get('source1')) or [{}])[0]
    src2 = (query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', settings.get('source2')) or [{}])[0]
    for idx, src in zip([1, 2], [src1, src2]):
        if src:
            dest_server = src['server']
            data['src%i'%idx]['url'] = 'rtmp://%s/src/%i'%(src['server'], src['id'])
            if not src['clientid']:
                # Source is configured but no publisher is connected
                flash('Quelle „%s“ ist nicht aktiv!'%src['name'])
                return None
            if settings.get('source%i_deinterlace'%idx):
                data['src%i'%idx]['vfilter'].append('yadif')
            mode = settings.get('source%i_audiomode'%idx)
            # Volumes are configured in percent; scale to a 0..1 factor
            leftvol = float(settings.get('source%i_leftvolume'%idx, 100))/100.0
            rightvol = float(settings.get('source%i_rightvolume'%idx, 100))/100.0
            if mode == 'mono':
                # Mix both channels into one; 0.5 keeps the overall level
                data['src%i'%idx]['afilter'].append('pan=mono|c0=%f*c0+%f*c1'%(0.5*leftvol, 0.5*rightvol))
            elif mode == 'stereo':
                data['src%i'%idx]['afilter'].append('pan=stereo|c0=%f*c0|c1=%f*c1'%(leftvol, rightvol))
            elif mode == 'unchanged':
                pass
            elif mode == 'off':
                # Silence this source's audio while keeping the channel layout
                data['src%i'%idx]['afilter'].append('pan=mono|c0=0*c0')
            else:
                raise Exception()
    data['destbase'] = 'rtmp://%s/hls/%i'%(dest_server, lecture['id'])
    mode = settings.get('videomode')
    if mode == '1':
        # Show only source 1 / only source 2
        data['vmix'] = 'streamselect=map=0'
    elif mode == '2':
        data['vmix'] = 'streamselect=map=1'
    elif mode == 'lecture4:3':
        # 4:3 camera beside a cropped slide strip, stacked horizontally
        data['src1']['vfilter'].append('scale=1440:1080')
        data['src2']['vfilter'].append('scale=1440:810,pad=1440:1080:0:135,crop=480:1080')
        data['vmix'] = 'hstack'
    elif mode == 'lecture16:9':
        data['src1']['vfilter'].append('scale=1440:810,pad=1440:1080:0:135')
        data['src2']['vfilter'].append('scale=1440:810,pad=1440:1080:0:135,crop=480:1080')
        data['vmix'] = 'hstack'
    elif mode == 'sidebyside':
        # Both sources at equal size, letterboxed to 1920x1080
        data['src1']['vfilter'].append('scale=960:540')
        data['src2']['vfilter'].append('scale=960:540')
        data['vmix'] = 'hstack,pad=1920:1080:0:270'
    if settings.get('audio_normalize'):
        data['afilter'].append('loudnorm')
    # Filter setup done, now lists of ffmpeg filter expressions must be
    # converted to single expressions
    def build_filter(exprs):
        return ','.join(exprs) if exprs else None
    data['afilter'] = build_filter(data['afilter'])
    data['src1']['afilter'] = build_filter(data['src1']['afilter'])
    data['src1']['vfilter'] = build_filter(data['src1']['vfilter'])
    data['src2']['afilter'] = build_filter(data['src2']['afilter'])
    data['src2']['vfilter'] = build_filter(data['src2']['vfilter'])
    if lecture['stream_job']:
        flash('Stream läuft bereits!')
        return None
    job_id = schedule_job('complex_live_transcode', data, priority=10)
    # Optimistic concurrency: only claim the lecture if no other stream job
    # grabbed it in the meantime; otherwise cancel our freshly created job.
    # NOTE(review): writes go to lectures_data while reads use lectures —
    # presumably a view over the data table; verify schema.
    modify('UPDATE lectures_data SET stream_job = ? WHERE id = ? AND stream_job IS NULL', job_id, lecture_id)
    if query('SELECT stream_job FROM lectures WHERE id = ?', lecture_id)[0]['stream_job'] != job_id:
        flash('Stream läuft bereits!')
        cancel_job(job_id)
        return None
    return job_id
@job_handler('complex_live_transcode', state='failed')
def restart_failed_complex_live_transcode(id, type, data, state, status): # pylint: disable=unused-argument
    """Restart a failed live transcode job so interrupted streams resume.

    NOTE(review): also fires for deliberately canceled jobs that end up in
    'failed' — presumably restart_job ignores those; confirm against its
    implementation.
    """
    restart_job(id)
@job_handler('complex_live_transcode', state='failed')
@job_handler('complex_live_transcode', state='finished')
def cleanup_after_complex_live_transcode_ended(id, type, data, state, status): # pylint: disable=unused-argument
    """Clear lectures' stream_job reference once the transcode truly ended.

    A failed job is only cleaned up when it was deliberately canceled; other
    failures keep the reference so the restart handler can resume streaming.
    """
    job = query('SELECT * FROM jobs WHERE id = ?', id, nlfix=False)[0]
    if state == 'finished' or (state == 'failed' and job['canceled']):
        modify('UPDATE lectures_data SET stream_job = NULL WHERE stream_job = ?', id)
@app.route('/internal/streaming/control', methods=['POST'])
@mod_required
def control_stream():
    """Start or stop the livestream of a lecture (moderator action).

    Expects `action` ('start' or 'stop') and `lecture_id` form values, then
    redirects back to the lecture's course page.
    """
    action = request.values['action']
    lecture_id = int(request.values['lecture_id'])
    # NOTE(review): course is None for an unknown lecture_id and the redirect
    # below would then raise TypeError; presumably acceptable for an internal
    # moderator-only endpoint — confirm.
    course = (query('SELECT courses.* FROM courses JOIN lectures ON (courses.id = lectures.course_id) WHERE lectures.id = ?', lecture_id) or [None])[0]
    if action == 'start':
        schedule_livestream(lecture_id)
    elif action == 'stop':
        # NOTE(review): stream_job may be NULL if no stream runs; verify that
        # cancel_job tolerates None
        lecture = query('SELECT * FROM lectures WHERE id = ?', lecture_id)[0]
        cancel_job(lecture['stream_job'])
    return redirect(url_for('course', handle=course['handle']))
from email.message import EmailMessage
import smtplib
import traceback
from server import *
def send_message(msgtype, recipients, **kwargs):
    """Render the mail templates for `msgtype` and send the result via SMTP.

    `recipients` is a list of address strings; commas are stripped from each
    entry because they would act as separators inside the header. Remaining
    keyword arguments are passed to the subject/body templates; `cc` (list)
    adds carbon-copy recipients. All errors are printed, never raised.
    """
    msg = EmailMessage()
    msg['From'] = config['MAIL_FROM']
    msg['To'] = ', '.join([r.replace(',', '') for r in recipients])
    cc = kwargs.pop('cc', []) #pylint: disable=invalid-name
    if cc:
        msg['Cc'] = ', '.join([r.replace(',', '') for r in cc])
    try:
        msg['Subject'] = render_template('mails/'+msgtype+'.subject', **kwargs)
        msg.set_content(render_template('mails/'+msgtype+'.body', **kwargs))
        if not config.get('MAIL_SERVER'):
            # Mail delivery disabled (e.g. development setup)
            return
        # Context manager guarantees the connection is closed (QUIT) even if
        # sending fails; previously the socket leaked on errors.
        with smtplib.SMTP(config['MAIL_SERVER']) as mailserver:
            if config.get('MAIL_ADDRESS_OVERWRITE'):
                # Test/staging override: deliver everything to a single inbox
                mailserver.send_message(msg, to_addrs=[config['MAIL_ADDRESS_OVERWRITE']])
            else:
                mailserver.send_message(msg)
    except: #pylint: disable=bare-except
        # We must not raise an exception here, else we would send another
        # error mail, rinse and repeat
        traceback.print_exc()
def notify_users(msgtype, uids, **kwargs):
    """Send a notification mail of type `msgtype` to the users in `uids`.

    Skips ids listed in `exclude_uids`, unknown users, users without an
    account name, and users who disabled notifications globally or for this
    specific message type. `importend=True` (sic — runtime key, kept as-is)
    CCs the default mailbox and `notify_admins=True` CCs the admins; when no
    direct recipient remains, the mail goes to the CC list instead.
    """
    recipients = []
    exclude = kwargs.pop('exclude_uids', [])
    for uid in uids:
        user = query('SELECT * FROM users WHERE id = ?', uid)
        if not user or user[0]['id'] in exclude:
            continue
        if not user[0]['fsacc'] or not user[0]['mail_notifications']:
            continue
        # Per-message-type opt-out column (e.g. notify_new_video), if present
        if 'notify_'+msgtype in user[0] and not user[0]['notify_'+msgtype]:
            continue
        if user[0]['realname']:
            recipients.append('%s <%s@%s>'%(user[0]['realname'], user[0]['fsacc'],
                    config['MAIL_SUFFIX']))
        else:
            recipients.append('%s@%s'%(user[0]['fsacc'], config['MAIL_SUFFIX']))
    cc = kwargs.get('cc', []) #pylint: disable=invalid-name
    if kwargs.pop('importend', False):
        cc.append(config['MAIL_DEFAULT'])
    if kwargs.pop('notify_admins', False):
        cc.append(config['MAIL_ADMINS'])
    if not recipients:
        # No opted-in user: promote the CC list to direct recipients
        recipients = cc
        cc = [] #pylint: disable=invalid-name
    if not recipients:
        return
    kwargs['cc'] = cc
    send_message(msgtype, recipients, **kwargs)
def notify_mods(msgtype, course_id, **kwargs):
    """Send a `msgtype` notification to every user responsible for a course."""
    responsible = query('SELECT * FROM responsible WHERE course_id = ?', course_id)
    uids = [entry['user_id'] for entry in responsible]
    notify_users(msgtype, uids, **kwargs)
def notify_admins(msgtype, **kwargs):
    """Send a notification mail of type `msgtype` to the admin mailbox."""
    try:
        send_message(msgtype, [config['MAIL_ADMINS']], **kwargs)
    except: #pylint: disable=bare-except
        # we must not raise an exception here, else we would send another mail, rinse and repeat
        traceback.print_exc()
@app.route('/internal/user/<int:user>/notifications')
@register_navbar('Benachrichtigungen', icon='bell', userendpoint=True)
@mod_required
def user_notifications(user):
    """Render the notification settings page for the given user id."""
    return render_template('notifications.html', user=query('SELECT * FROM users WHERE id = ?', user)[0])
import icalendar
import requests
from server import *
def get_next_meeting():
    """Return (uid, start) of the next 'VIDEO' meeting within two weeks.

    Fetches the ical feed configured as ICAL_URL and keeps only events whose
    summary is 'video' (case-insensitive) and whose start date lies between
    today and today+2 weeks. Returns (None, None) when nothing matches; the
    returned start datetime is made timezone-naive.
    """
    feed = requests.get(config['ICAL_URL']).content
    events = icalendar.Calendar.from_ical(feed).walk('VEVENT')
    today = datetime.now().date()
    horizon = today + timedelta(weeks=2)
    candidates = []
    for event in events:
        try:
            start = event['DTSTART'].dt.date()
            # Events lacking SUMMARY/DTSTART raise KeyError and are ignored
            if event['SUMMARY'].upper() == 'VIDEO' and today <= start <= horizon:
                candidates.append(event)
        except KeyError:
            pass
    if not candidates:
        return None, None
    nearest = min(candidates, key=lambda e: e['DTSTART'].dt)
    return str(nearest['UID']), nearest['DTSTART'].dt.replace(tzinfo=None)
@sched_func(60*60)
def update_meeting():
    """Hourly: sync the site announcement for the next Video AG meeting.

    Uses REPLACE keyed on the ical UID (stored as extid), so repeated runs
    update the existing announcement instead of duplicating it. The
    announcement is published a week before the meeting and expires two
    hours after its start.
    """
    uid, start = get_next_meeting()
    if uid is None:
        return
    # User-facing German announcement text — keep wording unchanged
    text = 'Die nächste Video AG-Sitzung findet am %s ab %s Uhr in den Räumlichkeiten der Fachschaft im Augustinerbach 2a statt.'%(
            human_date(start), human_time(start))
    modify('''REPLACE INTO announcements
            (extid, text, level, visible, time_publish, time_expire, time_created, time_updated, created_by)
            VALUES (?, ?, 0, 1, ?, ?, ?, ?, 0)''',
            'ical:'+uid, text, start-timedelta(days=7), start+timedelta(hours=2), datetime.now(), datetime.now())
......@@ -29,7 +29,7 @@ http {
listen 5000;
#listen [::]:5000;
#listen localhost:5000;
error_page 502 /static/500.html;
#error_page 502 /static/500.html;
location /static/ {
root .;
}
......
#!/usr/bin/python3
# Run the app under Werkzeug's profiler middleware for local performance
# analysis. Not for production use.
try:
    # Werkzeug < 1.0 shipped the profiler in werkzeug.contrib
    from werkzeug.contrib.profiler import ProfilerMiddleware
except ImportError:
    # Werkzeug >= 1.0 moved it to werkzeug.middleware.profiler
    from werkzeug.middleware.profiler import ProfilerMiddleware
from server import app
# Print the 30 most expensive functions for every handled request
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])
app.run(debug=True)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment