Compare revisions
Showing 4427 additions and 2531 deletions
from server import *
import re
from server import *
LDAP_USERRE = re.compile(r'[^a-z0-9]')
if 'LDAP_HOST' in config:
import ldap3
def ldapauth(user, password):
user = LDAP_USERRE.sub(r'', user.lower())
def ldapauth(user, password): # pylint: disable=function-redefined
if not user or not password or LDAP_USERRE.search(user):
return {}, []
try:
conn = ldap3.Connection(ldap3.Server(config['LDAP_HOST'], port=config['LDAP_PORT'], use_ssl=True), 'fsmpi\\%s'%user, password, auto_bind=True, check_names=False)
server = ldap3.Server(config['LDAP_HOST'], port=config['LDAP_PORT'], use_ssl=True)
conn = ldap3.Connection(server, 'fsmpi\\%s'%user, password, auto_bind=True, check_names=False)
except (ldap3.core.exceptions.LDAPBindError, ldap3.core.exceptions.LDAPPasswordIsMandatoryError):
return {}, []
conn.search("cn=users,dc=fsmpi,dc=rwth-aachen,dc=de", "(cn=%s)"%user, attributes=['memberOf', 'givenName', 'sn'])
@@ -18,13 +21,14 @@ if 'LDAP_HOST' in config:
return info, groups
else:
notldap = {
NOTLDAP = {
'videoag': ('videoag', ['fachschaft', 'videoag'], {'uid': 'videoag', 'givenName': 'Video', 'sn': 'Geier'}),
'gustav': ('passwort', ['fachschaft'], {'uid': 'gustav', 'givenName': 'Gustav', 'sn': 'Geier'})
}
def ldapauth(user, password):
user = LDAP_USERRE.sub(r'', user.lower())
if config.get('DEBUG') and user in notldap and password == notldap[user][0]:
return notldap[user][2], notldap[user][1]
def ldapauth(user, password): # pylint: disable=function-redefined
if LDAP_USERRE.search(user):
return {}, []
if config.get('DEBUG') and user in NOTLDAP and password == NOTLDAP[user][0]:
return NOTLDAP[user][2], NOTLDAP[user][1]
return {}, []
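For clarity, a small sketch of how this debug fallback behaves when DEBUG is enabled and LDAP_HOST is not configured (the values come from the NOTLDAP table above):

# NOTLDAP provides two hard-coded test accounts, so in debug setups:
info, groups = ldapauth('videoag', 'videoag')
# info   == {'uid': 'videoag', 'givenName': 'Video', 'sn': 'Geier'}
# groups == ['fachschaft', 'videoag']
assert ldapauth('videoag', 'wrong-password') == ({}, [])  # any mismatch falls through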
@@ -16,7 +16,7 @@ def legacy_index():
@app.route('/site/')
@app.route('/site/<string:phpfile>')
def legacy(phpfile=None):
def legacy(phpfile=None): #pylint: disable=too-many-return-statements
if phpfile == 'embed.php' and ('lecture' in request.args):
courses = query('SELECT courses.handle FROM courses JOIN lectures ON courses.id = lectures.course_id WHERE lectures.id = ?', request.args['lecture'])
if not courses:
......
File moved
from xml.etree import ElementTree
import random
import string
from ipaddress import ip_address, ip_network
import json
import requests
from server import *
@sched_func(30)
@sched_func(120)
def livestream_thumbnail():
livestreams = query('SELECT streams.lecture_id, streams.handle AS livehandle FROM streams WHERE streams.active')
for v in genlive(livestreams):
schedule_job('thumbnail', {'lectureid': str(v['lecture_id']), 'path': v['path']})
lectures = query('SELECT * FROM lectures WHERE stream_job IS NOT NULL')
for stream in genlive(livestreams)+genlive_new(lectures):
schedule_job('thumbnail', {'src': stream['path'], 'filename': 'l_%i.jpg'%stream['lecture_id']})
@app.route('/internal/streaming/legacy_auth', methods=['GET', 'POST'])
@app.route('/internal/streaming/legacy_auth/<server>', methods=['GET', 'POST'])
def streamauth(server=None):
def streamauth_legacy(server=None):
# pylint: disable=too-many-branches,bare-except,chained-comparison
internal = False
if 'X-Real-IP' in request.headers:
for net in config.get('FSMPI_IP_RANGES', []):
@@ -21,7 +30,11 @@ def streamauth(server=None):
if request.values['call'] == 'publish':
if request.values['pass'] != 'caisoh8aht0wuSu':
return 'Forbidden', 403
matches = query("SELECT lectures.* FROM lectures JOIN courses ON lectures.course_id = courses.id WHERE courses.handle = ? ORDER BY lectures.time DESC", request.values['name'])
matches = query('''SELECT lectures.*
FROM lectures
JOIN courses ON lectures.course_id = courses.id
WHERE courses.handle = ?
ORDER BY lectures.time DESC''', request.values['name'])
now = datetime.now()
match = {'id': -1}
for lecture in matches:
@@ -31,22 +44,22 @@ def streamauth(server=None):
break
if 'lecture' in request.values:
match = {'id': request.values['lecture']}
try:
modify("INSERT INTO streams (handle, active, visible, lecture_id, description, poster) VALUES (?, 0, 1, -1, "", "")", request.values['name'])
except:
pass
if not query("SELECT handle FROM streams WHERE handle = ?", request.values['name']):
# info: sql no test cover
modify("INSERT INTO streams (handle, active, visible, lecture_id, description, poster) VALUES (?, false, true, -1, '', '')", request.values['name'])
if server:
data = {'src': 'rtmp://%s/live/%s'%(server, request.values['name']),
'destbase': 'rtmp://%s/hls/%s'%(server, request.values['name'])}
job_id = schedule_job('simple_live_transcode', data, priority=10)
modify("UPDATE streams SET active = 1, lecture_id = ?, job_id = ? WHERE handle = ?",
modify("UPDATE streams SET active = true, lecture_id = ?, job_id = ? WHERE handle = ?",
match['id'], job_id, request.values['name'])
else:
modify("UPDATE streams SET active = 1, lecture_id = ? WHERE handle = ?",
# info: sql no test cover
modify("UPDATE streams SET active = true, lecture_id = ? WHERE handle = ?",
match['id'], request.values['name'])
elif request.values['call'] == 'publish_done':
job_id = query('SELECT job_id FROM streams WHERE handle = ?', request.values['name'])[0]['job_id']
modify("UPDATE streams SET active = 0 WHERE handle = ?", request.values['name'])
modify("UPDATE streams SET active = false WHERE handle = ?", request.values['name'])
if job_id:
cancel_job(job_id)
else:
@@ -54,6 +67,255 @@ def streamauth(server=None):
return 'OK', 200
@job_handler('simple_live_transcode', state='failed')
def restart_failed_live_transcode(id, type, data, state, status):
def restart_failed_live_transcode(id, type, data, state, status): # pylint: disable=unused-argument
restart_job(id)
@app.route('/internal/streaming')
@register_navbar('Streaming', icon='broadcast-tower', iconlib='fa')
@mod_required
def streaming():
# pylint: disable=invalid-name
sources = query('SELECT * FROM live_sources WHERE NOT deleted')
for source in sources:
if not source['clientid']:
continue
req = requests.get('http://%s:8080/stats'%source['server'])
if req.status_code != 200:
continue
source['stat'] = {}
tree = ElementTree.fromstring(req.text)
if not tree:
continue
s = tree.find("./server/application/[name='src']/live/stream/[name='%i']"%source['id'])
if not s:
continue
for e in s.find("client/[publishing='']").getchildren():
source['stat'][e.tag] = e.text
source['video'] = {}
for e in s.find('meta/video').getchildren():
source['video'][e.tag] = e.text
source['audio'] = {}
for e in s.find('meta/audio').getchildren():
source['audio'][e.tag] = e.text
return render_template("streaming.html", sources=sources)
def gentoken():
return ''.join(random.SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(16))
@app.route('/internal/streaming/rekey/<int:id>')
@mod_required
def streamrekey(id):
# info: sql no test cover
modify('UPDATE live_sources SET "key" = ? WHERE id = ? AND NOT deleted', gentoken(), id)
source = query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', id)[0]
flash('''Der Streamkey von <strong>{name}</strong> wurde neu generiert:
<span><input readonly type="text" style="width: 15em" value="{key}"></span><br>
Trage diesen Streamkey zusammen mit einem der folgenden Streamingserver in die Streamingsoftware ein:
<ul>
<li>{server}</li>
<li>{backup_server}</li>
</ul>Insgesamt sollte die Streaming-URL z.B. so aussehen:
<a href="{server}{key}">{server}{key}</a>'''.format(name=source['name'],
key=source['key'], server=config['STREAMING_SERVER'],
backup_server=config['BACKUP_STREAMING_SERVER']))
return redirect(url_for('streaming'))
@app.route('/internal/streaming/drop/<int:id>')
@mod_required
def streamdrop(id):
# info: sql no test cover
source = (query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', id) or [None])[0]
if not source:
if 'ref' in request.values:
flash('Streamquelle nicht gefunden')
return redirect(request.values['ref'])
else:
return 'Not found', 404
requests.get('http://%s:8080/control/drop/publisher?clientid=%i'%(source['server'], source['clientid']))
if 'ref' in request.values:
return redirect(request.values['ref'])
return 'Ok', 200
@sched_func(120)
def live_source_thumbnail():
# info: sql no test cover
sources = query('SELECT * FROM live_sources WHERE clientid IS NOT NULL')
for source in sources:
schedule_job('thumbnail', {'srcurl': 'rtmp://%s/src/%i'%(source['server'], source['id']), 'filename': 's_%i.jpg'%source['id']})
def ip_in_networks(ip, networks):
for net in networks:
if ip_address(ip) in ip_network(net):
return True
return False
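A quick illustration of the helper with placeholder addresses:

# ip_in_networks() wraps the stdlib ipaddress containment check.
assert ip_in_networks('137.226.1.2', ['137.226.0.0/16', '10.0.0.0/8'])
assert not ip_in_networks('192.0.2.1', ['137.226.0.0/16'])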
@app.route('/internal/streaming/auth/<server>', methods=['GET', 'POST'])
def streamauth(server):
# info: sql no test cover
# pylint: disable=too-many-return-statements
if not ip_in_networks(request.headers['X-Real-IP'], config.get('FSMPI_IP_RANGES', [])):
return 'Forbidden', 403
# Sources publish their streams at rtmp://example.com/src/{key} and are
# then redirected to rtmp://example.com/src/{id} to hide the secret stream key
if request.values['call'] == 'publish':
sources = query('SELECT * FROM live_sources WHERE NOT deleted AND "key" = ?', request.values['name'])
if not sources:
return 'Not found', 404
modify('UPDATE live_sources SET server = ?, server_public = ?, clientid = ?, last_active = ?, preview_key = ? WHERE id = ?',
server, request.args.get('public_ip', server), request.values['clientid'],
datetime.now(), gentoken(), sources[0]['id'])
live_source_thumbnail()
ret = Response('Redirect', 301, {'Location': '%i'%sources[0]['id']})
ret.autocorrect_location_header = False
return ret
elif request.values['call'] == 'publish_done':
source = (query('SELECT * FROM live_sources WHERE server = ? AND clientid = ?', server, request.values['clientid']) or [None])[0]
modify('UPDATE live_sources SET server = NULL, clientid = NULL, preview_key = NULL, last_active = ? WHERE server = ? AND clientid = ?',
datetime.now(), server, request.values['clientid'])
if not source:
return 'Ok', 200
for lecture in query('SELECT * FROM lectures WHERE stream_job IS NOT NULL'):
settings = json.loads(lecture['stream_settings'])
if str(source['id']) in [str(settings.get('source1')), str(settings.get('source2'))]:
cancel_job(lecture['stream_job'])
return 'Ok', 200
elif request.values['call'] == 'play':
source = (query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', request.values['name']) or [None])[0]
if not source:
return 'Not found', 404
if ip_in_networks(request.values['addr'], config.get('INTERNAL_IP_RANGES', [])):
return 'Ok', 200
if source['preview_key'] == request.values.get('preview_key'):
return 'Ok', 200
return 'Forbidden', 403
return 'Bad request', 400
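To make the call sequence concrete: this endpoint is presumably wired up as nginx-rtmp style on_publish/on_play/on_publish_done callbacks, which POST exactly the fields used above (call, name, clientid, addr). A minimal sketch of the three phases; the URL, key, ids and addresses are placeholders, and in production nginx also has to set an X-Real-IP header inside FSMPI_IP_RANGES:

AUTH = 'https://videoag.example/internal/streaming/auth/rwth.video'  # placeholder
# 1. A source publishes to rtmp://<server>/src/<secret key>; the backend looks the key
#    up in live_sources and answers with a 301 to the numeric source id.
requests.post(AUTH, data={'call': 'publish', 'name': '<stream key>', 'clientid': '17'})
# 2. A viewer plays rtmp://<server>/src/<id>; allowed from internal ranges or with the
#    per-session preview_key generated during publish.
requests.post(AUTH, data={'call': 'play', 'name': '3', 'addr': '10.0.0.5', 'preview_key': '<token>'})
# 3. The source disconnects; the backend clears server/clientid and cancels any stream
#    jobs that were using this source.
requests.post(AUTH, data={'call': 'publish_done', 'name': '3', 'clientid': '17'})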
def schedule_livestream(lecture_id):
# info: sql no test cover
# pylint: disable=too-many-branches,too-many-statements
lecture = query('SELECT * FROM lectures WHERE id = ?', lecture_id)[0]
settings = json.loads(lecture['stream_settings'])
# Server that receives transcoded streams and generates HLS data, later
# (hopefully) overwritten with one of the source's ingestion servers to
# reduce the number of servers the stream's stability relies on
dest_server = 'rwth.video'
# Used by complex_live_transcode.c (ffworker) to open the sources and
# construct a ffmpeg filter graph <https://ffmpeg.org/ffmpeg-filters.html>:
#
# Audio graph
# src1 -> {src1.afilter} \
# amix -> {data.afilter} -> output
# src2 -> {src2.afilter} /
# Video graph
# src1 -> {src1.vfilter} \
# {vmix} -> scale=1920:1080 -> opt. logo overlay -> output
# src2 -> {src2.vfilter} /
data = {
'src1':
{
#'url': 'rtmp://...',
'afilter': [],
'vfilter': [],
},
'src2': {
#'url': 'rtmp://...',
'afilter': [],
'vfilter': [],
},
'afilter': [],
#'vmix': 'streamselect=map=0',
'videoag_logo': int(bool(settings.get('video_showlogo'))),
'lecture_id': lecture['id'],
#'destbase': 'rtmp://...'
}
# afilter/vfilter are lists here to simplify the code below and must be
# converted to a single filter expression afterwards.
src1 = (query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', settings.get('source1')) or [{}])[0]
src2 = (query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', settings.get('source2')) or [{}])[0]
for idx, src in zip([1, 2], [src1, src2]):
if src:
dest_server = src['server']
data['src%i'%idx]['url'] = 'rtmp://%s/src/%i'%(src['server'], src['id'])
if not src['clientid']:
flash('Quelle „%s“ ist nicht aktiv!'%src['name'])
return None
if settings.get('source%i_deinterlace'%idx):
data['src%i'%idx]['vfilter'].append('yadif')
mode = settings.get('source%i_audiomode'%idx)
leftvol = float(settings.get('source%i_leftvolume'%idx, 100))/100.0
rightvol = float(settings.get('source%i_rightvolume'%idx, 100))/100.0
if mode == 'mono':
data['src%i'%idx]['afilter'].append('pan=mono|c0=%f*c0+%f*c1'%(0.5*leftvol, 0.5*rightvol))
elif mode == 'stereo':
data['src%i'%idx]['afilter'].append('pan=stereo|c0=%f*c0|c1=%f*c1'%(leftvol, rightvol))
elif mode == 'unchanged':
pass
elif mode == 'off':
data['src%i'%idx]['afilter'].append('pan=mono|c0=0*c0')
else:
raise Exception()
data['destbase'] = 'rtmp://%s/hls/%i'%(dest_server, lecture['id'])
mode = settings.get('videomode')
if mode == '1':
data['vmix'] = 'streamselect=map=0'
elif mode == '2':
data['vmix'] = 'streamselect=map=1'
elif mode == 'lecture4:3':
data['src1']['vfilter'].append('scale=1440:1080')
data['src2']['vfilter'].append('scale=1440:810,pad=1440:1080:0:135,crop=480:1080')
data['vmix'] = 'hstack'
elif mode == 'lecture16:9':
data['src1']['vfilter'].append('scale=1440:810,pad=1440:1080:0:135')
data['src2']['vfilter'].append('scale=1440:810,pad=1440:1080:0:135,crop=480:1080')
data['vmix'] = 'hstack'
elif mode == 'sidebyside':
data['src1']['vfilter'].append('scale=960:540')
data['src2']['vfilter'].append('scale=960:540')
data['vmix'] = 'hstack,pad=1920:1080:0:270'
if settings.get('audio_normalize'):
data['afilter'].append('loudnorm')
# Filter setup done, now lists of ffmpeg filter expressions must be
# converted to single expressions
def build_filter(exprs):
return ','.join(exprs) if exprs else None
data['afilter'] = build_filter(data['afilter'])
data['src1']['afilter'] = build_filter(data['src1']['afilter'])
data['src1']['vfilter'] = build_filter(data['src1']['vfilter'])
data['src2']['afilter'] = build_filter(data['src2']['afilter'])
data['src2']['vfilter'] = build_filter(data['src2']['vfilter'])
if lecture['stream_job']:
flash('Stream läuft bereits!')
return None
job_id = schedule_job('complex_live_transcode', data, priority=10)
modify('UPDATE lectures_data SET stream_job = ? WHERE id = ? AND stream_job IS NULL', job_id, lecture_id)
if query('SELECT stream_job FROM lectures WHERE id = ?', lecture_id)[0]['stream_job'] != job_id:
flash('Stream läuft bereits!')
cancel_job(job_id)
return None
return job_id
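As an illustration of the filter graph sketched in the comments above, this is roughly the payload schedule_livestream() would hand to the 'complex_live_transcode' job for two active sources in 'sidebyside' mode, src1 audio 'stereo' at 100%, src2 audio 'off' and loudness normalization enabled (ids, servers and settings are made up):

example_payload = {
    'src1': {'url': 'rtmp://rwth.video/src/1',
             'afilter': 'pan=stereo|c0=1.000000*c0|c1=1.000000*c1',
             'vfilter': 'scale=960:540'},
    'src2': {'url': 'rtmp://rwth.video/src/2',
             'afilter': 'pan=mono|c0=0*c0',
             'vfilter': 'scale=960:540'},
    'afilter': 'loudnorm',
    'vmix': 'hstack,pad=1920:1080:0:270',
    'videoag_logo': 1,
    'lecture_id': 42,
    'destbase': 'rtmp://rwth.video/hls/42',
}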
@job_handler('complex_live_transcode', state='failed')
def restart_failed_complex_live_transcode(id, type, data, state, status): # pylint: disable=unused-argument
restart_job(id)
@job_handler('complex_live_transcode', state='failed')
@job_handler('complex_live_transcode', state='finished')
def cleanup_after_complex_live_transcode_ended(id, type, data, state, status): # pylint: disable=unused-argument
# info: sql no test cover
job = query('SELECT * FROM jobs WHERE id = ?', id, nlfix=False)[0]
if state == 'finished' or (state == 'failed' and job['canceled']):
modify('UPDATE lectures_data SET stream_job = NULL WHERE stream_job = ?', id)
@app.route('/internal/streaming/control', methods=['POST'])
@mod_required
def control_stream():
# info: sql no test cover
action = request.values['action']
lecture_id = int(request.values['lecture_id'])
course = (query('SELECT courses.* FROM courses JOIN lectures ON (courses.id = lectures.course_id) WHERE lectures.id = ?', lecture_id) or [None])[0]
if action == 'start':
schedule_livestream(lecture_id)
elif action == 'stop':
lecture = query('SELECT * FROM lectures WHERE id = ?', lecture_id)[0]
cancel_job(lecture['stream_job'])
return redirect(url_for('course', handle=course['handle']))
from server import *
from email.message import EmailMessage
import smtplib
import traceback
from server import *
def send_message(msgtype, recipients, **kwargs):
msg = EmailMessage()
msg['From'] = config['MAIL_FROM']
msg['To'] = ', '.join([r.replace(',', '') for r in recipients])
cc = kwargs.pop('cc', [])
cc = kwargs.pop('cc', []) #pylint: disable=invalid-name
if cc:
msg['Cc'] = ', '.join([r.replace(',', '') for r in cc])
try:
@@ -16,13 +16,14 @@ def send_message(msgtype, recipients, **kwargs):
msg.set_content(render_template('mails/'+msgtype+'.body', **kwargs))
if not config.get('MAIL_SERVER'):
return
s = smtplib.SMTP(config['MAIL_SERVER'])
mailserver = smtplib.SMTP(config['MAIL_SERVER'])
if config.get('MAIL_ADDRESS_OVERWRITE'):
s.send_message(msg, to_addrs=[config['MAIL_ADDRESS_OVERWRITE']])
mailserver.send_message(msg, to_addrs=[config['MAIL_ADDRESS_OVERWRITE']])
else:
s.send_message(msg)
s.quit()
except:
mailserver.send_message(msg)
mailserver.quit()
except: #pylint: disable=bare-except
# we must not raise an exception here, or we would send another mail, rinse and repeat
traceback.print_exc()
def notify_users(msgtype, uids, **kwargs):
@@ -41,14 +42,14 @@ def notify_users(msgtype, uids, **kwargs):
config['MAIL_SUFFIX']))
else:
recipients.append('%s@%s'%(user[0]['fsacc'], config['MAIL_SUFFIX']))
cc = kwargs.get('cc', [])
cc = kwargs.get('cc', []) #pylint: disable=invalid-name
if kwargs.pop('importend', False):
cc.append(config['MAIL_DEFAULT'])
if kwargs.pop('notify_admins', False):
cc.append(config['MAIL_ADMINS'])
if not recipients:
recipients = cc
cc = []
cc = [] #pylint: disable=invalid-name
if not recipients:
return
kwargs['cc'] = cc
@@ -64,7 +65,8 @@ def notify_mods(msgtype, course_id, **kwargs):
def notify_admins(msgtype, **kwargs):
try:
send_message(msgtype, [config['MAIL_ADMINS']], **kwargs)
except:
except: #pylint: disable=bare-except
# we must not raise an exception here, or we would send another mail, rinse and repeat
traceback.print_exc()
@app.route('/internal/user/<int:user>/notifications')
@@ -72,4 +74,3 @@ def notify_admins(msgtype, **kwargs):
@mod_required
def user_notifications(user):
return render_template('notifications.html', user=query('SELECT * FROM users WHERE id = ?', user)[0])
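For orientation, the typical caller of these helpers is the sorter, which notifies course moderators about newly published videos; notify_mods() presumably resolves the course's moderators and forwards to send_message(), which renders the mails/new_video.body template with the given keyword arguments:

notify_mods('new_video', course['id'], course=course, lecture=lecture, video=video)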
from server import *
import icalendar
import requests
from server import *
def get_next_meeting():
ical = requests.get(config['ICAL_URL']).content
events = icalendar.Calendar.from_ical(ical).walk('VEVENT')
@@ -11,25 +12,24 @@ def get_next_meeting():
for event in events:
try:
start = event['DTSTART'].dt.date()
if 'VIDEO' != event['SUMMARY'].upper() or start < now or start > now+delta:
if event['SUMMARY'].upper() != 'VIDEO' or start < now or start > now+delta:
continue
meetings.append(event)
except:
except KeyError:
pass
if not meetings:
return
return None, None
event = sorted(meetings, key=lambda e: e['DTSTART'].dt)[0]
return str(event['UID']), event['DTSTART'].dt.replace(tzinfo=None)
@sched_func(60*60)
def update_meeting():
try:
uid, start = get_next_meeting()
except:
if uid is None:
return
text = 'Die nächste Video AG-Sitzung findet am %s ab %s Uhr in den Räumlichkeiten der Fachschaft im Augustinerbach 2a statt.'%(
human_date(start), human_time(start))
modify('''REPLACE INTO announcements
(extid, text, level, visible, time_publish, time_expire, time_created, time_updated, created_by)
VALUES (?, ?, 0, 1, ?, ?, ?, ?, 0)''',
VALUES (?, ?, 0, true, ?, ?, ?, ?, 0)''',
'ical:'+uid, text, start-timedelta(days=7), start+timedelta(hours=2), datetime.now(), datetime.now())
@@ -26,21 +26,22 @@ http {
keepalive_timeout 65;
types_hash_max_size 2048;
server {
#listen 5000;
listen 5000;
#listen [::]:5000;
listen localhost:5000;
#listen localhost:5000;
error_page 502 /static/500.html;
location /static/ {
root .;
}
location /files/ {
auth_request /auth;
auth_request /internal/auth;
auth_request_set $trackingcookie $upstream_http_set_cookie;
# For use with sshfs (recommended)
#alias /mnt/videoag/srv/videoag/released/;
#add_header Set-Cookie $trackingcookie;
# For use without sshfs
proxy_pass https://videoag.fsmpi.rwth-aachen.de/;
# NO TRAILING SLASH so that the /files/ prefix is not stripped from the proxied request!
proxy_pass https://videoag.fsmpi.rwth-aachen.de;
proxy_set_header Host "videoag.fsmpi.rwth-aachen.de";
proxy_set_header Set-Cookie $trackingcookie;
}
......
#!/usr/bin/python3
try:
from werkzeug.contrib.profiler import ProfilerMiddleware
except ImportError:
from werkzeug.middleware.profiler import ProfilerMiddleware
from server import app
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])
app.run(debug=True)
# required
flask
requests
# optional
lxml
pytz
ldap3
icalendar
mysql-connector-python
coverage
psycopg[c]
\ No newline at end of file
#!/usr/bin/env python3
import unittest
import os
import server
def set_up():
server.app.testing = True
def tear_down():
os.unlink(server.app.config['SQLITE_DB'])
if __name__ == '__main__':
set_up()
try:
suite = unittest.defaultTestLoader.discover('./tests/', pattern="*") #pylint: disable=invalid-name
unittest.TextTestRunner(verbosity=2, failfast=True).run(suite)
finally:
tear_down()
from server import *
import threading
import sched
from time import sleep
scheduler = sched.scheduler()
from server import *
scheduler = sched.scheduler() # pylint: disable=invalid-name
def run_scheduler():
sleep(1) # uWSGI does weird things on startup
while True:
scheduler.run()
sleep(10)
def sched_func(delay, priority=0, firstdelay=None, args=[], kargs={}):
if firstdelay == None:
def sched_func(delay, priority=0, firstdelay=None, args=None, kargs=None):
args = args or []
kargs = kargs or {}
if firstdelay is None:
firstdelay = random.randint(1, 120)
def wrapper(func):
def sched_wrapper():
with app.test_request_context():
with app.test_request_context(base_url='https://video.fsmpi.rwth-aachen.de/'):
try:
if config.get('DEBUG', False):
print("Scheduler: started {} (frequency 1/{}s)".format(func.__name__, delay))
func(*args, **kargs)
except Exception:
except Exception: # pylint: disable=broad-except
traceback.print_exc()
notify_admins('scheduler_exception', name=func.__name__,
traceback=traceback.format_exc())
......
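The signature change from args=[]/kargs={} to None defaults avoids Python's shared-mutable-default pitfall that pylint flags; a small standalone illustration (not part of the diff):

# Default values are evaluated once at definition time, so a mutable default
# is shared by every call that relies on it.
def buggy(value, acc=[]):
    acc.append(value)
    return acc
buggy(1)   # [1]
buggy(2)   # [1, 2] - the same list object again
def fixed(value, acc=None):
    acc = acc or []
    acc.append(value)
    return acc
fixed(1)   # [1]
fixed(2)   # [2] - a fresh list per call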
This diff is collapsed.
from server import *
import traceback
import os.path
from server import *
@app.route('/internal/sort/log')
@register_navbar('Sortierlog', icon='sort-by-attributes-alt', group='weitere')
@mod_required
@@ -16,9 +17,9 @@ def sort_log():
FROM sortlog
JOIN lectures ON lectures.id = sortlog.lecture_id
JOIN courses ON courses.id = lectures.course_id
ORDER BY sortlog.`when` DESC
ORDER BY sortlog."when" DESC
LIMIT 50
'''),sorterrorlog=query('SELECT * FROM sorterrorlog ORDER BY sorterrorlog.`when` DESC'))
'''), sorterrorlog=query('SELECT * FROM sorterrorlog ORDER BY sorterrorlog."when" DESC'))
def to_ascii(inputstring):
asciistring = inputstring
@@ -27,32 +28,17 @@ def to_ascii(inputstring):
return asciistring
@job_handler('probe', 'remux', 'transcode')
def update_video_metadata(jobid, jobtype, data, state, status):
def update_video_metadata(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
# info: sql no test cover
if 'video_id' not in data:
return
if jobtype not in ['remux', 'transcode']:
video = query('SELECT * FROM videos WHERE id = ?', data['video_id'])[0]
if video['hash'] and video['hash'] != status['hash']:
print('Hash mismatch for video', data['video_id'])
return
raise Exception('Hash mismatch for video {}'.format(data['video_id']))
modify('UPDATE videos_data SET hash = ?, file_size = ?, duration = ? WHERE id = ?',
status['hash'], status['filesize'], status['duration'], data['video_id'])
def insert_video(lectureid, dbfilepath, fileformatid, hash="", filesize=-1, duration=-1):
visible = query('SELECT courses.autovisible FROM courses JOIN lectures ON lectures.course_id = courses.id WHERE lectures.id = ?', lectureid)[0]['autovisible']
video_id = modify('''INSERT INTO videos_data
(lecture_id, visible, path, video_format, title, comment, internal, file_modified, time_created, time_updated, created_by, hash, file_size, duration)
VALUES
(?, ?, ?, ?, "", "", "", ?, ?, ?, ?, ?, ?, ?)''',
lectureid, visible, dbfilepath, fileformatid, datetime.now(), datetime.now(), datetime.now(), -1, hash, filesize, duration)
query('INSERT INTO sortlog (lecture_id,video_id,path,`when`) VALUES (?,?,?,?)', lectureid, video_id, dbfilepath, datetime.now())
schedule_thumbnail(lectureid)
schedule_job('probe', {'path': dbfilepath, 'lecture_id': lectureid, 'video_id': video_id, 'import-chapters': True})
video = query('SELECT videos.*, "format" AS sep, formats.* FROM videos JOIN formats ON formats.id = videos.video_format WHERE videos.id = ?', video_id)[0]
lecture = query('SELECT * FROM lectures WHERE id = ?', lectureid)[0]
course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0]
notify_mods('new_video', course['id'], course=course, lecture=lecture, video=video)
def schedule_thumbnail(lectureid):
videos = query('''
SELECT videos.path
@@ -60,139 +46,135 @@ def schedule_thumbnail(lectureid):
JOIN formats ON (videos.video_format = formats.id)
WHERE videos.lecture_id = ?
ORDER BY formats.prio DESC''', lectureid)
schedule_job('thumbnail', {'lectureid': str(lectureid), 'path': videos[0]['path']})
return schedule_job('thumbnail', {'src': videos[0]['path'], 'filename': 'l_%i.jpg'%lectureid})
@app.route('/internal/jobs/add/thumbnail', methods=['GET', 'POST'])
@mod_required
@csrf_protect
@handle_errors('jobs_overview', 'Zu dieser Veranstaltung existieren keine Videos!', 404, IndexError)
def add_thumbnail_job():
schedule_thumbnail(request.values['lectureid'])
schedule_thumbnail(int(request.values['lectureid']))
return redirect(request.values.get('ref', url_for('jobs_overview')))
@job_handler('transcode')
def insert_transcoded_video(jobid, jobtype, data, state, status):
if 'lecture_id' not in data or 'source_id' not in data or 'format_id' not in data:
return
if 'video_id' in data:
return
visible = query('SELECT courses.autovisible FROM courses JOIN lectures ON lectures.course_id = courses.id WHERE lectures.id = ?', data['lecture_id'])[0]['autovisible']
def insert_video(lectureid, dbfilepath, fileformatid, hash="", filesize=-1, duration=-1, sourceid=None): #pylint: disable=too-many-arguments
# info: sql no test cover
visible = query('SELECT courses.autovisible FROM courses JOIN lectures ON lectures.course_id = courses.id WHERE lectures.id = ?', lectureid)[0]['autovisible']
video_id = modify('''INSERT INTO videos_data
(lecture_id, visible, path, video_format, title, comment, internal, file_modified, time_created, time_updated, created_by, hash, file_size, source, duration)
(lecture_id, visible, path, video_format, title, comment, internal, file_modified, time_created, time_updated, created_by, hash, file_size, duration, source)
VALUES
(?, ?, ?, ?, "", "", "", ?, ?, ?, ?, ?, ?, ?, ?)''',
data['lecture_id'], visible, data['output']['path'], data['format_id'],
datetime.now(), datetime.now(), datetime.now(), -1, status['hash'],
status['filesize'], data['source_id'], status['duration'])
schedule_remux(data['lecture_id'], video_id)
schedule_thumbnail(data['lecture_id'])
video = query('SELECT videos.*, "format" AS sep, formats.* FROM videos JOIN formats ON formats.id = videos.video_format WHERE videos.id = ?', video_id)[0]
lecture = query('SELECT * FROM lectures WHERE id = ?', data['lecture_id'])[0]
(?, ?, ?, ?, '', '', '', ?, ?, ?, ?, ?, ?, ?, ?)''',
lectureid, visible, dbfilepath, fileformatid, datetime.now(), datetime.now(), datetime.now(), -1, hash, filesize, duration, sourceid,
get_id=True)
if not sourceid:
query('INSERT INTO sortlog (lecture_id,video_id,path,"when") VALUES (?,?,?,?)', lectureid, video_id, dbfilepath, datetime.now())
schedule_job('probe', {'path': dbfilepath, 'lecture_id': lectureid, 'video_id': video_id, 'import-chapters': True})
schedule_thumbnail(lectureid)
video = query('SELECT videos.*, \'format\' AS sep, formats.* FROM videos JOIN formats ON formats.id = videos.video_format WHERE videos.id = ?', video_id)[0]
lecture = query('SELECT * FROM lectures WHERE id = ?', lectureid)[0]
course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0]
notify_mods('new_video', course['id'], course=course, lecture=lecture, video=video)
return video_id
def sort_file(filename, course=None, lectures=None):
# filenames: <handle>-<sorter>-<format>.mp4
# "sorter" musst be found with fuzzy matching. "sorter" musst be one or more of the following types: (inside the loop)
def split_filename(filename):
# '_' and ' ' are handled like '-'
splitfilename = filename.replace('_','-').replace(' ','-').split('-')
if not course:
handle = splitfilename[0]
if splitfilename[0].endswith('ws') or splitfilename[0].endswith('ss'):
handle = '-'.join(splitfilename[:2])
courses = query('SELECT * FROM courses WHERE handle = ?', handle)
if not courses:
return [], 0
course = courses[0]
if not lectures:
lectures = query('SELECT * from lectures where course_id = ?', course['id'])
# we save all extracted data in a dict
return filename.replace('_', '-').replace(' ', '-').split('-')
def parse_filename(filename):
# filenames: <handle>-<sorter>-<format>.<ext>, split at '-' into an array
data = {'keywords': []}
# parse the file name and save all data in 'data'
for s in splitfilename:
s = s.replace('.mp4','')
for chunk in filename:
chunk = chunk.replace('.mp4', '').replace('.webm', '')
#-<YYMMDD> (date)
#-<HHMM> (time)
#-<keyword>
# Looking for keywords in: title, speaker, comment and the comma-separated list in internal
try:
if len(s) == 6:
data['date'] = datetime.strptime(s,'%y%m%d').date()
elif len(s) == 4:
data['time'] = datetime.strptime(s,'%H%M').time()
if len(chunk) == 6:
data['date'] = datetime.strptime(chunk, '%y%m%d').date()
elif len(chunk) == 4:
data['time'] = datetime.strptime(chunk, '%H%M').time()
else:
data['keywords'].append(s)
data['keywords'].append(chunk)
except ValueError:
# if it's not a date or time, handle it as a keyword
data['keywords'].append(s)
# try to match the file on a single lecture
# if it's not a valid date or time, handle it as a keyword
data['keywords'].append(chunk)
return data
def filter_lectures_by_datetime(lectures, date, time):
matches = []
# first try date and time (if one of them is set)
if ('date' in data) or ('time' in data):
if date or time:
for lecture in lectures:
if not ('time' in lecture) or not lecture['time']:
if (not 'time' in lecture) or (not lecture['time']):
continue
if ('date' in data) and (lecture['time'].date() != data['date']):
if date and (lecture['time'].date() != date):
continue
if ('time' in data) and (lecture['time'].time() != data['time']):
if time and (lecture['time'].time() != time):
continue
matches.append(lecture)
# if we can't match exactly based on date and time, we have to match keywords
if ((len(matches) != 1) and (len(data['keywords']) > 0)):
# only test lectures with the correct date/time, if we have any; otherwise test for matches in all lectures of this course
if len(matches) == 0:
matches.extend(lectures)
found = False
return matches
def filter_lectures_by_keywords(lectures, keywords):
for field in ['title', 'speaker', 'comment', 'internal']:
for lecture in matches:
for keyword in data['keywords']:
for lecture in lectures:
for keyword in keywords:
# first test for an exact match, else convert to ASCII and try a substring test
if (keyword == lecture[field]) or \
(str(keyword).lower() in str(to_ascii(lecture[field]).lower())):
found = True
matches = [lecture]
if found:
break
if found:
break
if found:
break
# now we should have found exactly one match
# default format is "unknown", with id 0
fmt = 0
if (field in lecture) and (
(keyword == lecture[field]) or
(to_ascii(str(keyword).lower()) in str(to_ascii(lecture[field]).lower()))
):
return [lecture]
return []
def extract_format_keyword_from_filename(filename):
return filename[-1].split('.', 1)[0].lower()
def filter_formats_by_filename(filename):
formatstring = extract_format_keyword_from_filename(filename)
formats = query('SELECT * FROM formats ORDER BY prio DESC')
for videoformat in formats:
# we match the last part of the file name without the extension
formatstring = splitfilename[-1].split('.',1)[0].lower()
if formatstring in videoformat['keywords'].replace(',', ' ').split(' '):
fmt = videoformat['id']
break
return videoformat['id']
# default format is "unknown", with id 0
return 0
def sort_file(filename, course=None, lectures=None):
filename = split_filename(filename)
if not course:
handle = filename[0]
if filename[0].endswith('ws') or filename[0].endswith('ss'):
handle = '-'.join(filename[:2])
courses = query('SELECT * FROM courses WHERE handle = ?', handle)
if not courses:
return [], 0
course = courses[0]
if not lectures:
lectures = query('SELECT * from lectures where course_id = ?', course['id'])
# parse all data from the file name
data = parse_filename(filename)
# try to match the file on a single lecture
matches = filter_lectures_by_datetime(lectures, data.get('date'), data.get('time'))
# if we can't match exactly based on date and time, we have to match keywords
if ((len(matches) != 1) and (len(data['keywords']) > 0)):
if matches:
# only test lectures with the correct date/time, if we have any
matches = filter_lectures_by_keywords(matches, data['keywords'])
else:
# Else test for matches in all lectures of this course
matches = filter_lectures_by_keywords(lectures, data['keywords'])
# now we should have found exactly one match
fmt = filter_formats_by_filename(filename)
return matches, fmt
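To make the sorter's file-name convention concrete, a short walk-through using the helpers defined above; the file name is hypothetical but follows the <handle>-<date>-<time>-<format> pattern described in the comments:

parts = split_filename('08ws-swt-081118-1615-hd.mp4')
# parts == ['08ws', 'swt', '081118', '1615', 'hd.mp4']; the course handle is '08ws-swt'
data = parse_filename(parts)
# data == {'keywords': ['08ws', 'swt', 'hd'], 'date': 2008-11-18, 'time': 16:15}
fmt = filter_formats_by_filename(parts)
# matches the trailing 'hd' keyword against the formats table, falling back to format id 0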
def log_sort_error(course_id, path, matches):
matches_id = []
for match in matches:
matches_id.append(str(match['id']))
query('INSERT INTO sorterrorlog_data (course_id, path, matches, `when`, time_updated, time_created) VALUES (?, ?, ?, ?, ?, ?)',
query('INSERT INTO sorterrorlog_data (course_id, path, matches, "when", time_updated, time_created) VALUES (?, ?, ?, ?, ?, ?)',
course_id, path, ','.join(matches_id), datetime.now(), datetime.now(), datetime.now())
def sort_api_token_required(func):
@wraps(func)
def decorator(*args, **kwargs):
if 'apikey' in request.values:
token = request.values['apikey']
elif request.get_json() and ('apikey' in request.get_json()):
token = request.get_json()['apikey']
else:
token = None
if not token == config.get('SORTER_API_KEY', [None]):
return 'Permission denied', 403
else:
return func(*args, **kwargs)
return decorator
@app.route('/internal/sort/encoded/<filename>')
@sort_api_token_required
@api_token_required('SORTER_API_KEY')
def sort_encoded(filename):
matches, fmt = sort_file(filename)
if len(matches) != 1:
@@ -205,11 +187,11 @@ def sort_encoded(filename):
return 'OK', 200
@app.route('/internal/sort/autoencode')
@sort_api_token_required
@api_token_required('SORTER_API_KEY')
def sort_autoencode():
filename = request.values['path']
path = 'autoencode/'+filename
matches, fmt = sort_file(filename)
matches, fmt = sort_file(filename) #pylint: disable=unused-variable
if len(matches) != 1:
log_sort_error(-1, 'raw/'+path, matches)
return "Could not match filename", 400
@@ -218,7 +200,7 @@ def sort_autoencode():
return 'OK', 200
@job_handler('publish_video')
def handle_published_video(jobid, jobtype, data, state, status):
def handle_published_video(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
if 'lecture_id' not in data or 'format_id' not in data:
return
insert_video(data['lecture_id'], data['path'], data['format_id'], hash=status['hash'], filesize=status['filesize'], duration=status['duration'])
@@ -233,7 +215,9 @@ def sort_now():
for mountpoint in config['VIDEOMOUNT']:
existingvideos = query('SELECT videos.path FROM videos JOIN lectures ON (videos.lecture_id = lectures.id) WHERE lectures.course_id = ?', course['id'])
knownerrors = query('SELECT sorterrorlog.path FROM sorterrorlog WHERE sorterrorlog.course_id = ?', course['id'])
ignorefiles = existingvideos + knownerrors
ignorefiles = []
for path in existingvideos + knownerrors:
ignorefiles.append(os.path.basename(path['path']))
lectures = query('SELECT * from lectures where course_id = ?', course['id'])
coursepath = mountpoint['mountpoint']+course['handle']
try:
@@ -243,16 +227,10 @@ def sort_now():
for filename in files:
try:
# if the video is in the table "videos" already (with the correct course), skip it
ignore = False
for file_to_ignore in ignorefiles:
# path is something like
# vpnonline/08ws-swt/08ws-swt-081118.mp4
if os.path.basename(filename) == os.path.basename(file_to_ignore['path']):
ignore = True
break
if ignore:
if os.path.basename(filename) in ignorefiles:
continue
if not os.path.splitext(filename)[1] == '.mp4':
ext = os.path.splitext(filename)[1]
if not ext == '.mp4' and not ext == '.webm':
continue
matches, fmt = sort_file(filename, course=course, lectures=lectures)
dbfilepath = mountpoint['prefix']+course['handle']+'/'+filename
@@ -260,11 +238,10 @@ def sort_now():
insert_video(matches[0]['id'], dbfilepath, fmt)
else:
log_sort_error(course['id'], dbfilepath, matches)
except Exception:
except: #pylint: disable=bare-except
traceback.print_exc()
modify('COMMIT')
if 'ref' in request.values:
return redirect(request.values['ref'])
else:
return 'OK', 200
This diff is collapsed.
This diff is collapsed.
/*!
* Font Awesome Free 5.1.1 by @fontawesome - https://fontawesome.com
* License - https://fontawesome.com/license (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)
*/
@font-face {
font-family: 'Font Awesome 5 Brands';
font-style: normal;
font-weight: normal;
src: url("../webfonts/fa-brands-400.eot");
src: url("../webfonts/fa-brands-400.eot?#iefix") format("embedded-opentype"), url("../webfonts/fa-brands-400.woff2") format("woff2"), url("../webfonts/fa-brands-400.woff") format("woff"), url("../webfonts/fa-brands-400.ttf") format("truetype"), url("../webfonts/fa-brands-400.svg#fontawesome") format("svg"); }
.fab {
font-family: 'Font Awesome 5 Brands'; }
/*!
* Font Awesome Free 5.1.1 by @fontawesome - https://fontawesome.com
* License - https://fontawesome.com/license (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)
*/
@font-face{font-family:"Font Awesome 5 Brands";font-style:normal;font-weight:normal;src:url(../webfonts/fa-brands-400.eot);src:url(../webfonts/fa-brands-400.eot?#iefix) format("embedded-opentype"),url(../webfonts/fa-brands-400.woff2) format("woff2"),url(../webfonts/fa-brands-400.woff) format("woff"),url(../webfonts/fa-brands-400.ttf) format("truetype"),url(../webfonts/fa-brands-400.svg#fontawesome) format("svg")}.fab{font-family:"Font Awesome 5 Brands"}
\ No newline at end of file
This diff is collapsed.
This diff is collapsed.