Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision
  • bootstrap4
  • intros
  • master
  • modules
  • postgres_integration
  • s3compatible
6 results

Target

Select target project
  • jannik/website
  • vincent/website
  • dominic/website
  • romank/website
  • videoaginfra/website
5 results
Select Git revision
  • bootstrap4
  • forbid-save-as
  • intros
  • live_sources
  • master
  • modules
  • moodle-integration
  • patch-double-tap-seek
  • patch_datum_anzeigen
  • patch_raum_anzeigen
  • upload-via-token
11 results
Show changes
Showing with 31440 additions and 900 deletions
from server import *
import re
from server import *
LDAP_USERRE = re.compile(r'[^a-z0-9]')
if 'LDAP_HOST' in config:
import ldap3
def ldapauth(user, password):
user = LDAP_USERRE.sub(r'', user.lower())
def ldapauth(user, password): # pylint: disable=function-redefined
if not user or not password or LDAP_USERRE.search(user):
return {}, []
try:
conn = ldap3.Connection(ldap3.Server(config['LDAP_HOST'], port=config['LDAP_PORT'], use_ssl=True), 'fsmpi\\%s'%user, password, auto_bind=True, check_names=False)
server = ldap3.Server(config['LDAP_HOST'], port=config['LDAP_PORT'], use_ssl=True)
conn = ldap3.Connection(server, 'fsmpi\\%s'%user, password, auto_bind=True, check_names=False)
except (ldap3.core.exceptions.LDAPBindError, ldap3.core.exceptions.LDAPPasswordIsMandatoryError):
return {}, []
conn.search("cn=users,dc=fsmpi,dc=rwth-aachen,dc=de", "(cn=%s)"%user, attributes=['memberOf', 'givenName', 'sn'])
......@@ -18,13 +21,14 @@ if 'LDAP_HOST' in config:
return info, groups
else:
notldap = {
NOTLDAP = {
'videoag': ('videoag', ['fachschaft', 'videoag'], {'uid': 'videoag', 'givenName': 'Video', 'sn': 'Geier'}),
'gustav': ('passwort', ['fachschaft'], {'uid': 'gustav', 'givenName': 'Gustav', 'sn': 'Geier'})
}
def ldapauth(user, password): # pylint: disable=function-redefined
	"""Debug-only fallback authentication used when LDAP_HOST is not configured.

	Looks the user up in the static NOTLDAP table (only when DEBUG is on) and
	checks the password against the stored plaintext entry.
	Returns (userinfo_dict, group_list); ({}, []) on any failure.
	"""
	# Reject user names with characters outside [a-z0-9] outright instead of
	# normalizing them, mirroring the real LDAP implementation's guard.
	if LDAP_USERRE.search(user):
		return {}, []
	if config.get('DEBUG') and user in NOTLDAP and password == NOTLDAP[user][0]:
		return NOTLDAP[user][2], NOTLDAP[user][1]
	return {}, []
......@@ -16,7 +16,7 @@ def legacy_index():
@app.route('/site/')
@app.route('/site/<string:phpfile>')
def legacy(phpfile=None):
def legacy(phpfile=None): #pylint: disable=too-many-return-statements
if phpfile == 'embed.php' and ('lecture' in request.args):
courses = query('SELECT courses.handle FROM courses JOIN lectures ON courses.id = lectures.course_id WHERE lectures.id = ?', request.args['lecture'])
if not courses:
......
from server import *
import requests
from xml.etree import ElementTree
import random
import string
from ipaddress import ip_address, ip_network
import json
import requests
from server import *
@sched_func(120)
def livestream_thumbnail():
	"""Periodically (every 120s) schedule thumbnail jobs for all active livestreams.

	Covers both the legacy streams table and the newer lectures with a
	running stream_job; one 'thumbnail' job per generated live entry.
	"""
	livestreams = query('SELECT streams.lecture_id, streams.handle AS livehandle FROM streams WHERE streams.active')
	lectures = query('SELECT * FROM lectures WHERE stream_job IS NOT NULL')
	for stream in genlive(livestreams)+genlive_new(lectures):
		schedule_job('thumbnail', {'src': stream['path'], 'filename': 'l_%i.jpg'%stream['lecture_id']})
@app.route('/internal/streaming/legacy_auth', methods=['GET', 'POST'])
@app.route('/internal/streaming/legacy_auth/<server>', methods=['GET', 'POST'])
def streamauth_legacy(server=None):
# pylint: disable=too-many-branches,bare-except,chained-comparison
internal = False
if 'X-Real-IP' in request.headers:
for net in config.get('FSMPI_IP_RANGES', []):
......@@ -26,7 +30,11 @@ def streamauth_legacy(server=None):
if request.values['call'] == 'publish':
if request.values['pass'] != 'caisoh8aht0wuSu':
return 'Forbidden', 403
matches = query("SELECT lectures.* FROM lectures JOIN courses ON lectures.course_id = courses.id WHERE courses.handle = ? ORDER BY lectures.time DESC", request.values['name'])
matches = query('''SELECT lectures.*
FROM lectures
JOIN courses ON lectures.course_id = courses.id
WHERE courses.handle = ?
ORDER BY lectures.time DESC''', request.values['name'])
now = datetime.now()
match = {'id': -1}
for lecture in matches:
......@@ -36,22 +44,22 @@ def streamauth_legacy(server=None):
break
if 'lecture' in request.values:
match = {'id': request.values['lecture']}
try:
modify("INSERT INTO streams (handle, active, visible, lecture_id, description, poster) VALUES (?, 0, 1, -1, "", "")", request.values['name'])
except:
pass
if not query("SELECT handle FROM streams WHERE handle = ?", request.values['name']):
# info: sql no test cover
modify("INSERT INTO streams (handle, active, visible, lecture_id, description, poster) VALUES (?, false, true, -1, '', '')", request.values['name'])
if server:
data = {'src': 'rtmp://%s/live/%s'%(server, request.values['name']),
'destbase': 'rtmp://%s/hls/%s'%(server, request.values['name'])}
job_id = schedule_job('simple_live_transcode', data, priority=10)
modify("UPDATE streams SET active = 1, lecture_id = ?, job_id = ? WHERE handle = ?",
modify("UPDATE streams SET active = true, lecture_id = ?, job_id = ? WHERE handle = ?",
match['id'], job_id, request.values['name'])
else:
modify("UPDATE streams SET active = 1, lecture_id = ? WHERE handle = ?",
# info: sql no test cover
modify("UPDATE streams SET active = true, lecture_id = ? WHERE handle = ?",
match['id'], request.values['name'])
elif request.values['call'] == 'publish_done':
job_id = query('SELECT job_id FROM streams WHERE handle = ?', request.values['name'])[0]['job_id']
modify("UPDATE streams SET active = 0 WHERE handle = ?", request.values['name'])
modify("UPDATE streams SET active = false WHERE handle = ?", request.values['name'])
if job_id:
cancel_job(job_id)
else:
......@@ -59,22 +67,23 @@ def streamauth_legacy(server=None):
return 'OK', 200
@job_handler('simple_live_transcode', state='failed')
def restart_failed_live_transcode(id, type, data, state, status): # pylint: disable=unused-argument
	"""Job handler: restart a failed legacy live transcode so the stream resumes."""
	restart_job(id)
@app.route('/internal/streaming')
@register_navbar('Streaming', icon='broadcast-tower', iconlib='fa')
@mod_required
def streaming():
# pylint: disable=invalid-name
sources = query('SELECT * FROM live_sources WHERE NOT deleted')
for source in sources:
if not source['clientid']:
continue
r = requests.get('http://%s:8080/stats'%source['server'])
if r.status_code != 200:
req = requests.get('http://%s:8080/stats'%source['server'])
if req.status_code != 200:
continue
source['stat'] = {}
tree = ElementTree.fromstring(r.text)
tree = ElementTree.fromstring(req.text)
if not tree:
continue
s = tree.find("./server/application/[name='src']/live/stream/[name='%i']"%source['id'])
......@@ -96,14 +105,25 @@ def gentoken():
@app.route('/internal/streaming/rekey/<int:id>')
@mod_required
def streamrekey(id):
	"""Generate a fresh secret stream key for live source `id` and flash it to the moderator."""
	# info: sql no test cover
	# "key" is double-quoted: it is a reserved word in some SQL dialects.
	modify('UPDATE live_sources SET "key" = ? WHERE id = ? AND NOT deleted', gentoken(), id)
	source = query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', id)[0]
	flash('''Der Streamkey von <strong>{name}</strong> wurde neu generiert:
<span><input readonly type="text" style="width: 15em" value="{key}"></span><br>
Trage diesen Streamkey zusammen mit einem der folgenden Streamingserver in die Streamingsoftware ein:
<ul>
<li>{server}</li>
<li>{backup_server}</li>
</ul>Insgesamt sollte die Streaming-URL z.B. so aussehen:
<a href="{server}{key}">{server}{key}</a>'''.format(name=source['name'],
		key=source['key'], server=config['STREAMING_SERVER'],
		backup_server=config['BACKUP_STREAMING_SERVER']))
	return redirect(url_for('streaming'))
@app.route('/internal/streaming/drop/<int:id>')
@mod_required
def streamdrop(id):
# info: sql no test cover
source = (query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', id) or [None])[0]
if not source:
if 'ref' in request.values:
......@@ -118,40 +138,40 @@ def streamdrop(id):
@sched_func(120)
def live_source_thumbnail():
	"""Schedule a thumbnail job for every currently connected live source (runs every 120s)."""
	# info: sql no test cover
	# clientid is set while a source is publishing (see streamauth), so this
	# only covers sources that are live right now.
	sources = query('SELECT * FROM live_sources WHERE clientid IS NOT NULL')
	for source in sources:
		# Sources publish at rtmp://<server>/src/<id>; grab a frame from there.
		schedule_job('thumbnail', {'srcurl': 'rtmp://%s/src/%i'%(source['server'], source['id']), 'filename': 's_%i.jpg'%source['id']})
def ip_in_networks(ip, networks):
	"""Return True iff address `ip` lies inside at least one of `networks`.

	`ip` and each network may be given as strings; an empty `networks`
	iterable yields False.
	"""
	return any(ip_address(ip) in ip_network(net) for net in networks)
@app.route('/internal/streaming/auth/<server>', methods=['GET', 'POST'])
def streamauth(server):
internal = False
for net in config.get('FSMPI_IP_RANGES', []):
if ip_address(request.headers['X-Real-IP']) in ip_network(net):
internal = True
if not internal:
# info: sql no test cover
# pylint: disable=too-many-return-statements
if not ip_in_networks(request.headers['X-Real-IP'], config.get('FSMPI_IP_RANGES', [])):
return 'Forbidden', 403
# Sources publish their streams at rtmp://example.com/src/{key} and are
# the redirected to rtmp://example.com/src/{id} to hide the secret stream key
if request.values['call'] == 'publish':
sources = query('SELECT * FROM live_sources WHERE NOT deleted AND `key` = ?', request.values['name'])
sources = query('SELECT * FROM live_sources WHERE NOT deleted AND "key" = ?', request.values['name'])
if not sources:
return 'Not found', 404
modify('UPDATE live_sources SET server = ?, server_public = ?, clientid = ?, last_active = ?, preview_key = ? WHERE id = ?', server, request.args.get('public_ip', server), request.values['clientid'], datetime.now(), gentoken(), sources[0]['id'])
modify('UPDATE live_sources SET server = ?, server_public = ?, clientid = ?, last_active = ?, preview_key = ? WHERE id = ?',
server, request.args.get('public_ip', server), request.values['clientid'],
datetime.now(), gentoken(), sources[0]['id'])
live_source_thumbnail()
ret = Response('Redirect', 301, {'Location': '%i'%sources[0]['id']})
ret.autocorrect_location_header = False
return ret
if request.values['call'] == 'play':
source = (query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', request.values['name']) or [None])[0]
if not source:
return 'Not found', 404
for net in config.get('INTERNAL_IP_RANGES', []):
if ip_address(request.values['addr']) in ip_network(net):
return 'Ok', 200
if source['preview_key'] == request.values.get('preview_key'):
return 'Ok', 200
return 'Forbidden', 403
elif request.values['call'] == 'publish_done':
source = (query('SELECT * FROM live_sources WHERE server = ? AND clientid = ?', server, request.values['clientid']) or [None])[0]
modify('UPDATE live_sources SET server = NULL, clientid = NULL, preview_key = NULL, last_active = ? WHERE server = ? AND clientid = ?', datetime.now(), server, request.values['clientid'])
modify('UPDATE live_sources SET server = NULL, clientid = NULL, preview_key = NULL, last_active = ? WHERE server = ? AND clientid = ?',
datetime.now(), server, request.values['clientid'])
if not source:
return 'Ok', 200
for lecture in query('SELECT * FROM lectures WHERE stream_job IS NOT NULL'):
......@@ -159,23 +179,65 @@ def streamauth(server):
if str(source['id']) in [str(settings.get('source1')), str(settings.get('source2'))]:
cancel_job(lecture['stream_job'])
return 'Ok', 200
elif request.values['call'] == 'play':
source = (query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', request.values['name']) or [None])[0]
if not source:
return 'Not found', 404
if ip_in_networks(request.values['addr'], config.get('INTERNAL_IP_RANGES', [])):
return 'Ok', 200
if source['preview_key'] == request.values.get('preview_key'):
return 'Ok', 200
return 'Forbidden', 403
return 'Bad request', 400
def schedule_livestream(lecture_id):
def build_filter(l):
return ','.join(l) if l else None
server = 'rwth.video'
# info: sql no test cover
# pylint: disable=too-many-branches,too-many-statements
lecture = query('SELECT * FROM lectures WHERE id = ?', lecture_id)[0]
settings = json.loads(lecture['stream_settings'])
data = {'src1': {'afilter': [], 'vfilter': []}, 'src2': {'afilter': [], 'vfilter': []}, 'afilter': [], 'videoag_logo': int(bool(settings.get('video_showlogo'))), 'lecture_id': lecture['id']}
# Server that receives transcoded streams and generates HLS data, later
# (hopefully) overwritten with one of the source's ingestion servers to
# reduce the number of servers the stream' stability relies on
dest_server = 'rwth.video'
# Used by complex_live_transcode.c (ffworker) to open the sources and
# construct a ffmpeg filter graph <https://ffmpeg.org/ffmpeg-filters.html>:
#
# Audio graph
# src1 -> {src1.afilter} \
# amix -> {data.afilter} -> output
# src2 -> {src2.afilter} /
# Video graph
# src1 -> {src1.vfilter} \
# {vmix} -> scale=1920:1080 -> opt. logo overlay -> output
# src2 -> {src2.vfilter} /
data = {
'src1':
{
#'url': 'rtmp://...',
'afilter': [],
'vfilter': [],
},
'src2': {
#'url': 'rtmp://...',
'afilter': [],
'vfilter': [],
},
'afilter': [],
#'vmix': 'streamselect=map=0',
'videoag_logo': int(bool(settings.get('video_showlogo'))),
'lecture_id': lecture['id'],
#'destbase': 'rtmp://...'
}
# afilter/vfilter are lists here to simplify the code below and must be
# converted to a single filter expression afterwards.
src1 = (query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', settings.get('source1')) or [{}])[0]
src2 = (query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', settings.get('source2')) or [{}])[0]
for idx, obj in zip([1,2], [src1, src2]):
if obj:
server = obj['server']
data['src%i'%idx]['url'] = 'rtmp://%s/src/%i'%(obj['server'], obj['id'])
if not obj['clientid']:
flash('Quelle „%s“ ist nicht aktiv!'%obj['name'])
for idx, src in zip([1, 2], [src1, src2]):
if src:
dest_server = src['server']
data['src%i'%idx]['url'] = 'rtmp://%s/src/%i'%(src['server'], src['id'])
if not src['clientid']:
flash('Quelle „%s“ ist nicht aktiv!'%src['name'])
return None
if settings.get('source%i_deinterlace'%idx):
data['src%i'%idx]['vfilter'].append('yadif')
......@@ -191,7 +253,8 @@ def schedule_livestream(lecture_id):
elif mode == 'off':
data['src%i'%idx]['afilter'].append('pan=mono|c0=0*c0')
else:
raise(Exception())
raise Exception()
data['destbase'] = 'rtmp://%s/hls/%i'%(dest_server, lecture['id'])
mode = settings.get('videomode')
if mode == '1':
data['vmix'] = 'streamselect=map=0'
......@@ -211,12 +274,15 @@ def schedule_livestream(lecture_id):
data['vmix'] = 'hstack,pad=1920:1080:0:270'
if settings.get('audio_normalize'):
data['afilter'].append('loudnorm')
# Filter setup done, now lists of ffmpeg filter expressions must be
# converted to single expressions
def build_filter(exprs):
return ','.join(exprs) if exprs else None
data['afilter'] = build_filter(data['afilter'])
data['src1']['afilter'] = build_filter(data['src1']['afilter'])
data['src1']['vfilter'] = build_filter(data['src1']['vfilter'])
data['src2']['afilter'] = build_filter(data['src2']['afilter'])
data['src2']['vfilter'] = build_filter(data['src2']['vfilter'])
data['destbase'] = 'rtmp://%s/hls/%i'%(server, lecture['id'])
if lecture['stream_job']:
flash('Stream läuft bereits!')
return None
......@@ -229,12 +295,13 @@ def schedule_livestream(lecture_id):
return job_id
@job_handler('complex_live_transcode', state='failed')
def restart_failed_complex_live_transcode(id, type, data, state, status): # pylint: disable=unused-argument
	"""Job handler: restart a failed complex live transcode so the stream resumes."""
	restart_job(id)
@job_handler('complex_live_transcode', state='failed')
@job_handler('complex_live_transcode', state='finished')
def cleanup_after_complex_live_transcode_ended(id, type, data, state, status):
def cleanup_after_complex_live_transcode_ended(id, type, data, state, status): # pylint: disable=unused-argument
# info: sql no test cover
job = query('SELECT * FROM jobs WHERE id = ?', id, nlfix=False)[0]
if state == 'finished' or (state == 'failed' and job['canceled']):
modify('UPDATE lectures_data SET stream_job = NULL WHERE stream_job = ?', id)
......@@ -242,6 +309,7 @@ def cleanup_after_complex_live_transcode_ended(id, type, data, state, status):
@app.route('/internal/streaming/control', methods=['POST'])
@mod_required
def control_stream():
# info: sql no test cover
action = request.values['action']
lecture_id = int(request.values['lecture_id'])
course = (query('SELECT courses.* FROM courses JOIN lectures ON (courses.id = lectures.course_id) WHERE lectures.id = ?', lecture_id) or [None])[0]
......
from server import *
from email.message import EmailMessage
import smtplib
import traceback
from server import *
def send_message(msgtype, recipients, **kwargs):
msg = EmailMessage()
msg['From'] = config['MAIL_FROM']
msg['To'] = ', '.join([r.replace(',', '') for r in recipients])
cc = kwargs.pop('cc', [])
cc = kwargs.pop('cc', []) #pylint: disable=invalid-name
if cc:
msg['Cc'] = ', '.join([r.replace(',', '') for r in cc])
try:
......@@ -16,13 +16,14 @@ def send_message(msgtype, recipients, **kwargs):
msg.set_content(render_template('mails/'+msgtype+'.body', **kwargs))
if not config.get('MAIL_SERVER'):
return
s = smtplib.SMTP(config['MAIL_SERVER'])
mailserver = smtplib.SMTP(config['MAIL_SERVER'])
if config.get('MAIL_ADDRESS_OVERWRITE'):
s.send_message(msg, to_addrs=[config['MAIL_ADDRESS_OVERWRITE']])
mailserver.send_message(msg, to_addrs=[config['MAIL_ADDRESS_OVERWRITE']])
else:
s.send_message(msg)
s.quit()
except:
mailserver.send_message(msg)
mailserver.quit()
except: #pylint: disable=bare-except
# we must not raise an exception here, else we would send another mail, rinse and repeat
traceback.print_exc()
def notify_users(msgtype, uids, **kwargs):
......@@ -41,14 +42,14 @@ def notify_users(msgtype, uids, **kwargs):
config['MAIL_SUFFIX']))
else:
recipients.append('%s@%s'%(user[0]['fsacc'], config['MAIL_SUFFIX']))
cc = kwargs.get('cc', [])
cc = kwargs.get('cc', []) #pylint: disable=invalid-name
if kwargs.pop('importend', False):
cc.append(config['MAIL_DEFAULT'])
if kwargs.pop('notify_admins', False):
cc.append(config['MAIL_ADMINS'])
if not recipients:
recipients = cc
cc = []
cc = [] #pylint: disable=invalid-name
if not recipients:
return
kwargs['cc'] = cc
......@@ -64,7 +65,8 @@ def notify_mods(msgtype, course_id, **kwargs):
def notify_admins(msgtype, **kwargs):
	"""Send a mail of template `msgtype` to the admin address; never raises."""
	try:
		send_message(msgtype, [config['MAIL_ADMINS']], **kwargs)
	except: #pylint: disable=bare-except
		# we must not raise an exception here, else we would send another mail, rinse and repeat
		traceback.print_exc()
@app.route('/internal/user/<int:user>/notifications')
......@@ -72,4 +74,3 @@ def notify_admins(msgtype, **kwargs):
@mod_required
def user_notifications(user):
return render_template('notifications.html', user=query('SELECT * FROM users WHERE id = ?', user)[0])
from server import *
import icalendar
import requests
from server import *
def get_next_meeting():
ical = requests.get(config['ICAL_URL']).content
events = icalendar.Calendar.from_ical(ical).walk('VEVENT')
......@@ -11,25 +12,24 @@ def get_next_meeting():
for event in events:
try:
start = event['DTSTART'].dt.date()
if 'VIDEO' != event['SUMMARY'].upper() or start < now or start > now+delta:
if event['SUMMARY'].upper() != 'VIDEO' or start < now or start > now+delta:
continue
meetings.append(event)
except:
except KeyError:
pass
if not meetings:
return
return None, None
event = sorted(meetings, key=lambda e: e['DTSTART'].dt)[0]
return str(event['UID']), event['DTSTART'].dt.replace(tzinfo=None)
@sched_func(60*60)
def update_meeting():
try:
uid, start = get_next_meeting()
except:
if uid is None:
return
text = 'Die nächste Video AG-Sitzung findet am %s ab %s Uhr in den Räumlichkeiten der Fachschaft im Augustinerbach 2a statt.'%(
human_date(start), human_time(start))
modify('''REPLACE INTO announcements
(extid, text, level, visible, time_publish, time_expire, time_created, time_updated, created_by)
VALUES (?, ?, 0, 1, ?, ?, ?, ?, 0)''',
VALUES (?, ?, 0, true, ?, ?, ?, ?, 0)''',
'ical:'+uid, text, start-timedelta(days=7), start+timedelta(hours=2), datetime.now(), datetime.now())
......@@ -26,21 +26,22 @@ http {
keepalive_timeout 65;
types_hash_max_size 2048;
server {
#listen 5000;
listen 5000;
#listen [::]:5000;
listen localhost:5000;
#listen localhost:5000;
error_page 502 /static/500.html;
location /static/ {
root .;
}
location /files/ {
auth_request /auth;
auth_request /internal/auth;
auth_request_set $trackingcookie $upstream_http_set_cookie;
# For use with sshfs (recommended)
#alias /mnt/videoag/srv/videoag/released/;
#add_header Set-Cookie $trackingcookie;
# For use without sshfs
proxy_pass https://videoag.fsmpi.rwth-aachen.de/;
# NO TRAILING SLASH so that /files/ will not be skipped of the request!
proxy_pass https://videoag.fsmpi.rwth-aachen.de;
proxy_set_header Host "videoag.fsmpi.rwth-aachen.de";
proxy_set_header Set-Cookie $trackingcookie;
}
......
#!/usr/bin/python3
# Development helper: run the server under the Werkzeug profiler, printing
# the 30 most expensive calls for every request.
try:
	# ProfilerMiddleware moved between Werkzeug releases; try the old
	# location first and fall back to the new one.
	from werkzeug.contrib.profiler import ProfilerMiddleware
except ImportError:
	from werkzeug.middleware.profiler import ProfilerMiddleware
from server import app
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])
app.run(debug=True)
# required
flask
requests
# optional
lxml
pytz
ldap3
icalendar
mysql-connector-python
coverage
psycopg[c]
\ No newline at end of file
......@@ -3,17 +3,16 @@ import unittest
import os
import server
def set_up():
	"""Put the Flask app into testing mode before the suite runs."""
	server.app.testing = True

def tear_down():
	"""Remove the temporary SQLite database created for the test run."""
	os.unlink(server.app.config['SQLITE_DB'])

if __name__ == '__main__':
	set_up()
	try:
		suite = unittest.defaultTestLoader.discover('./tests/', pattern="*") #pylint: disable=invalid-name
		unittest.TextTestRunner(verbosity=2, failfast=True).run(suite)
	finally:
		# Always clean up the database, even if the suite aborts.
		tear_down()
from server import *
import threading
import sched
from time import sleep
scheduler = sched.scheduler()
from server import *
scheduler = sched.scheduler() # pylint: disable=invalid-name
def run_scheduler():
	"""Endless loop driving the module-level sched.scheduler; meant for a background thread."""
	sleep(1) # UWSGI does weird things on startup
	while True:
		# run() returns once the queue is drained; poll again after 10s.
		scheduler.run()
		sleep(10)
def sched_func(delay, priority=0, firstdelay=None, args=[], kargs={}):
if firstdelay == None:
def sched_func(delay, priority=0, firstdelay=None, args=None, kargs=None):
args = args or []
kargs = kargs or {}
if firstdelay is None:
firstdelay = random.randint(1, 120)
def wrapper(func):
def sched_wrapper():
......@@ -20,7 +23,7 @@ def sched_func(delay, priority=0, firstdelay=None, args=[], kargs={}):
if config.get('DEBUG', False):
print("Scheduler: started {} (frequency 1/{}s)".format(func.__name__, delay))
func(*args, **kargs)
except Exception:
except Exception: # pylint: disable=broad-except
traceback.print_exc()
notify_admins('scheduler_exception', name=func.__name__,
traceback=traceback.format_exc())
......
This diff is collapsed.
from server import *
import traceback
import os.path
from server import *
@app.route('/internal/sort/log')
@register_navbar('Sortierlog', icon='sort-by-attributes-alt', group='weitere')
@mod_required
......@@ -16,9 +17,9 @@ def sort_log():
FROM sortlog
JOIN lectures ON lectures.id = sortlog.lecture_id
JOIN courses ON courses.id = lectures.course_id
ORDER BY sortlog.`when` DESC
ORDER BY sortlog."when" DESC
LIMIT 50
'''),sorterrorlog=query('SELECT * FROM sorterrorlog ORDER BY sorterrorlog.`when` DESC'))
'''), sorterrorlog=query('SELECT * FROM sorterrorlog ORDER BY sorterrorlog."when" DESC'))
def to_ascii(inputstring):
asciistring = inputstring
......@@ -27,7 +28,8 @@ def to_ascii(inputstring):
return asciistring
@job_handler('probe', 'remux', 'transcode')
def update_video_metadata(jobid, jobtype, data, state, status):
def update_video_metadata(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
# info: sql no test cover
if 'video_id' not in data:
return
if jobtype not in ['remux', 'transcode']:
......@@ -54,18 +56,20 @@ def add_thumbnail_job():
schedule_thumbnail(int(request.values['lectureid']))
return redirect(request.values.get('ref', url_for('jobs_overview')))
def insert_video(lectureid, dbfilepath, fileformatid, hash="", filesize=-1, duration=-1, sourceid=None):
def insert_video(lectureid, dbfilepath, fileformatid, hash="", filesize=-1, duration=-1, sourceid=None): #pylint: disable=too-many-arguments
# info: sql no test cover
visible = query('SELECT courses.autovisible FROM courses JOIN lectures ON lectures.course_id = courses.id WHERE lectures.id = ?', lectureid)[0]['autovisible']
video_id = modify('''INSERT INTO videos_data
(lecture_id, visible, path, video_format, title, comment, internal, file_modified, time_created, time_updated, created_by, hash, file_size, duration, source)
VALUES
(?, ?, ?, ?, "", "", "", ?, ?, ?, ?, ?, ?, ?, ?)''',
lectureid, visible, dbfilepath, fileformatid, datetime.now(), datetime.now(), datetime.now(), -1, hash, filesize, duration, sourceid)
(?, ?, ?, ?, '', '', '', ?, ?, ?, ?, ?, ?, ?, ?)''',
lectureid, visible, dbfilepath, fileformatid, datetime.now(), datetime.now(), datetime.now(), -1, hash, filesize, duration, sourceid,
get_id=True)
if not sourceid:
query('INSERT INTO sortlog (lecture_id,video_id,path,`when`) VALUES (?,?,?,?)', lectureid, video_id, dbfilepath, datetime.now())
query('INSERT INTO sortlog (lecture_id,video_id,path,"when") VALUES (?,?,?,?)', lectureid, video_id, dbfilepath, datetime.now())
schedule_job('probe', {'path': dbfilepath, 'lecture_id': lectureid, 'video_id': video_id, 'import-chapters': True})
schedule_thumbnail(lectureid)
video = query('SELECT videos.*, "format" AS sep, formats.* FROM videos JOIN formats ON formats.id = videos.video_format WHERE videos.id = ?', video_id)[0]
video = query('SELECT videos.*, \'format\' AS sep, formats.* FROM videos JOIN formats ON formats.id = videos.video_format WHERE videos.id = ?', video_id)[0]
lecture = query('SELECT * FROM lectures WHERE id = ?', lectureid)[0]
course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0]
notify_mods('new_video', course['id'], course=course, lecture=lecture, video=video)
......@@ -75,25 +79,25 @@ def split_filename(filename):
# '_' and ' ' are handled like '-'
return filename.replace('_', '-').replace(' ', '-').split('-')
def parse_filename(filename):
	"""Extract date, time and keyword chunks from a pre-split file name.

	filenames: <handle>-<sorter>-<format>.<ext>, split at '-' into an array.
	Returns a dict with a 'keywords' list and, when present, 'date'
	(datetime.date) and 'time' (datetime.time) keys.
	"""
	data = {'keywords': []}
	for chunk in filename:
		chunk = chunk.replace('.mp4', '').replace('.webm', '')
		#-<YYMMDD> (date)
		#-<HHMM> (time)
		#-<keyword>
		# Looking for keywords in: title,speaker,comment, comma separated list in internal
		try:
			if len(chunk) == 6:
				data['date'] = datetime.strptime(chunk, '%y%m%d').date()
			elif len(chunk) == 4:
				data['time'] = datetime.strptime(chunk, '%H%M').time()
			else:
				data['keywords'].append(chunk)
		except ValueError:
			# if it's not a valid date or time, handle it as a keyword
			data['keywords'].append(chunk)
	return data
def filter_lectures_by_datetime(lectures, date, time):
......@@ -121,11 +125,11 @@ def filter_lectures_by_keywords(lectures, keywords):
return [lecture]
return []
def extract_format_keyword_from_filename(filename):
	"""Return the format keyword: the last chunk of the pre-split file name,
	with its extension stripped and lowercased."""
	return filename[-1].split('.', 1)[0].lower()
def filter_formats_by_filename(splitFileName):
formatstring = extract_format_keyword_from_filename(splitFileName)
def filter_formats_by_filename(filename):
formatstring = extract_format_keyword_from_filename(filename)
formats = query('SELECT * FROM formats ORDER BY prio DESC')
for videoformat in formats:
# we match the last part of the file name without the extension
......@@ -135,11 +139,11 @@ def filter_formats_by_filename(splitFileName):
return 0
def sort_file(filename, course=None, lectures=None):
splitFileName = split_filename(filename)
filename = split_filename(filename)
if not course:
handle = splitFileName[0]
if splitFileName[0].endswith('ws') or splitFileName[0].endswith('ss'):
handle = '-'.join(splitFileName[:2])
handle = filename[0]
if filename[0].endswith('ws') or filename[0].endswith('ss'):
handle = '-'.join(filename[:2])
courses = query('SELECT * FROM courses WHERE handle = ?', handle)
if not courses:
return [], 0
......@@ -147,45 +151,30 @@ def sort_file(filename, course=None, lectures=None):
if not lectures:
lectures = query('SELECT * from lectures where course_id = ?', course['id'])
# parse all data from the file name
data = parse_filename(splitFileName)
data = parse_filename(filename)
# try to match the file on a single lecture
matches = filter_lectures_by_datetime(lectures, data.get('date'), data.get('time'))
# if we can't match exactly based on date and time, we have to match keywords
if ((len(matches) != 1) and (len(data['keywords']) > 0)):
if not matches:
if matches:
# only test lectures with the correct date/time, if we have any
matches = filter_lectures_by_keywords(matches, data['keywords'])
else:
# Else test for matches in all lectures of this course
matches = filter_lectures_by_keywords(lectures, data['keywords'])
# now we should have found exactly one match
fmt = filter_formats_by_filename(splitFileName)
fmt = filter_formats_by_filename(filename)
return matches, fmt
def log_sort_error(course_id, path, matches):
	"""Record a failed sort attempt: the file at `path` matched zero or several lectures.

	`matches` is the list of candidate lecture rows; their ids are stored
	comma-separated for later inspection in the sort error log.
	"""
	matches_id = [str(match['id']) for match in matches]
	# "when" is double-quoted: it is a reserved word in some SQL dialects.
	query('INSERT INTO sorterrorlog_data (course_id, path, matches, "when", time_updated, time_created) VALUES (?, ?, ?, ?, ?, ?)',
		course_id, path, ','.join(matches_id), datetime.now(), datetime.now(), datetime.now())
def sort_api_token_required(func):
	"""Decorator requiring the sorter API key on a route.

	The key is taken from the request's query/form values or, failing that,
	from a JSON body; a mismatch yields 403 before the route body runs.
	"""
	@wraps(func)
	def decorator(*args, **kwargs):
		if 'apikey' in request.values:
			token = request.values['apikey']
		elif request.get_json() and ('apikey' in request.get_json()):
			token = request.get_json()['apikey']
		else:
			token = None
		# NOTE(review): the default [None] (a list) can never equal a string
		# token, so requests are always rejected when SORTER_API_KEY is
		# unconfigured — presumably intentional fail-closed; confirm.
		if token != config.get('SORTER_API_KEY', [None]):
			return 'Permission denied', 403
		return func(*args, **kwargs)
	return decorator
@app.route('/internal/sort/encoded/<filename>')
@sort_api_token_required
@api_token_required('SORTER_API_KEY')
def sort_encoded(filename):
matches, fmt = sort_file(filename)
if len(matches) != 1:
......@@ -198,11 +187,11 @@ def sort_encoded(filename):
return 'OK', 200
@app.route('/internal/sort/autoencode')
@sort_api_token_required
@api_token_required('SORTER_API_KEY')
def sort_autoencode():
filename = request.values['path']
path = 'autoencode/'+filename
matches, fmt = sort_file(filename)
matches, fmt = sort_file(filename) #pylint: disable=unused-variable
if len(matches) != 1:
log_sort_error(-1, 'raw/'+path, matches)
return "Could not match filename", 400
......@@ -211,7 +200,7 @@ def sort_autoencode():
return 'OK', 200
@job_handler('publish_video')
def handle_published_video(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
	"""Job-queue callback: register a freshly published video file.

	Silently ignores jobs that do not carry both a lecture id and a
	format id in their payload.
	"""
	if 'lecture_id' not in data or 'format_id' not in data:
		return
	insert_video(data['lecture_id'], data['path'], data['format_id'], hash=status['hash'], filesize=status['filesize'], duration=status['duration'])
......@@ -240,7 +229,8 @@ def sort_now():
# if the video is in the table "videos" already (with the correct course), skip it
if os.path.basename(filename) in ignorefiles:
continue
if not os.path.splitext(filename)[1] == '.mp4':
ext = os.path.splitext(filename)[1]
if not ext == '.mp4' and not ext == '.webm':
continue
matches, fmt = sort_file(filename, course=course, lectures=lectures)
dbfilepath = mountpoint['prefix']+course['handle']+'/'+filename
......@@ -248,11 +238,10 @@ def sort_now():
insert_video(matches[0]['id'], dbfilepath, fmt)
else:
log_sort_error(course['id'], dbfilepath, matches)
except Exception:
except: #pylint: disable=bare-except
traceback.print_exc()
modify('COMMIT')
if 'ref' in request.values:
return redirect(request.values['ref'])
else:
return 'OK', 200
This diff is collapsed.
......@@ -139,11 +139,11 @@ var moderator = {
changeboxclick: function(src) {
var value = $(src)[0].checked;
var path = $(src).data('path');
moderator.api.set(path,value ? 1 : 0);
moderator.api.set(path,value ? true : false);
},
deletebtnclick: function(src) {
if (confirm('Really delete this?')) {
moderator.api.set($(src).data('path'),1,true);
moderator.api.set($(src).data('path'),true,true);
}
}
},
......@@ -168,7 +168,7 @@ var moderator = {
var type = $(srcel).data('type');
$('#editpermdiv').data('id',id);
$('#editpermdiv').data('type',type);
var html = ''
var permElems = []
for (i in moderator.permissioneditor.permissions) {
if ((moderator.permissioneditor.permissions[i][type+'_id'] == id)) {
var perm = {};
......@@ -197,11 +197,21 @@ var moderator = {
case 'l2p':
permstring = '(' + perm.param1 + ')'
break;
case 'moodle':
permstring = '(' + perm.param1 + ')'
break;
}
html += '<option data-id="'+perm.id+'" data-type="'+perm.type+'" data-param1="'+perm.param1+'" data-param2="'+perm.param2+'">#'+perm.id+' '+perm.type+' '+ permstring +'</option>';
var optionEl = document.createElement('option');
optionEl.dataset.id = perm.id;
optionEl.dataset.type = perm.type;
optionEl.dataset.param1 = perm.param1;
optionEl.dataset.param2 = perm.param2;
optionEl.text = '#' + perm.id + ' ' + perm.type + ' ' + permstring;
permElems.push(optionEl)
}
}
$('#permissionlist').html(html);
document.querySelector('#permissionlist').replaceChildren(...permElems);
});
},
......@@ -220,11 +230,14 @@ var moderator = {
case 'l2p':
$(".authl2p", container).val(perm.param1);
break
case 'moodle':
$(".authmoodle", container).val(perm.param1);
break
}
$(".authtype option[value="+perm.type+"]").prop("selected", true);
},
delbtnclick: function (element) {
moderator.api.set("perm."+$("#permissionlist option:selected", element.parentElement).data('id')+".deleted",1,true);
moderator.api.set("perm."+$("#permissionlist option:selected", element.parentElement).data('id')+".deleted",true,true);
},
addbtnclick: function (element) {
var container = element.parentElement;
......@@ -266,6 +279,9 @@ var moderator = {
case 'l2p':
perm.param1 = $(".authl2p", container).val();
break
case 'moodle':
perm.param1 = $(".authmoodle", container).val();
break
}
return perm;
},
......@@ -277,18 +293,27 @@ var moderator = {
$(".authuser", container).val('');
$(".authpassword", container).val('');
$(".authl2p", container).val('');
$(".authmoodle", container).val('');
switch (type) {
case 'password':
$(".passwordinput",container).show();
$(".authl2p",container).hide();
$(".authmoodle",container).hide();
break;
case 'l2p':
$(".passwordinput",container).hide();
$(".authl2p",container).show();
$(".authmoodle",container).hide();
break;
case 'moodle':
$(".passwordinput",container).hide();
$(".authl2p",container).hide();
$(".authmoodle",container).show();
break;
default:
$(".passwordinput",container).hide();
$(".authl2p",container).hide();
$(".authmoodle",container).hide();
break;
}
},
......
static/moodle.png

770 B

......@@ -137,10 +137,23 @@ th.rotate {
th.rotate > div {
	/* Rotate the header label vertically; the -100px translation pulls the
	   rotated text back into the (taller) header cell. */
	transform:
		rotate(270deg)
		translate(-100px,0px);
	width: 30px;
}
th.rotate > div > span {
padding: 5px 10px;
}
.tooltip-inner {
max-width: 500px;
}
#cutprogress td {
white-space: nowrap;
}
#cutprogress.table tr.weekbreak td {
border-top: 2px solid black !important;
}
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Source diff could not be displayed: it is too large. Options to address this: view the blob.