Compare revisions

Commits on Source (22)
......@@ -12,3 +12,4 @@ nginx.conf
uwsgi.sock
.coverage
htmlcov/
.idea/
......@@ -15,7 +15,7 @@ def import_xmp_chapters(jobid, jobtype, data, state, status): #pylint: disable=u
if int(chapter['time']) in times:
continue
modify(
'INSERT INTO chapters (lecture_id, time, text, visible, time_created, time_updated) VALUES (?, ?, ?, 0, ?, ?)',
'INSERT INTO chapters (lecture_id, time, text, visible, time_created, time_updated) VALUES (?, ?, ?, false, ?, ?)',
data['lecture_id'], int(chapter['time']), chapter['text'],
datetime.now(), datetime.now()
)
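Background for the 0 -> false change above: the PostgreSQL schema presumably declares visible as boolean, and a boolean column rejects the integer literal 0, while the keyword false is accepted by MySQL (stored as 0), SQLite and PostgreSQL alike. A minimal sketch of the resulting call, with placeholder values:

# Sketch only; lecture_id and the chapter values are made up.
modify(
    'INSERT INTO chapters (lecture_id, time, text, visible, time_created, time_updated) VALUES (?, ?, ?, false, ?, ?)',
    lecture_id, 0, 'Intro', datetime.now(), datetime.now()
)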
......@@ -40,7 +40,8 @@ def suggest_chapter(lectureid):
course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0]
id = modify(
'INSERT INTO chapters (lecture_id, time, text, time_created, time_updated, created_by, submitted_by) VALUES (?, ?, ?, ?, ?, ?, ?)',
lectureid, chapter_start, text, datetime.now(), datetime.now(), session.get('user', {'dbid':None})['dbid'], submitter
lectureid, chapter_start, text, datetime.now(), datetime.now(), session.get('user', {'dbid':None})['dbid'], submitter,
get_id=True
)
chapter = query('SELECT * FROM chapters WHERE id = ?', id)[0]
if not ismod():
......
# Defaults for development, do not use in production!
DEBUG = False
SERVER_IP = 'localhost'
VIDEOPREFIX = 'https://videoag.fsmpi.rwth-aachen.de'
VIDEOPREFIX = '/files'
VIDEOMOUNT = [{'mountpoint': 'files/protected/', 'prefix':'protected/'},{'mountpoint':'files/pub/','prefix':'pub/' }, {'mountpoint':'files/vpnonline/','prefix':'vpnonline/' }]
#SECRET_KEY = 'something random'
......@@ -16,6 +16,13 @@ DB_DATA = 'db_example.sql'
#MYSQL_PASSWD = 'somuchsecret'
#MYSQL_DB = 'videos'
#DB_ENGINE = 'postgres'
POSTGRES_HOST = '10.0.0.101'
POSTGRES_PORT = 5432
POSTGRES_USER = 'videoag'
POSTGRES_PASSWORD = ''
POSTGRES_DATABASE = 'videoag'
DB_ENGINE = 'sqlite'
SQLITE_DB = 'db.sqlite'
SQLITE_INIT_SCHEMA = True
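For completeness, a hypothetical local override that switches the development default from SQLite to the new PostgreSQL backend; only the keys introduced above are assumed, all values are placeholders:

# local config override (hypothetical values)
DB_ENGINE = 'postgres'
POSTGRES_HOST = 'localhost'
POSTGRES_PORT = 5432
POSTGRES_USER = 'videoag'
POSTGRES_PASSWORD = 'changeme'
POSTGRES_DATABASE = 'videoag'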
......
......@@ -40,15 +40,21 @@ def cutprogress(user=None):
lectures.course_id,
lectures.time,
lectures.title,
COALESCE(video_counts.videos_total, 0),
COALESCE(video_counts.videos_visible, 0)
FROM lectures
JOIN courses ON ( courses.id = lectures.course_id )
LEFT JOIN (
SELECT
videos.lecture_id,
COUNT(videos.id) as videos_total,
COUNT(videos.visible) as videos_visible
FROM lectures
JOIN courses ON courses.id = lectures.course_id
LEFT JOIN videos ON lectures.id = videos.lecture_id
FROM videos
GROUP BY videos.lecture_id
) AS video_counts ON ( video_counts.lecture_id = lectures.id )
WHERE courses.id = ?
AND lectures.time <= ?
AND NOT lectures.norecording
GROUP BY lectures.id
ORDER BY lectures.time ASC, lectures.id ASC
''', course['id'], datetime.now())
# Generate list of days, figure out when weeks change
......
......@@ -58,6 +58,7 @@ elif config['DB_ENGINE'] == 'mysql':
port=config.get('MYSQL_PORT', 3306),
unix_socket=config.get('MYSQL_UNIX', None),
database=config['MYSQL_DB'])
g.db.cmd_query("SET SESSION sql_mode = 'ANSI_QUOTES'")
if not hasattr(request, 'db'):
request.db = g.db.cursor()
return request.db
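The new SET SESSION sql_mode = 'ANSI_QUOTES' statement is what makes the backtick-to-double-quote rewrite seen throughout this diff possible: with that mode enabled MySQL treats double quotes as identifier quotes (as PostgreSQL and SQLite do by default), so reserved column names can be written portably, and string literals consequently have to use single quotes. A small illustrative call against the changelog table used below ('title' is a made-up field value):

# Under ANSI_QUOTES the double-quoted names are identifiers on MySQL too,
# so this statement parses the same on MySQL, SQLite and PostgreSQL.
rows = query('SELECT "table", "when" FROM changelog WHERE field = ? ORDER BY "when" DESC', 'title')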
......@@ -89,6 +90,29 @@ elif config['DB_ENGINE'] == 'mysql':
cur.close()
db.close()
return res
elif config['DB_ENGINE'] == 'postgres':
import psycopg2 # pylint: disable=import-error
def get_dbcursor():
if 'db' not in g or g.db.closed:
g.db = psycopg2.connect(
host=config["POSTGRES_HOST"],
port=config["POSTGRES_PORT"],
user=config["POSTGRES_USER"],
password=config["POSTGRES_PASSWORD"],
dbname=config["POSTGRES_DATABASE"]
)
if not hasattr(request, 'db'):
request.db = g.db.cursor()
return request.db
def fix_query(operation, params):
operation = operation.replace('?', '%s')
params = [(p.replace(microsecond=0) if isinstance(p, datetime) else p) for p in params]
return operation, params
def show(operation, host=None): #pylint: disable=unused-argument
return {}
def query(operation, *params, delim="sep", nlfix=True):
operation, params = fix_query(operation, params)
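An illustrative trace of the new fix_query() helper (values made up): the '?' placeholders used throughout the code base are rewritten to the %s style that psycopg2 expects, and datetime parameters are truncated to whole seconds.

op, params = fix_query('SELECT * FROM jobs WHERE time_created < ?',
                       (datetime(2024, 1, 1, 12, 0, 0, 123456),))
# op     == 'SELECT * FROM jobs WHERE time_created < %s'
# params == [datetime(2024, 1, 1, 12, 0, 0)]   (microseconds dropped)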
......@@ -99,8 +123,8 @@ def query(operation, *params, delim="sep", nlfix=True):
try:
cur = get_dbcursor()
cur.execute(operation, params)
except mysql.connector.errors.InternalError as e:
if e.msg == 'Deadlock found when trying to get lock; try restarting transaction':
except Exception as e: # pylint: disable=broad-except
if str(e) == 'Deadlock found when trying to get lock; try restarting transaction':
tries += 1
retry = True
else:
......@@ -108,8 +132,8 @@ def query(operation, *params, delim="sep", nlfix=True):
rows = []
try:
rows = cur.fetchall()
except mysql.connector.errors.InterfaceError as e:
if e.msg == 'No result set to fetch from.':
except Exception as e: # pylint: disable=broad-except
if str(e) == 'no results to fetch' or str(e) == "the last operation didn't produce a result":
# no problem, we were just at the end of the result set
pass
else:
......@@ -128,11 +152,20 @@ def query(operation, *params, delim="sep", nlfix=True):
ptr[name] = col
return res
def modify(operation, *params):
def modify(operation, *params, get_id=False):
operation, params = fix_query(operation, params)
if get_id and config["DB_ENGINE"] == "postgres":
operation += " RETURNING id" # Not nice, but works for now
cur = get_dbcursor()
cur.execute(operation, params)
if not get_id:
return None
if config["DB_ENGINE"] != "postgres":
return cur.lastrowid
all_res = cur.fetchall()
if len(all_res) <= 0:
raise ValueError("Got no id")
return int(all_res[0][0])
@app.teardown_request
def commit_db(*args): #pylint: disable=unused-argument
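The new get_id flag gives callers a portable way to obtain the primary key of a freshly inserted row: MySQL and SQLite keep returning cursor.lastrowid, while on PostgreSQL the statement is suffixed with RETURNING id and the value is fetched back. A usage sketch mirroring the schedule_job() call site further down (column values are placeholders):

job_id = modify('INSERT INTO jobs (type, priority, queue, data, time_created) VALUES (?, ?, ?, ?, ?)',
                'probe', 0, 'default', '{}', datetime.now(),
                get_id=True)
job = query('SELECT * FROM jobs WHERE id = ?', job_id)[0]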
......@@ -145,20 +178,3 @@ def close_db(*args): #pylint: disable=unused-argument
if 'db' in g:
g.db.close()
del g.db
def searchquery(text, columns, match, tables, suffix, *suffixparams):
params = []
subexprs = []
words = text.split(' ')
prio = len(words)+1
for word in words:
if word == '' or word.isspace():
continue
matchexpr = ' OR '.join(['%s LIKE ?'%column for column in match])
subexprs.append('SELECT %s, %s AS _prio FROM %s WHERE %s'%(columns, str(prio), tables, matchexpr))
params += ['%'+word+'%']*len(match)
prio -= 1
if subexprs == []:
return []
expr = 'SELECT *,SUM(_prio) AS _score FROM (%s) AS _tmp %s'%(' UNION '.join(subexprs), suffix)
return query(expr, *(list(params)+list(suffixparams)))
......@@ -8611,6 +8611,7 @@ INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,
INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,`title`,`comment`,`internal`,`path`,`file_modified`,`time_created`,`time_updated`,`created_by`,`file_size`,`video_format`,`hash`) VALUES (9681,7012,1,0,1,'','','','pub/16ss-dsal/16ss-dsal-160715-1080p_1.mp4','2016-08-07 22:54:46','2016-08-07 21:02:37','2016-08-07 21:02:43',46,1402602183,4,'e036f7cbd51afd3ab7be10cf77747c00');
INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,`title`,`comment`,`internal`,`path`,`file_modified`,`time_created`,`time_updated`,`created_by`,`file_size`,`video_format`,`hash`) VALUES (9682,7012,1,0,1,'','','','pub/16ss-dsal/16ss-dsal-160715-360p_1.mp4','2016-08-07 22:45:34','2016-08-07 21:02:38','2016-08-07 21:02:45',46,368611109,10,'fae2bda2da55a3005aa6329a2d0227c3');
INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,`title`,`comment`,`internal`,`path`,`file_modified`,`time_created`,`time_updated`,`created_by`,`file_size`,`video_format`,`hash`) VALUES (9683,7012,1,0,1,'','','','pub/16ss-dsal/16ss-dsal-160715-720p_1.mp4','2016-08-07 22:46:00','2016-08-07 21:02:40','2016-08-07 21:02:44',46,721141077,5,'083c0b7693c82078c513707d1402096b');
INSERT INTO `videos_data` (`id`,`lecture_id`,`visible`,`deleted`,`downloadable`,`title`,`comment`,`internal`,`path`,`file_modified`,`time_created`,`time_updated`,`created_by`,`file_size`,`video_format`,`hash`, `source`) VALUES (16080,7012,1,0,1,'','','','pub/17ws-cgbp/17ws-cgbp-171114-720p.mp4','2018-01-12 04:36:44','2018-01-12 04:36:44','2018-01-12 04:36:44',-1,607257928,5,"8fa956b14162ec42c1dabc11d53671c5",89);
INSERT INTO `users` (`id`,`name`,`realname`,`level`,`fsacc`,`last_login`,`calendar_key`,`rfc6238`) VALUES (1,'gustav1','Gustav Geier',0,'',NULL,'','');
INSERT INTO `users` (`id`,`name`,`realname`,`level`,`fsacc`,`last_login`,`calendar_key`,`rfc6238`) VALUES (2,'gustav2','Gustav Geier',0,'',NULL,'','');
INSERT INTO `users` (`id`,`name`,`realname`,`level`,`fsacc`,`last_login`,`calendar_key`,`rfc6238`) VALUES (4,'gustav4','Gustav Geier',0,'',NULL,'','');
......@@ -14646,4 +14647,5 @@ INSERT INTO `areas` (`area`,`abbreviation`,`default`,`rank`,`coordinates`) VALUE
INSERT INTO `profiles` (`name`,`format`) VALUES ('default',4);
INSERT INTO `profiles` (`name`,`format`) VALUES ('default',5);
INSERT INTO `profiles` (`name`,`format`) VALUES ('default',10);
INSERT INTO `sources` (`id`, `lecture_id`, `path`, `type`, `hash`, `time_created`) VALUES (89, 7012, 'autoencode/something', 'plain', '000000000', '2024-01-01 00:00:00');
COMMIT;
......@@ -151,8 +151,8 @@ def getfielddescription(inputpath):
def getfieldchangelog(inputpath):
path = parseeditpath(inputpath)
changelog = query('SELECT * FROM changelog \
LEFT JOIN users ON (changelog.who = users.id) WHERE `table` = ? AND `id_value` = ? and `field` = ? \
ORDER BY `when` DESC LIMIT 5', path['table'], path['id'], path['column'])
LEFT JOIN users ON (changelog.who = users.id) WHERE "table" = ? AND "id_value" = ? and "field" = ? \
ORDER BY "when" DESC LIMIT 5', path['table'], path['id'], path['column'])
for entry in changelog:
entry['id_value'] = str(entry['id_value'])
entry['value_new'] = str(entry['value_new'])
......@@ -180,9 +180,9 @@ def edit(prefix='', ignore=None):
key = prefix+key
path = parseeditpath(key)
modify('INSERT INTO changelog \
(`table`,id_value, id_key, field, value_new, value_old, `when`, who, executed) \
("table",id_value, id_key, field, value_new, value_old, "when", who, executed) \
VALUES (?,?,?,?,?, \
(SELECT `%s` FROM %s WHERE %s = ?),?,?,1)'%(
(SELECT "%s" FROM %s WHERE %s = ?),?,?,true)'%(
path['column'],
path['tableinfo']['table'],
path['tableinfo']['idcolumn']
......@@ -195,7 +195,7 @@ def edit(prefix='', ignore=None):
path['id'],
datetime.now(),
session['user']['dbid'])
modify('UPDATE %s SET `%s` = ?, time_updated = ? WHERE `%s` = ?'%(path['tableinfo']['table'], path['column'], path['tableinfo']['idcolumn']),
modify('UPDATE %s SET "%s" = ?, time_updated = ? WHERE "%s" = ?'%(path['tableinfo']['table'], path['column'], path['tableinfo']['idcolumn']),
val, datetime.now(), path['id'])
for func in edit_handlers.get(path['table'], {}).get(None, []):
func(path['table'], path['column'], val, path['id'], session['user']['dbid'])
......@@ -225,10 +225,12 @@ def create(table):
continue
assert column in list(editable_tables[table]['editable_fields'].keys())+editable_tables[table]['creationtime_fields']
assert column not in defaults
columns.append('`'+column+'`')
columns.append('"'+column+'"')
values.append(val)
assert editable_tables[table]['idcolumn'] == 'id'
id = modify('INSERT INTO %s (%s) VALUES (%s)'%(editable_tables[table]['table'],
','.join(columns), ','.join(['?']*len(values))), *values)
','.join(columns), ','.join(['?']*len(values))), *values,
get_id=True)
if table == 'courses':
set_responsible(id, session['user']['dbid'], 1)
if 'ref' in request.values:
......@@ -241,7 +243,7 @@ def create(table):
def changelog():
page = max(0, int(request.args.get('page', 0)))
pagesize = min(500, int(request.args.get('pagesize', 50)))
changelog = query('SELECT * FROM changelog LEFT JOIN users ON (changelog.who = users.id) ORDER BY `when` DESC LIMIT ? OFFSET ?', pagesize, page*pagesize)
changelog = query('SELECT * FROM changelog LEFT JOIN users ON (changelog.who = users.id) ORDER BY "when" DESC LIMIT ? OFFSET ?', pagesize, page*pagesize)
pagecount = math.ceil(query('SELECT count(id) as count FROM changelog')[0]['count']/pagesize)
for entry in changelog:
entry['path'] = '.'.join([entry['table'], entry['id_value'], entry['field']])
......@@ -253,7 +255,8 @@ def changelog():
@csrf_protect
def set_responsible(course_id, user_id, value):
if value:
modify('REPLACE INTO responsible (course_id, user_id) values (?, ?)', course_id, user_id)
if not query('SELECT id FROM responsible WHERE course_id = ? AND user_id = ?', course_id, user_id):
modify('INSERT INTO responsible (course_id, user_id) VALUES (?, ?)', course_id, user_id)
else:
modify('DELETE FROM responsible WHERE course_id = ? AND user_id = ?', course_id, user_id)
return "OK", 200
......
......@@ -117,6 +117,7 @@ def add_reencode_job():
@job_handler('probe-raw', 'intro')
def update_lecture_videos(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
# info: sql no test cover
if 'lecture_id' not in data:
return
if jobtype == 'probe-raw':
......
......@@ -20,16 +20,17 @@ def feed(handle=None):
if handle:
course = query('SELECT * FROM courses WHERE handle = ? AND visible', handle)[0]
course['atomid'] = gen_atomid('Video AG, courses['+str(course['id'])+']: '+course['handle'])
entries = query('''
SELECT lectures.*, "video" AS sep, videos.*, formats.description AS format_description, formats.prio, "course" AS sep, courses.*
course_id = course['id']
entries = query(f'''
SELECT lectures.*, 'video' AS sep, videos.*, formats.description AS format_description, formats.prio, \'course\' AS sep, courses.*
FROM lectures
JOIN courses ON (courses.id = lectures.course_id)
JOIN videos ON (lectures.id = videos.lecture_id)
JOIN formats ON (formats.id = videos.video_format)
WHERE ((? IS NULL AND courses.listed) OR course_id = ?) AND courses.visible AND lectures.visible AND videos.visible
WHERE {"courses.listed" if course_id is None else "course_id = ?"} AND courses.visible AND lectures.visible AND videos.visible
ORDER BY videos.time_created DESC, prio ASC
LIMIT 100''',
course['id'], course['id'])
*([] if course_id is None else [course_id]))
updated = max(course['time_updated'], course['time_created'], key=fixdate)
for entry in entries:
entry['updated'] = max(entry['video']['time_created'], entry['video']['time_updated'], entry['time_created'], entry['time_updated'], key=fixdate)
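A note on the f-string introduced above: the interpolated fragment is one of two fixed literals chosen in Python, and course_id is still passed as a bound parameter, so no request data ends up formatted into the SQL. Spelled out as a sketch:

if course_id is None:
    where_fragment, bound = "courses.listed", []
else:
    where_fragment, bound = "course_id = ?", [course_id]
# query(f'... WHERE {where_fragment} AND courses.visible ...', *bound)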
......@@ -52,11 +53,12 @@ def rss_feed(handle):
GROUP BY formats.id
ORDER BY formats.player_prio DESC''', course['id'])
if not formats:
# info: sql no test cover
formats = query('SELECT * FROM formats WHERE id = 4 OR id = 5 OR id = 10') # 360p, 720p, 1080p
if 'format_id' not in request.values:
return redirect(url_for('rss_feed', handle=handle, format_id=formats[0]['id']))
fmt = query('SELECT * FROM formats WHERE id = ?', request.values.get('format_id', request.values['format_id']))[0]
items = query('''SELECT lectures.*, "video" AS sep, videos.*
items = query('''SELECT lectures.*, 'video' AS sep, videos.*
FROM lectures
JOIN courses ON courses.id = lectures.course_id
JOIN videos ON lectures.id = videos.lecture_id
......
......@@ -57,7 +57,7 @@ def get_responsible():
@app.route('/internal/ical/all')
@calperm
def ical_all():
return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.*
return export_lectures(query('''SELECT lectures.*, 'course' AS sep, courses.*
FROM lectures JOIN courses ON courses.id = lectures.course_id
WHERE NOT norecording AND NOT external
ORDER BY time DESC LIMIT ?''', request.values.get('limit', 1000)),
......@@ -67,7 +67,7 @@ def ical_all():
@calperm
def ical_user(user):
username = query('SELECT name FROM users WHERE users.id = ?', user)[0]['name']
return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.*
return export_lectures(query('''SELECT lectures.*, 'course' AS sep, courses.*
FROM lectures
JOIN courses ON courses.id = lectures.course_id
JOIN responsible ON responsible.course_id = courses.id
......@@ -79,7 +79,7 @@ def ical_user(user):
@calperm
def ical_notuser(user):
username = query('SELECT name FROM users WHERE users.id = ?', user)[0]['name']
return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.*
return export_lectures(query('''SELECT lectures.*, 'course' AS sep, courses.*
FROM lectures
JOIN courses ON courses.id = lectures.course_id
LEFT JOIN responsible ON (responsible.course_id = courses.id AND responsible.user_id = ?)
......@@ -90,7 +90,7 @@ def ical_notuser(user):
@app.route('/internal/ical/course/<course>')
@calperm
def ical_course(course):
return export_lectures(query('''SELECT lectures.*, "course" AS sep, courses.*
return export_lectures(query('''SELECT lectures.*, 'course' AS sep, courses.*
FROM lectures JOIN courses ON courses.id = lectures.course_id
WHERE courses.handle = ? AND NOT norecording AND NOT external ORDER BY time DESC''', course),
get_responsible(), 'videoag_%s.ics'%course)
......@@ -21,8 +21,9 @@ def list_import_sources(id):
modify('INSERT INTO import_campus (url, type, course_id, last_checked, changed) VALUES (?, ?, ?, ?, 1)',
campus[i]['url'], campus[i]['type'], id, datetime.now())
else:
# info: sql no test cover
if campus[i]['url'] != '':
query('UPDATE import_campus SET url = ?, `type` = ? WHERE (course_id = ?) AND (id = ?)', campus[i]['url'], campus[i]['type'], id, int(i))
query('UPDATE import_campus SET url = ?, "type" = ? WHERE (course_id = ?) AND (id = ?)', campus[i]['url'], campus[i]['type'], id, int(i))
else:
query('DELETE FROM import_campus WHERE (id = ?) AND (course_id = ?)', int(i), id)
import_campus = query('SELECT * FROM import_campus WHERE course_id = ?', id)
......@@ -99,7 +100,8 @@ def fetch_co_course_events(i):
e['duration'] = int((datetime.strptime("%s %s"%(k, j['end']), fmt) - e['time']).seconds/60)
j['place'] = str(j['place'])
if j['place'] != '':
dbplace = query("SELECT name FROM places WHERE (campus_room = ?) OR (campus_name = ?) OR ((NOT campus_name) AND name = ?)",
# info: sql no test cover
dbplace = query("SELECT name FROM places WHERE (campus_room = ?) OR (campus_name = ?) OR (campus_name = '' AND name = ?)",
j['place'], j['place'], j['place'])
if dbplace:
e['place'] = dbplace[0]['name']
......@@ -158,6 +160,7 @@ def fetch_ro_course_events(item):
place = str(comp.get('LOCATION', ''))
if place:
campus_room = place.split('(')[-1].split(')')[0]
# info: sql no test cover
dbplace = query('SELECT name FROM places WHERE campus_room = ?', campus_room)
if dbplace:
event['place'] = dbplace[0]['name']
......
......@@ -30,14 +30,14 @@ def job_handler_handle(id, state):
def job_catch_broken():
# scheduled but never pinged
modify("BEGIN")
query('UPDATE jobs SET state="ready" WHERE state="scheduled" and time_scheduled < ?', datetime.now() - timedelta(seconds=10))
query('UPDATE jobs SET state=\'ready\' WHERE state=\'scheduled\' and time_scheduled < ?', datetime.now() - timedelta(seconds=10))
try:
modify("COMMIT")
except: #pylint: disable=bare-except
pass
# no pings since 60s
modify("BEGIN")
query('UPDATE jobs SET state="failed" WHERE state="running" and last_ping < ?', datetime.now() - timedelta(seconds=60))
query('UPDATE jobs SET state=\'failed\' WHERE state=\'running\' and last_ping < ?', datetime.now() - timedelta(seconds=60))
try:
modify("COMMIT")
except: #pylint: disable=bare-except
......@@ -50,14 +50,16 @@ def schedule_job(jobtype, data=None, priority=0, queue="default"):
if not data:
data = {}
return modify('INSERT INTO jobs (type, priority, queue, data, time_created) VALUES (?, ?, ?, ?, ?)',
jobtype, priority, queue, json.dumps(data, default=date_json_handler), datetime.now())
jobtype, priority, queue, json.dumps(data, default=date_json_handler), datetime.now(),
get_id=True)
def cancel_job(job_id):
query('UPDATE jobs SET state = "deleted" WHERE id = ? AND state = "ready"', job_id)
query('UPDATE jobs SET canceled = 1 WHERE id = ?', job_id)
query('UPDATE jobs SET state = \'deleted\' WHERE id = ? AND state = \'ready\'', job_id)
query('UPDATE jobs SET canceled = true WHERE id = ?', job_id)
def restart_job(job_id, canceled=False):
# info: sql no test cover
if canceled:
query('UPDATE jobs SET state = "ready", canceled = 0 WHERE id = ? AND state = "failed"', job_id)
query('UPDATE jobs SET state = \'ready\', canceled = false WHERE id = ? AND state = \'failed\'', job_id)
else:
query('UPDATE jobs SET state = "ready" WHERE id = ? AND state = "failed" AND NOT canceled', job_id)
query('UPDATE jobs SET state = \'ready\' WHERE id = ? AND state = \'failed\' AND NOT canceled', job_id)
......@@ -25,15 +25,22 @@ def jobs_overview():
'state': request.args.get('state', 'failed'),
'worker': request.args.get('worker', '%')}
pagecount = math.ceil(query('SELECT count(id) as count FROM jobs WHERE (type like ?) AND (worker like ? OR (worker IS NULL AND ? = "%")) AND (state like ?)',
filter['type'], filter['worker'], filter['worker'], filter['state'])[0]['count']/pagesize)
jobs = query('SELECT * FROM jobs \
WHERE (type like ?) AND (worker like ? OR (worker IS NULL AND ? = "%")) AND (state like ?) \
ORDER BY `time_created` DESC LIMIT ? OFFSET ?',
filter['type'], filter['worker'], filter['worker'], filter['state'], pagesize, page*pagesize)
active_streams = query('SELECT lectures.*, "course" AS sep, courses.*, "job" AS sep, jobs.* FROM lectures \
condition_values = []
if filter['worker'] == '%':
condition = 'WHERE (type like ?) AND (state like ?)'
condition_values.extend([filter['type'], filter['state']])
else:
condition = 'WHERE (type like ?) AND (worker like ?) AND (state like ?)'
condition_values.extend([filter['type'], filter['worker'], filter['state']])
pagecount = math.ceil(query(f'SELECT count(id) as count FROM jobs {condition}',
*condition_values)[0]['count']/pagesize)
jobs = query(f'SELECT * FROM jobs \
{condition} \
ORDER BY "time_created" DESC LIMIT ? OFFSET ?',
*[*condition_values, pagesize, page*pagesize])
active_streams = query('SELECT lectures.*, \'course\' AS sep, courses.*, \'job\' AS sep, jobs.* FROM lectures \
JOIN courses ON (courses.id = lectures.course_id) \
JOIN jobs ON (jobs.id = lectures.stream_job) WHERE lectures.stream_job')
JOIN jobs ON (jobs.id = lectures.stream_job) WHERE lectures.stream_job IS NOT NULL')
for stream in active_streams:
try:
stream['destbase'] = json.loads((stream['job']['data'] or '{}')).get('destbase')
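The worker filter is now resolved in Python because the old statement compared a bound parameter against the string literal "%": once double quotes denote identifiers (ANSI_QUOTES above, and always on PostgreSQL), that literal is parsed as a column named %, so the rewrite presumably sidesteps it by interpolating one of two fixed WHERE fragments and lining condition_values up with the remaining placeholders.

# Old form that stops working once " means identifier (illustration only):
# query('... AND (worker like ? OR (worker IS NULL AND ? = "%")) ...',
#       filter['type'], filter['worker'], filter['worker'], filter['state'])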
......@@ -55,15 +62,21 @@ def jobs_overview():
@csrf_protect
def jobs_action(action, jobid=None):
if action == 'clear_failed':
query('UPDATE jobs SET state = "deleted" WHERE state = "failed" AND (id = ? OR ? IS NULL)', jobid, jobid)
if jobid:
query('UPDATE jobs SET state = \'deleted\' WHERE state = \'failed\' AND id = ?', jobid)
else:
query('UPDATE jobs SET state = \'deleted\' WHERE state = \'failed\'')
elif action == 'retry_failed':
query('UPDATE jobs SET state = "ready", canceled = 0 WHERE state = "failed" AND (id = ? OR ? IS NULL)', jobid, jobid)
if jobid:
query('UPDATE jobs SET state = \'ready\', canceled = false WHERE state = \'failed\' AND id = ?', jobid)
else:
query('UPDATE jobs SET state = \'ready\', canceled = false WHERE state = \'failed\'')
elif action == 'copy' and jobid:
query("INSERT INTO jobs (type, priority, queue, state, data, time_created) \
SELECT type, priority, queue, 'ready', data, ? FROM jobs where id = ?",
datetime.now(), jobid)
elif action == 'delete' and jobid:
query('UPDATE jobs SET state = "deleted" WHERE id = ?', jobid)
query('UPDATE jobs SET state = \'deleted\' WHERE id = ?', jobid)
elif action == 'cancel' and jobid:
cancel_job(jobid)
return redirect(request.values.get('ref', url_for('jobs_overview')))
......@@ -75,7 +88,7 @@ def jobs_ping(id):
status = json.dumps(json.loads(request.values['status']), default=date_json_handler)
state = request.values['state']
if state == 'finished':
query('UPDATE jobs SET time_finished = ?, status = ?, state = "finished" where id = ?', datetime.now(), status, id)
query('UPDATE jobs SET time_finished = ?, status = ?, state = \'finished\' where id = ?', datetime.now(), status, id)
else:
query('UPDATE jobs SET worker = ?, last_ping = ?, status = ?, state = ? where id = ?', hostname, datetime.now(), status, state, id)
job_handler_handle(id, state)
......@@ -87,7 +100,10 @@ def jobs_ping(id):
@app.route('/internal/jobs/api/worker/<hostname>/schedule', methods=['POST'])
@api_token_required('JOBS_API_KEY')
def jobs_schedule(hostname):
query('REPLACE INTO worker (hostname, last_ping) values (?, ?)', hostname, datetime.now())
if query("SELECT hostname FROM worker WHERE hostname = ?", hostname):
query("UPDATE worker SET last_ping = ? WHERE hostname = ?", datetime.now(), hostname)
else:
query("INSERT INTO worker (hostname, last_ping) VALUES (?, ?)", hostname, datetime.now())
hostdata = request.get_json()
if not hostdata:
return 'no hostdata sent', 400
......@@ -96,13 +112,13 @@ def jobs_schedule(hostname):
while not job:
try:
modify("BEGIN")
for i in query('SELECT * FROM jobs WHERE state = "ready" ORDER BY priority DESC'):
for i in query('SELECT * FROM jobs WHERE state = \'ready\' ORDER BY priority DESC'):
if i['type'] in hostdata['jobtypes'] and i['queue'] in hostdata['queues']:
job = i
break
if not job:
return 'no jobs', 503
modify('UPDATE jobs SET state="scheduled", worker = ?, time_scheduled = ? WHERE id = ?', hostname, datetime.now(), job['id'])
modify('UPDATE jobs SET state=\'scheduled\', worker = ?, time_scheduled = ? WHERE id = ?', hostname, datetime.now(), job['id'])
modify("COMMIT")
except: #pylint: disable=bare-except
tries += 1
......
......@@ -7,7 +7,7 @@ if 'LDAP_HOST' in config:
import ldap3
def ldapauth(user, password): # pylint: disable=function-redefined
if LDAP_USERRE.search(user):
if not user or not password or LDAP_USERRE.search(user):
return {}, []
try:
server = ldap3.Server(config['LDAP_HOST'], port=config['LDAP_PORT'], use_ssl=True)
......
......@@ -44,22 +44,22 @@ def streamauth_legacy(server=None):
break
if 'lecture' in request.values:
match = {'id': request.values['lecture']}
try:
modify("INSERT INTO streams (handle, active, visible, lecture_id, description, poster) VALUES (?, 0, 1, -1, "", "")", request.values['name'])
except:
pass
if not query("SELECT handle FROM streams WHERE handle = ?", request.values['name']):
# info: sql no test cover
modify("INSERT INTO streams (handle, active, visible, lecture_id, description, poster) VALUES (?, false, true, -1, '', '')", request.values['name'])
if server:
data = {'src': 'rtmp://%s/live/%s'%(server, request.values['name']),
'destbase': 'rtmp://%s/hls/%s'%(server, request.values['name'])}
job_id = schedule_job('simple_live_transcode', data, priority=10)
modify("UPDATE streams SET active = 1, lecture_id = ?, job_id = ? WHERE handle = ?",
modify("UPDATE streams SET active = true, lecture_id = ?, job_id = ? WHERE handle = ?",
match['id'], job_id, request.values['name'])
else:
modify("UPDATE streams SET active = 1, lecture_id = ? WHERE handle = ?",
# info: sql no test cover
modify("UPDATE streams SET active = true, lecture_id = ? WHERE handle = ?",
match['id'], request.values['name'])
elif request.values['call'] == 'publish_done':
job_id = query('SELECT job_id FROM streams WHERE handle = ?', request.values['name'])[0]['job_id']
modify("UPDATE streams SET active = 0 WHERE handle = ?", request.values['name'])
modify("UPDATE streams SET active = false WHERE handle = ?", request.values['name'])
if job_id:
cancel_job(job_id)
else:
......@@ -105,7 +105,8 @@ def gentoken():
@app.route('/internal/streaming/rekey/<int:id>')
@mod_required
def streamrekey(id):
modify('UPDATE live_sources SET `key` = ? WHERE id = ? AND NOT deleted', gentoken(), id)
# info: sql no test cover
modify('UPDATE live_sources SET "key" = ? WHERE id = ? AND NOT deleted', gentoken(), id)
source = query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', id)[0]
flash('''Der Streamkey von <strong>{name}</strong> wurde neu generiert:
<span><input readonly type="text" style="width: 15em" value="{key}"></span><br>
......@@ -122,6 +123,7 @@ def streamrekey(id):
@app.route('/internal/streaming/drop/<int:id>')
@mod_required
def streamdrop(id):
# info: sql no test cover
source = (query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', id) or [None])[0]
if not source:
if 'ref' in request.values:
......@@ -136,6 +138,7 @@ def streamdrop(id):
@sched_func(120)
def live_source_thumbnail():
# info: sql no test cover
sources = query('SELECT * FROM live_sources WHERE clientid IS NOT NULL')
for source in sources:
schedule_job('thumbnail', {'srcurl': 'rtmp://%s/src/%i'%(source['server'], source['id']), 'filename': 's_%i.jpg'%source['id']})
......@@ -148,13 +151,14 @@ def ip_in_networks(ip, networks):
@app.route('/internal/streaming/auth/<server>', methods=['GET', 'POST'])
def streamauth(server):
# info: sql no test cover
# pylint: disable=too-many-return-statements
if not ip_in_networks(request.headers['X-Real-IP'], config.get('FSMPI_IP_RANGES', [])):
return 'Forbidden', 403
# Sources publish their streams at rtmp://example.com/src/{key} and are
# the redirected to rtmp://example.com/src/{id} to hide the secret stream key
if request.values['call'] == 'publish':
sources = query('SELECT * FROM live_sources WHERE NOT deleted AND `key` = ?', request.values['name'])
sources = query('SELECT * FROM live_sources WHERE NOT deleted AND "key" = ?', request.values['name'])
if not sources:
return 'Not found', 404
modify('UPDATE live_sources SET server = ?, server_public = ?, clientid = ?, last_active = ?, preview_key = ? WHERE id = ?',
......@@ -187,6 +191,7 @@ def streamauth(server):
return 'Bad request', 400
def schedule_livestream(lecture_id):
# info: sql no test cover
# pylint: disable=too-many-branches,too-many-statements
lecture = query('SELECT * FROM lectures WHERE id = ?', lecture_id)[0]
settings = json.loads(lecture['stream_settings'])
......@@ -296,6 +301,7 @@ def restart_failed_complex_live_transcode(id, type, data, state, status): # pyli
@job_handler('complex_live_transcode', state='failed')
@job_handler('complex_live_transcode', state='finished')
def cleanup_after_complex_live_transcode_ended(id, type, data, state, status): # pylint: disable=unused-argument
# info: sql no test cover
job = query('SELECT * FROM jobs WHERE id = ?', id, nlfix=False)[0]
if state == 'finished' or (state == 'failed' and job['canceled']):
modify('UPDATE lectures_data SET stream_job = NULL WHERE stream_job = ?', id)
......@@ -303,6 +309,7 @@ def cleanup_after_complex_live_transcode_ended(id, type, data, state, status): #
@app.route('/internal/streaming/control', methods=['POST'])
@mod_required
def control_stream():
# info: sql no test cover
action = request.values['action']
lecture_id = int(request.values['lecture_id'])
course = (query('SELECT courses.* FROM courses JOIN lectures ON (courses.id = lectures.course_id) WHERE lectures.id = ?', lecture_id) or [None])[0]
......
......@@ -31,5 +31,5 @@ def update_meeting():
human_date(start), human_time(start))
modify('''REPLACE INTO announcements
(extid, text, level, visible, time_publish, time_expire, time_created, time_updated, created_by)
VALUES (?, ?, 0, 1, ?, ?, ?, ?, 0)''',
VALUES (?, ?, 0, true, ?, ?, ?, ?, 0)''',
'ical:'+uid, text, start-timedelta(days=7), start+timedelta(hours=2), datetime.now(), datetime.now())
......@@ -9,3 +9,4 @@ ldap3
icalendar
mysql-connector-python
coverage
psycopg[c]
\ No newline at end of file
......@@ -33,6 +33,7 @@ if sys.argv[0].endswith('run_tests.py'):
config['DEBUG'] = True
config['DISABLE_SCHEDULER'] = True
config['JOBS_API_KEY'] = '1'
config['SORTER_API_KEY'] = '1'
if config['DEBUG']:
app.jinja_env.auto_reload = True
......@@ -71,7 +72,7 @@ def evalperm(perms):
return [{'type': 'public'}]
#pylint: disable=wrong-import-position
from db import query, modify, show, searchquery
from db import query, modify, show
from template_helper import *
from mail import notify_mods, notify_admins #pylint: disable=unused-import
from ldap import ldapauth
......@@ -138,7 +139,7 @@ def genlive(streams):
return streams
def genlive_new(lectures):
hls_format = (query('SELECT * FROM formats WHERE keywords = "hls"') or [{}])[0]
hls_format = (query('SELECT * FROM formats WHERE keywords = \'hls\'') or [{}])[0]
res = []
for lecture in lectures:
if not lecture['stream_job']:
......@@ -161,7 +162,7 @@ def index():
start = date.today()
end = start + timedelta(days=7)
upcomming = query('''
SELECT lectures.*, streams.active AS nowlive, "course" AS sep, courses.*
SELECT lectures.*, streams.active AS nowlive, \'course\' AS sep, courses.*
FROM lectures
JOIN courses ON (lectures.course_id = courses.id)
LEFT JOIN streams ON lectures.id = streams.lecture_id
......@@ -170,44 +171,54 @@ def index():
for i in upcomming:
i['date'] = i['time'].date()
latestvideos = query('''
SELECT lectures.*, "course" AS sep, courses.*
FROM lectures
LEFT JOIN videos ON (videos.lecture_id = lectures.id)
LEFT JOIN courses on (courses.id = lectures.course_id)
SELECT lectures.*, \'course\' AS sep, courses.*
FROM (
SELECT
videos.lecture_id,
MAX(videos.time_created) AS _time_publish
FROM videos
JOIN lectures ON ( lectures.id = videos.lecture_id )
JOIN courses ON ( courses.id = lectures.course_id )
WHERE (? OR (courses.visible AND courses.listed AND lectures.visible AND videos.visible))
GROUP BY videos.lecture_id
ORDER BY MAX(videos.time_created) DESC
LIMIT 6 ''', ismod())
livestreams = query('''SELECT streams.handle AS livehandle, lectures.*, "course" AS sep, courses.*
ORDER BY _time_publish DESC
LIMIT 6
) AS _latest
JOIN lectures ON ( lectures.id = _latest.lecture_id )
JOIN courses ON ( courses.id = lectures.course_id )
''', ismod())
livestreams = query('''SELECT streams.handle AS livehandle, lectures.*, \'course\' AS sep, courses.*
FROM streams
JOIN lectures ON lectures.id = streams.lecture_id
JOIN courses ON courses.id = lectures.course_id
WHERE streams.active AND (? OR (streams.visible AND courses.visible AND courses.listed AND lectures.visible))
''', ismod())
livestreams_new = query('''SELECT lectures.*, "course" AS sep, courses.*
livestreams_new = query('''SELECT lectures.*, \'course\' AS sep, courses.*
FROM lectures
JOIN courses ON courses.id = lectures.course_id
WHERE lectures.stream_job IS NOT NULL AND (? OR (courses.visible AND courses.listed AND lectures.visible))
''', ismod())
for stream in livestreams_new:
stream['livehandle'] = '%i'%stream['id']
featured = query('SELECT * FROM featured WHERE (? OR visible) ORDER BY `order`', ismod())
featured = query('SELECT * FROM featured WHERE (? OR visible) ORDER BY "order"', ismod())
featured = list(filter(lambda x: not x['deleted'], featured))
for item in featured:
if item['type'] == 'courses':
if item['param'] not in ['title', 'semester', 'organizer', 'subject']:
continue
item['courses'] = query('SELECT * FROM courses WHERE (visible AND listed) AND `%s` = ? ORDER BY `%s`'%(item['param'], item['param']), item['param2'])
# info: sql no test cover
item['courses'] = query('SELECT * FROM courses WHERE (visible AND listed) AND "%s" = ? ORDER BY "%s"'%(item['param'], item['param']), item['param2'])
elif item['type'] == 'video':
item['lecture'] = {'id': item['param']}
streams = query('''SELECT streams.handle AS livehandle, streams.lecture_id, "formats" AS sep, formats.*
# info: sql no test cover
streams = query('''SELECT streams.handle AS livehandle, streams.lecture_id, \'formats\' AS sep, formats.*
FROM streams
JOIN lectures ON lectures.id = streams.lecture_id
JOIN formats ON formats.keywords = "hls"
JOIN formats ON formats.keywords = 'hls'
WHERE streams.active AND streams.visible AND lectures.id = ?
''', item['param'])
item['videos'] = query('''
SELECT videos.*, "formats" AS sep, formats.*
SELECT videos.*, 'formats' AS sep, formats.*
FROM videos
JOIN formats ON (videos.video_format = formats.id)
WHERE videos.lecture_id = ? AND videos.visible
......@@ -247,7 +258,7 @@ def course(id=None, handle=None):
for i in query('SELECT lectures.id AS id, COUNT(chapters.id) AS c FROM chapters \
JOIN lectures ON chapters.lecture_id = lectures.id \
WHERE lectures.course_id = ? AND NOT chapters.visible AND NOT chapters.deleted \
GROUP BY chapters.lecture_id;', course['id']):
GROUP BY lectures.id;', course['id']):
chapters[i['id']] = i['c']
lectures = query('SELECT * FROM lectures WHERE course_id = ? AND (? OR visible) ORDER BY time, duration DESC', course['id'], ismod())
for lecture in lectures:
......@@ -259,7 +270,7 @@ def course(id=None, handle=None):
if perm['lecture_id'] == lecture['id']:
lecture['perm'].append(perm)
videos = query('''
SELECT videos.*, (videos.downloadable AND courses.downloadable) as downloadable, "formats" AS sep, formats.*
SELECT videos.*, (videos.downloadable AND courses.downloadable) as downloadable, 'formats' AS sep, formats.*
FROM videos
JOIN lectures ON (videos.lecture_id = lectures.id)
JOIN formats ON (videos.video_format = formats.id)
......@@ -267,10 +278,10 @@ def course(id=None, handle=None):
WHERE lectures.course_id= ? AND (? OR videos.visible)
ORDER BY lectures.time, formats.prio DESC
''', course['id'], ismod())
livestreams = query('''SELECT streams.handle AS livehandle, streams.lecture_id, "formats" AS sep, formats.*
livestreams = query('''SELECT streams.handle AS livehandle, streams.lecture_id, 'formats' AS sep, formats.*
FROM streams
JOIN lectures ON lectures.id = streams.lecture_id
JOIN formats ON formats.keywords = "hls"
JOIN formats ON formats.keywords = 'hls'
WHERE streams.active AND (? OR streams.visible) AND lectures.course_id = ?
''', ismod(), course['id'])
videos += genlive(livestreams)
......@@ -283,7 +294,7 @@ def course(id=None, handle=None):
responsible = query('''SELECT users.*, responsible.course_id AS responsible
FROM users
LEFT JOIN responsible ON (responsible.user_id = users.id AND responsible.course_id = ?)
WHERE users.fsacc != "" AND users.level > 0
WHERE users.fsacc != '' AND users.level > 0
ORDER BY responsible DESC, users.realname ASC''', course['id'])
live_sources = query('SELECT * FROM live_sources WHERE NOT deleted')
return render_template('course.html', course=course, lectures=lectures, videos=videos, chapters=chapters, responsible=responsible, live_sources=live_sources)
......@@ -305,17 +316,17 @@ def impress():
def lecture(id, course=None, courseid=None): #pylint: disable=unused-argument,too-many-branches
lecture = query('SELECT * FROM lectures WHERE id = ? AND (? OR visible)', id, ismod())[0]
videos = query('''
SELECT videos.*, (videos.downloadable AND courses.downloadable) as downloadable, "formats" AS sep, formats.*
SELECT videos.*, (videos.downloadable AND courses.downloadable) as downloadable, 'formats' AS sep, formats.*
FROM videos
JOIN formats ON (videos.video_format = formats.id)
JOIN courses ON (courses.id = ?)
WHERE videos.lecture_id = ? AND (? OR videos.visible)
ORDER BY formats.prio DESC
''', lecture['course_id'], lecture['id'], ismod())
livestreams = query('''SELECT streams.handle AS livehandle, streams.lecture_id, "formats" AS sep, formats.*
livestreams = query('''SELECT streams.handle AS livehandle, streams.lecture_id, 'formats' AS sep, formats.*
FROM streams
JOIN lectures ON lectures.id = streams.lecture_id
JOIN formats ON formats.keywords = "hls"
JOIN formats ON formats.keywords = 'hls'
WHERE streams.active AND (? OR streams.visible) AND lectures.id = ?
''', ismod(), id)
videos += genlive(livestreams)
......@@ -395,9 +406,37 @@ def search():
if 'q' not in request.args:
return redirect(url_for('index'))
searchtext = request.args['q']
courses = searchquery(searchtext, '*', ['title', 'short', 'organizer', 'subject', 'description'],
'courses', 'WHERE (? OR (visible AND listed)) GROUP BY id ORDER BY _score DESC, semester DESC LIMIT 20', ismod())
lectures = searchquery(searchtext, 'lectures.*, \
courses = _course_query_search(searchtext, ismod())
lectures = _lecture_query_search(searchtext, ismod())
for lecture in lectures:
lecture['course'] = {}
for key in lecture:
if key.startswith('courses_'):
lecture['course'][key[8:]] = lecture[key]
return render_template('search.html', searchtext=searchtext, courses=courses, lectures=lectures)
# This search is basically stolen from the new api
def _course_query_search(search_term: str, is_mod: bool):
return _query_search(
"courses",
["title", "short", "organizer", "subject", "description"],
None,
None,
None if is_mod else 'WHERE "courses"."visible" AND "courses"."listed"',
'"courses"."semester" DESC',
20,
search_term
)
def _lecture_query_search(search_term: str, is_mod: bool):
return _query_search(
"lectures",
["title", "comment", "speaker"],
'JOIN "courses" ON ("lectures"."course_id" = "courses"."id")',
"""\
courses.visible AS coursevisible, \
courses.listed, \
courses.id AS courses_id, \
......@@ -416,16 +455,59 @@ def search():
courses.downloadable AS courses_downloadable, \
courses.embedinvisible AS courses_embedinvisible, \
courses.description AS courses_description, \
courses.internal AS courses_internal',
['lectures.title', 'lectures.comment', 'lectures.speaker', 'courses.short'],
'lectures LEFT JOIN courses on (courses.id = lectures.course_id)',
'WHERE (? OR (coursevisible AND listed AND visible)) GROUP BY id ORDER BY _score DESC, time DESC LIMIT 30', ismod())
for lecture in lectures:
lecture['course'] = {}
for key in lecture:
if key.startswith('courses_'):
lecture['course'][key[8:]] = lecture[key]
return render_template('search.html', searchtext=searchtext, courses=courses, lectures=lectures)
courses.internal AS courses_internal
""",
None if is_mod else 'WHERE "courses"."visible" AND "courses"."listed" AND "lectures"."visible"',
'"lectures"."time" DESC',
30,
search_term
)
# pylint: disable=too-many-arguments
def _query_search(
table: str,
search_columns: list,
join_clause: str or None,
extra_select_columns: str or None,
where_clause: str or None,
extra_ordering: str or None,
limit: int,
search_term: str):
base_sub_query = f"""
SELECT "{table}"."id" AS "_id", CAST(%s AS INT) AS "_priority" FROM "{table}" WHERE {" OR ".join(
map(lambda column: f'LOWER("{table}"."{column}") LIKE ?',
search_columns))}
"""
words: list = list(filter(lambda w: not w.isspace(), search_term.split(" ")))
if len(words) == 0:
return []
sub_queries: list = []
all_values: list = []
prio = len(words)
for word in words:
word = word.lower()
word = word.replace("%", "\\%").replace("_", "\\_")
word = "%" + word + "%"
sub_queries.append(base_sub_query % prio)
for _ in range(0, len(search_columns)):
all_values.append(word)
prio -= 1
return query(f"""
SELECT "{table}".* {"" if extra_select_columns is None else "," + extra_select_columns}
FROM "{table}"
JOIN (
SELECT "_id", CAST(SUM("_priority") AS INT) AS "_score"
FROM ({"UNION ALL".join(sub_queries)}) AS "_sub_result"
GROUP BY "_id"
) AS "_data" ON ("{table}"."id" = "_data"."_id")
{"" if join_clause is None else join_clause}
{"" if where_clause is None else where_clause}
ORDER BY "_data"."_score" DESC{"" if extra_ordering is None else ", " + extra_ordering} LIMIT {limit}
""", *all_values)
def check_mod(user, groups):
if not user:
......@@ -447,7 +529,8 @@ def login():
session['user'] = userinfo
dbuser = query('SELECT * FROM users WHERE name = ?', user)
if not dbuser:
modify('INSERT INTO users (name, realname, fsacc, level, calendar_key, rfc6238) VALUES (?, ?, ?, 1, "", "")', user, session['user']['givenName'], user)
# info: sql no test cover
modify('INSERT INTO users (name, realname, fsacc, level, calendar_key, rfc6238) VALUES (?, ?, ?, 1, \'\', \'\')', user, session['user']['givenName'], user)
dbuser = query('SELECT * FROM users WHERE name = ?', user)
session['user']['dbid'] = dbuser[0]['id']
session['_csrf_token'] = ''.join(random.SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(64))
......@@ -524,12 +607,12 @@ def auth(): #pylint: disable=too-many-branches
if is_authorized:
try:
if not url_path.startswith('pub/hls/'):
modify('INSERT INTO log (id, `time`, `date`, video, source) VALUES (?, ?, ?, ?, 1)',
modify('INSERT INTO log (id, "time", "date", video, source) VALUES (?, ?, ?, ?, true)',
cookie, datetime.now(), datetime.combine(date.today(), time()), perms[0]['vid'])
elif url_path.endswith('.ts'):
fmt = url_path.split('_')[-1].split('-')[0]
seg = url_path.split('.')[0].split('-')[-1]
modify('INSERT INTO hlslog (id, `time`, segment, lecture, handle, format) VALUES (?, ?, ?, ?, ?, ?)',
modify('INSERT INTO hlslog (id, "time", segment, lecture, handle, format) VALUES (?, ?, ?, ?, ?, ?)',
cookie, datetime.now(), seg, perms[0]['lecture'], handle, fmt)
except: #pylint: disable=bare-except
pass
......
......@@ -17,9 +17,9 @@ def sort_log():
FROM sortlog
JOIN lectures ON lectures.id = sortlog.lecture_id
JOIN courses ON courses.id = lectures.course_id
ORDER BY sortlog.`when` DESC
ORDER BY sortlog."when" DESC
LIMIT 50
'''), sorterrorlog=query('SELECT * FROM sorterrorlog ORDER BY sorterrorlog.`when` DESC'))
'''), sorterrorlog=query('SELECT * FROM sorterrorlog ORDER BY sorterrorlog."when" DESC'))
def to_ascii(inputstring):
asciistring = inputstring
......@@ -29,6 +29,7 @@ def to_ascii(inputstring):
@job_handler('probe', 'remux', 'transcode')
def update_video_metadata(jobid, jobtype, data, state, status): #pylint: disable=unused-argument
# info: sql no test cover
if 'video_id' not in data:
return
if jobtype not in ['remux', 'transcode']:
......@@ -56,17 +57,19 @@ def add_thumbnail_job():
return redirect(request.values.get('ref', url_for('jobs_overview')))
def insert_video(lectureid, dbfilepath, fileformatid, hash="", filesize=-1, duration=-1, sourceid=None): #pylint: disable=too-many-arguments
# info: sql no test cover
visible = query('SELECT courses.autovisible FROM courses JOIN lectures ON lectures.course_id = courses.id WHERE lectures.id = ?', lectureid)[0]['autovisible']
video_id = modify('''INSERT INTO videos_data
(lecture_id, visible, path, video_format, title, comment, internal, file_modified, time_created, time_updated, created_by, hash, file_size, duration, source)
VALUES
(?, ?, ?, ?, "", "", "", ?, ?, ?, ?, ?, ?, ?, ?)''',
lectureid, visible, dbfilepath, fileformatid, datetime.now(), datetime.now(), datetime.now(), -1, hash, filesize, duration, sourceid)
(?, ?, ?, ?, '', '', '', ?, ?, ?, ?, ?, ?, ?, ?)''',
lectureid, visible, dbfilepath, fileformatid, datetime.now(), datetime.now(), datetime.now(), -1, hash, filesize, duration, sourceid,
get_id=True)
if not sourceid:
query('INSERT INTO sortlog (lecture_id,video_id,path,`when`) VALUES (?,?,?,?)', lectureid, video_id, dbfilepath, datetime.now())
query('INSERT INTO sortlog (lecture_id,video_id,path,"when") VALUES (?,?,?,?)', lectureid, video_id, dbfilepath, datetime.now())
schedule_job('probe', {'path': dbfilepath, 'lecture_id': lectureid, 'video_id': video_id, 'import-chapters': True})
schedule_thumbnail(lectureid)
video = query('SELECT videos.*, "format" AS sep, formats.* FROM videos JOIN formats ON formats.id = videos.video_format WHERE videos.id = ?', video_id)[0]
video = query('SELECT videos.*, \'format\' AS sep, formats.* FROM videos JOIN formats ON formats.id = videos.video_format WHERE videos.id = ?', video_id)[0]
lecture = query('SELECT * FROM lectures WHERE id = ?', lectureid)[0]
course = query('SELECT * FROM courses WHERE id = ?', lecture['course_id'])[0]
notify_mods('new_video', course['id'], course=course, lecture=lecture, video=video)
......@@ -77,10 +80,10 @@ def split_filename(filename):
return filename.replace('_', '-').replace(' ', '-').split('-')
def parse_filename(filename):
# filenames: <handle>-<sorter>-<format>.mp4, split at '-' into an array
# filenames: <handle>-<sorter>-<format>.<ext>, split at '-' into an array
data = {'keywords': []}
for chunk in filename:
chunk = chunk.replace('.mp4', '')
chunk = chunk.replace('.mp4', '').replace('.webm', '')
#-<YYMMDD> (date)
#-<HHMM> (time)
#-<keyword>
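Since the sorter below now also accepts .webm files, a quick trace of the helpers above with a hypothetical file name:

split_filename('22ws-dsal-221014-1080p.webm')
# -> ['22ws', 'dsal', '221014', '1080p.webm']
# parse_filename() then strips the '.webm' (or '.mp4') suffix from each
# chunk before matching the date, time and keyword patterns.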
......@@ -167,7 +170,7 @@ def log_sort_error(course_id, path, matches):
matches_id = []
for match in matches:
matches_id.append(str(match['id']))
query('INSERT INTO sorterrorlog_data (course_id, path, matches, `when`, time_updated, time_created) VALUES (?, ?, ?, ?, ?, ?)',
query('INSERT INTO sorterrorlog_data (course_id, path, matches, "when", time_updated, time_created) VALUES (?, ?, ?, ?, ?, ?)',
course_id, path, ','.join(matches_id), datetime.now(), datetime.now(), datetime.now())
@app.route('/internal/sort/encoded/<filename>')
......@@ -226,7 +229,8 @@ def sort_now():
# if the video is in the table "videos" already (with the correct course), skip it
if os.path.basename(filename) in ignorefiles:
continue
if not os.path.splitext(filename)[1] == '.mp4':
ext = os.path.splitext(filename)[1]
if not ext == '.mp4' and not ext == '.webm':
continue
matches, fmt = sort_file(filename, course=course, lectures=lectures)
dbfilepath = mountpoint['prefix']+course['handle']+'/'+filename
......
......@@ -139,11 +139,11 @@ var moderator = {
changeboxclick: function(src) {
var value = $(src)[0].checked;
var path = $(src).data('path');
moderator.api.set(path,value ? 1 : 0);
moderator.api.set(path,value ? true : false);
},
deletebtnclick: function(src) {
if (confirm('Really delete this?')) {
moderator.api.set($(src).data('path'),1,true);
moderator.api.set($(src).data('path'),true,true);
}
}
},
......@@ -237,7 +237,7 @@ var moderator = {
$(".authtype option[value="+perm.type+"]").prop("selected", true);
},
delbtnclick: function (element) {
moderator.api.set("perm."+$("#permissionlist option:selected", element.parentElement).data('id')+".deleted",1,true);
moderator.api.set("perm."+$("#permissionlist option:selected", element.parentElement).data('id')+".deleted",true,true);
},
addbtnclick: function (element) {
var container = element.parentElement;
......