diff --git a/db.py b/db.py
index 42730d5924b1e91a0f4d4347d736abcc5f83cd30..d12252c9915bfb59744456448cd53a16eabc5c51 100644
--- a/db.py
+++ b/db.py
@@ -58,6 +58,7 @@ elif config['DB_ENGINE'] == 'mysql':
 				port=config.get('MYSQL_PORT', 3306),
 				unix_socket=config.get('MYSQL_UNIX', None),
 				database=config['MYSQL_DB'])
+		g.db.cmd_query("SET SESSION sql_mode = 'ANSI_QUOTES'")
 		if not hasattr(request, 'db'):
 			request.db = g.db.cursor()
 		return request.db
diff --git a/edit.py b/edit.py
index caee27000fe36dcfb527641c69e1a179f6614700..45147acd77fddb927a90f022b46bed18134e01ae 100644
--- a/edit.py
+++ b/edit.py
@@ -151,8 +151,8 @@ def getfielddescription(inputpath):
 def getfieldchangelog(inputpath):
 	path = parseeditpath(inputpath)
 	changelog = query('SELECT * FROM changelog \
-			LEFT JOIN users ON (changelog.who = users.id) WHERE `table` = ? AND `id_value` = ? and `field` = ? \
-			ORDER BY `when` DESC LIMIT 5', path['table'], path['id'], path['column'])
+			LEFT JOIN users ON (changelog.who = users.id) WHERE "table" = ? AND "id_value" = ? and "field" = ? \
+			ORDER BY "when" DESC LIMIT 5', path['table'], path['id'], path['column'])
 	for entry in changelog:
 		entry['id_value'] = str(entry['id_value'])
 		entry['value_new'] = str(entry['value_new'])
@@ -180,9 +180,9 @@ def edit(prefix='', ignore=None):
 		key = prefix+key
 		path = parseeditpath(key)
 		modify('INSERT INTO changelog \
-				(`table`,id_value, id_key, field, value_new, value_old, `when`, who, executed) \
+				("table",id_value, id_key, field, value_new, value_old, "when", who, executed) \
 				VALUES (?,?,?,?,?, \
-				(SELECT `%s` FROM %s WHERE %s = ?),?,?,1)'%(
+				(SELECT "%s" FROM %s WHERE %s = ?),?,?,1)'%(
 					path['column'],
 					path['tableinfo']['table'],
 					path['tableinfo']['idcolumn']
@@ -195,7 +195,7 @@ def edit(prefix='', ignore=None):
 				path['id'],
 				datetime.now(),
 				session['user']['dbid'])
-		modify('UPDATE %s SET `%s` = ?, time_updated = ? WHERE `%s` = ?'%(path['tableinfo']['table'], path['column'], path['tableinfo']['idcolumn']),
+		modify('UPDATE %s SET "%s" = ?, time_updated = ? WHERE "%s" = ?'%(path['tableinfo']['table'], path['column'], path['tableinfo']['idcolumn']),
 				val, datetime.now(), path['id'])
 		for func in edit_handlers.get(path['table'], {}).get(None, []):
 			func(path['table'], path['column'], val, path['id'], session['user']['dbid'])
@@ -225,7 +225,7 @@ def create(table):
 			continue
 		assert column in list(editable_tables[table]['editable_fields'].keys())+editable_tables[table]['creationtime_fields']
 		assert column not in defaults
-		columns.append('`'+column+'`')
+		columns.append('"'+column+'"')
 		values.append(val)
 	id = modify('INSERT INTO %s (%s) VALUES (%s)'%(editable_tables[table]['table'],
 			','.join(columns), ','.join(['?']*len(values))), *values)
@@ -241,7 +241,7 @@ def changelog():
 	page = max(0, int(request.args.get('page', 0)))
 	pagesize = min(500, int(request.args.get('pagesize', 50)))
-	changelog = query('SELECT * FROM changelog LEFT JOIN users ON (changelog.who = users.id) ORDER BY `when` DESC LIMIT ? OFFSET ?', pagesize, page*pagesize)
+	changelog = query('SELECT * FROM changelog LEFT JOIN users ON (changelog.who = users.id) ORDER BY "when" DESC LIMIT ? OFFSET ?', pagesize, page*pagesize)
 	pagecount = math.ceil(query('SELECT count(id) as count FROM changelog')[0]['count']/pagesize)
 	for entry in changelog:
 		entry['path'] = '.'.join([entry['table'], entry['id_value'], entry['field']])
diff --git a/importer.py b/importer.py
index 2d65fed5a0ef50efe490894052404cd0498cb43c..7cf3e0281714c2337fbc17be7710fbc2de88a546 100644
--- a/importer.py
+++ b/importer.py
@@ -22,7 +22,7 @@ def list_import_sources(id):
 						campus[i]['url'], campus[i]['type'], id, datetime.now())
 			else:
 				if campus[i]['url'] != '':
-					query('UPDATE import_campus SET url = ?, `type` = ? WHERE (course_id = ?) AND (id = ?)', campus[i]['url'], campus[i]['type'], id, int(i))
+					query('UPDATE import_campus SET url = ?, "type" = ? WHERE (course_id = ?) AND (id = ?)', campus[i]['url'], campus[i]['type'], id, int(i))
 				else:
 					query('DELETE FROM import_campus WHERE (id = ?) AND (course_id = ?)', int(i), id)
 	import_campus = query('SELECT * FROM import_campus WHERE course_id = ?', id)
diff --git a/jobs.py b/jobs.py
index 515b3ee02348b6d7a3b674e73797743754882475..87511969c3c5254ab1e101649a68cbd23ca40d08 100644
--- a/jobs.py
+++ b/jobs.py
@@ -29,7 +29,7 @@ def jobs_overview():
 			filter['type'], filter['worker'], filter['worker'], filter['state'])[0]['count']/pagesize)
 	jobs = query('SELECT * FROM jobs \
 			WHERE (type like ?) AND (worker like ? OR (worker IS NULL AND ? = "%")) AND (state like ?) \
-			ORDER BY `time_created` DESC LIMIT ? OFFSET ?',
+			ORDER BY "time_created" DESC LIMIT ? OFFSET ?',
 			filter['type'], filter['worker'], filter['worker'], filter['state'], pagesize, page*pagesize)
 	active_streams = query('SELECT lectures.*, "course" AS sep, courses.*, "job" AS sep, jobs.* FROM lectures \
 			JOIN courses ON (courses.id = lectures.course_id) \
diff --git a/livestreams.py b/livestreams.py
index affdc0386a3da12934d4cd266d22d9ddfb79b2bb..cb8dca55a199fcde1cd4ffd009ad161c41d6bd26 100644
--- a/livestreams.py
+++ b/livestreams.py
@@ -105,7 +105,7 @@ def gentoken():
 @app.route('/internal/streaming/rekey/<int:id>')
 @mod_required
 def streamrekey(id):
-	modify('UPDATE live_sources SET `key` = ? WHERE id = ? AND NOT deleted', gentoken(), id)
+	modify('UPDATE live_sources SET "key" = ? WHERE id = ? AND NOT deleted', gentoken(), id)
 	source = query('SELECT * FROM live_sources WHERE NOT deleted AND id = ?', id)[0]
 	flash('''Der Streamkey von <strong>{name}</strong> wurde neu generiert:
 			<span><input readonly type="text" style="width: 15em" value="{key}"></span><br>
@@ -154,7 +154,7 @@ def streamauth(server):
 	# Sources publish their streams at rtmp://example.com/src/{key} and are
 	# the redirected to rtmp://example.com/src/{id} to hide the secret stream key
 	if request.values['call'] == 'publish':
-		sources = query('SELECT * FROM live_sources WHERE NOT deleted AND `key` = ?', request.values['name'])
+		sources = query('SELECT * FROM live_sources WHERE NOT deleted AND "key" = ?', request.values['name'])
 		if not sources:
 			return 'Not found', 404
 		modify('UPDATE live_sources SET server = ?, server_public = ?, clientid = ?, last_active = ?, preview_key = ? WHERE id = ?',
diff --git a/server.py b/server.py
index 16644fa97950542a45ba778d891edb35dc3d5575..ad98b1f73e0a0413da4858eaa2dc03fc2f4f8c96 100644
--- a/server.py
+++ b/server.py
@@ -191,13 +191,13 @@ def index():
 		''', ismod())
 	for stream in livestreams_new:
 		stream['livehandle'] = '%i'%stream['id']
-	featured = query('SELECT * FROM featured WHERE (? OR visible) ORDER BY `order`', ismod())
+	featured = query('SELECT * FROM featured WHERE (? OR visible) ORDER BY "order"', ismod())
 	featured = list(filter(lambda x: not x['deleted'], featured))
 	for item in featured:
 		if item['type'] == 'courses':
 			if item['param'] not in ['title', 'semester', 'organizer', 'subject']:
 				continue
-			item['courses'] = query('SELECT * FROM courses WHERE (visible AND listed) AND `%s` = ? ORDER BY `%s`'%(item['param'], item['param']), item['param2'])
+			item['courses'] = query('SELECT * FROM courses WHERE (visible AND listed) AND "%s" = ? ORDER BY "%s"'%(item['param'], item['param']), item['param2'])
 		elif item['type'] == 'video':
 			item['lecture'] = {'id': item['param']}
 			streams = query('''SELECT streams.handle AS livehandle, streams.lecture_id, "formats" AS sep, formats.*
@@ -524,12 +524,12 @@ def auth(): #pylint: disable=too-many-branches
 	if is_authorized:
 		try:
 			if not url_path.startswith('pub/hls/'):
-				modify('INSERT INTO log (id, `time`, `date`, video, source) VALUES (?, ?, ?, ?, 1)',
+				modify('INSERT INTO log (id, "time", "date", video, source) VALUES (?, ?, ?, ?, 1)',
 						cookie, datetime.now(), datetime.combine(date.today(), time()), perms[0]['vid'])
 			elif url_path.endswith('.ts'):
 				fmt = url_path.split('_')[-1].split('-')[0]
 				seg = url_path.split('.')[0].split('-')[-1]
-				modify('INSERT INTO hlslog (id, `time`, segment, lecture, handle, format) VALUES (?, ?, ?, ?, ?, ?)',
+				modify('INSERT INTO hlslog (id, "time", segment, lecture, handle, format) VALUES (?, ?, ?, ?, ?, ?)',
 						cookie, datetime.now(), seg, perms[0]['lecture'], handle, fmt)
 		except: #pylint: disable=bare-except
 			pass
diff --git a/sorter.py b/sorter.py
index 395b4be4163f8355cd973ea92998808f4ae4c8a7..f4b8e9b93dcd2f0231c6a2798c0bdf9aed1e6913 100644
--- a/sorter.py
+++ b/sorter.py
@@ -17,9 +17,9 @@ def sort_log():
 				FROM sortlog
 				JOIN lectures ON lectures.id = sortlog.lecture_id
 				JOIN courses ON courses.id = lectures.course_id
-				ORDER BY sortlog.`when` DESC
+				ORDER BY sortlog."when" DESC
 				LIMIT 50
-			'''), sorterrorlog=query('SELECT * FROM sorterrorlog ORDER BY sorterrorlog.`when` DESC'))
+			'''), sorterrorlog=query('SELECT * FROM sorterrorlog ORDER BY sorterrorlog."when" DESC'))
 
 def to_ascii(inputstring):
 	asciistring = inputstring
@@ -63,7 +63,7 @@ def insert_video(lectureid, dbfilepath, fileformatid, hash="", filesize=-1, dura
 			(?, ?, ?, ?, "", "", "", ?, ?, ?, ?, ?, ?, ?, ?)''',
 			lectureid, visible, dbfilepath, fileformatid, datetime.now(), datetime.now(), datetime.now(), -1, hash, filesize, duration, sourceid)
 	if not sourceid:
-		query('INSERT INTO sortlog (lecture_id,video_id,path,`when`) VALUES (?,?,?,?)', lectureid, video_id, dbfilepath, datetime.now())
+		query('INSERT INTO sortlog (lecture_id,video_id,path,"when") VALUES (?,?,?,?)', lectureid, video_id, dbfilepath, datetime.now())
 		schedule_job('probe', {'path': dbfilepath, 'lecture_id': lectureid, 'video_id': video_id, 'import-chapters': True})
 	schedule_thumbnail(lectureid)
 	video = query('SELECT videos.*, "format" AS sep, formats.* FROM videos JOIN formats ON formats.id = videos.video_format WHERE videos.id = ?', video_id)[0]
@@ -167,7 +167,7 @@ def log_sort_error(course_id, path, matches):
 	matches_id = []
 	for match in matches:
 		matches_id.append(str(match['id']))
-	query('INSERT INTO sorterrorlog_data (course_id, path, matches, `when`, time_updated, time_created) VALUES (?, ?, ?, ?, ?, ?)',
+	query('INSERT INTO sorterrorlog_data (course_id, path, matches, "when", time_updated, time_created) VALUES (?, ?, ?, ?, ?, ?)',
 		course_id, path, ','.join(matches_id), datetime.now(), datetime.now(), datetime.now())
 
 @app.route('/internal/sort/encoded/<filename>')
diff --git a/stats.py b/stats.py
index 7cbab86370d680297876bab63766e9e64aadecc7..ad27e48b71474b6b0d00eb8410a15d0cebc0e369 100644
--- a/stats.py
+++ b/stats.py
@@ -20,17 +20,17 @@ def stats():
 	return render_template('stats.html', semester=semester, filter=request.args.get('filter'))
 
 STATS_QUERIES = {}
-STATS_QUERIES['formats_views'] = "SELECT formats.description AS labels, count(DISTINCT log.id) AS `values` FROM log \
+STATS_QUERIES['formats_views'] = "SELECT formats.description AS labels, count(DISTINCT log.id) AS \"values\" FROM log \
 		JOIN videos ON (videos.id = log.video) JOIN formats ON (formats.id = videos.video_format) GROUP BY formats.id"
 STATS_QUERIES['course_count'] = 'SELECT semester AS x, count(id) AS y FROM courses WHERE semester != "" GROUP BY semester'
 STATS_QUERIES['lectures_count'] = 'SELECT semester AS x, count(lectures.id) AS y FROM lectures \
 		JOIN courses ON (courses.id = lectures.course_id) WHERE semester != "" GROUP BY semester'
-STATS_QUERIES['categories_courses'] = "SELECT courses.subject AS labels, count(courses.id) AS `values` FROM courses \
+STATS_QUERIES['categories_courses'] = "SELECT courses.subject AS labels, count(courses.id) AS \"values\" FROM courses \
 		GROUP BY courses.subject ORDER BY labels DESC LIMIT 100"
-STATS_QUERIES['organizer_courses'] = "SELECT courses.organizer AS labels, count(courses.id) AS `values` FROM courses \
+STATS_QUERIES['organizer_courses'] = "SELECT courses.organizer AS labels, count(courses.id) AS \"values\" FROM courses \
 		GROUP BY courses.organizer ORDER BY labels DESC LIMIT 100"
-STATS_QUERIES['categories_lectures'] = "SELECT courses.subject AS labels, count(lectures.id) AS `values` FROM lectures \
-		JOIN courses ON (courses.id = lectures.course_id) WHERE lectures.visible GROUP BY courses.subject ORDER BY `values` DESC LIMIT 100"
+STATS_QUERIES['categories_lectures'] = "SELECT courses.subject AS labels, count(lectures.id) AS \"values\" FROM lectures \
+		JOIN courses ON (courses.id = lectures.course_id) WHERE lectures.visible GROUP BY courses.subject ORDER BY \"values\" DESC LIMIT 100"
 STATS_QUERIES['lecture_views'] = "SELECT lectures.time AS x, count(DISTINCT log.id) AS y FROM log \
 		JOIN videos ON (videos.id = log.video) \
 		JOIN lectures ON (lectures.id = videos.lecture_id) \
@@ -73,7 +73,7 @@ def stats_generic(req, param=None):
 def stats_viewsperday(req, param=""): #pylint: disable=too-many-locals
 	update_expr = 'INSERT INTO logcache (req, param, trace, date, value) SELECT "%s", ?, trace, date, y FROM (%s) AS cachetmp WHERE date < ?'
 	query_expr = 'SELECT date, trace, value AS y FROM logcache WHERE req = "%s" AND param = ? UNION SELECT * FROM (%s) AS cachetmp'
-	date_subexpr = 'SELECT CASE WHEN MAX(date) IS NULL THEN "2000-00-00" ELSE MAX(date) END AS t FROM `logcache` WHERE req = "%s" AND param = ?'
+	date_subexpr = 'SELECT CASE WHEN MAX(date) IS NULL THEN "2000-00-00" ELSE MAX(date) END AS t FROM "logcache" WHERE req = "%s" AND param = ?'
 	queries = {
 		'lecture': # views per day per lecture (split per format)
 			'''SELECT log.date AS date, formats.description AS trace, COUNT(DISTINCT log.id) AS y
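For reference, double quotes are the ANSI SQL way to quote identifiers: SQLite accepts them out of the box, and MySQL accepts them once ANSI_QUOTES is part of the session's sql_mode, which is what the new cmd_query() call in db.py turns on before handing out a cursor. A minimal standalone sketch of that behaviour (the connection parameters below are made up for illustration; only the changelog table and its "when" column come from the patch):

# Illustrative sketch, not the project's real config or wrapper code.
import mysql.connector

conn = mysql.connector.connect(user='videoag', password='secret',
                               host='localhost', database='videoag')
# Same statement the patch issues per session: from here on, double quotes
# delimit identifiers (so string literals must use single quotes).
conn.cmd_query("SET SESSION sql_mode = 'ANSI_QUOTES'")

cur = conn.cursor(dictionary=True)
# "when" is a reserved word; with ANSI_QUOTES this ANSI-style quoting works
# on MySQL as well as SQLite, instead of the MySQL-only backtick form `when`.
cur.execute('SELECT * FROM changelog ORDER BY "when" DESC LIMIT %s', (5,))
rows = cur.fetchall()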