Commit 584ed579 authored by Simon Künzel

Increase sql statement compatibility

parent d6460435
# Defaults for development, do not use in production!
DEBUG = False
SERVER_IP = 'localhost'
VIDEOPREFIX = 'https://videoag.fsmpi.rwth-aachen.de'
VIDEOPREFIX = '/files'
VIDEOMOUNT = [{'mountpoint': 'files/protected/', 'prefix':'protected/'},{'mountpoint':'files/pub/','prefix':'pub/' }, {'mountpoint':'files/vpnonline/','prefix':'vpnonline/' }]
#SECRET_KEY = 'something random'
@@ -20,16 +20,17 @@ def feed(handle=None):
	if handle:
		course = query('SELECT * FROM courses WHERE handle = ? AND visible', handle)[0]
		course['atomid'] = gen_atomid('Video AG, courses['+str(course['id'])+']: '+course['handle'])
-	entries = query('''
+	course_id = course['id']
+	entries = query(f'''
		SELECT lectures.*, 'video' AS sep, videos.*, formats.description AS format_description, formats.prio, \'course\' AS sep, courses.*
		FROM lectures
		JOIN courses ON (courses.id = lectures.course_id)
		JOIN videos ON (lectures.id = videos.lecture_id)
		JOIN formats ON (formats.id = videos.video_format)
-		WHERE ((? IS NULL AND courses.listed) OR course_id = ?) AND courses.visible AND lectures.visible AND videos.visible
+		WHERE {"courses.listed" if course_id is None else "course_id = ?"} AND courses.visible AND lectures.visible AND videos.visible
		ORDER BY videos.time_created DESC, prio ASC
		LIMIT 100''',
-		course['id'], course['id'])
+		*([] if course_id is None else [course_id]))
	updated = max(course['time_updated'], course['time_created'], key=fixdate)
	for entry in entries:
		entry['updated'] = max(entry['video']['time_created'], entry['video']['time_updated'], entry['time_created'], entry['time_updated'], key=fixdate)
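Note on the hunk above: instead of encoding the "all courses vs. one course" choice inside SQL as (? IS NULL AND courses.listed) OR course_id = ?, the WHERE fragment is now chosen in Python and a parameter is only bound when it is actually used, which keeps the statement valid on engines that are picky about untyped NULL placeholders. A minimal standalone sketch of the pattern (latest_videos() is a hypothetical helper using sqlite3 directly instead of the project's query() wrapper; table and column names are taken from the query above):

import sqlite3

def latest_videos(conn: sqlite3.Connection, course_id=None, limit=100):
	# Choose the WHERE fragment in Python; NULL placeholders inside boolean
	# logic are not handled identically by every engine and driver.
	condition = 'courses.listed' if course_id is None else 'courses.id = ?'
	params = [] if course_id is None else [course_id]
	sql = f'''
		SELECT lectures.id, lectures.title, videos.time_created
		FROM lectures
		JOIN courses ON courses.id = lectures.course_id
		JOIN videos ON lectures.id = videos.lecture_id
		WHERE {condition} AND courses.visible AND lectures.visible AND videos.visible
		ORDER BY videos.time_created DESC
		LIMIT ?'''
	return conn.execute(sql, [*params, limit]).fetchall()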
@@ -25,15 +25,22 @@ def jobs_overview():
		'state': request.args.get('state', 'failed'),
		'worker': request.args.get('worker', '%')}
-	pagecount = math.ceil(query('SELECT count(id) as count FROM jobs WHERE (type like ?) AND (worker like ? OR (worker IS NULL AND ? = \'%\')) AND (state like ?)',
-		filter['type'], filter['worker'], filter['worker'], filter['state'])[0]['count']/pagesize)
-	jobs = query('SELECT * FROM jobs \
-		WHERE (type like ?) AND (worker like ? OR (worker IS NULL AND ? = \'%\')) AND (state like ?) \
+	condition_values = []
+	if filter['worker'] == '%':
+		condition = 'WHERE (type like ?) AND (worker IS NULL) AND (state like ?)'
+		condition_values.extend([filter['type'], filter['state']])
+	else:
+		condition = 'WHERE (type like ?) AND (worker like ?) AND (state like ?)'
+		condition_values.extend([filter['type'], filter['worker'], filter['state']])
+	pagecount = math.ceil(query(f'SELECT count(id) as count FROM jobs {condition}',
+		*condition_values)[0]['count']/pagesize)
+	jobs = query(f'SELECT * FROM jobs \
+		{condition} \
		ORDER BY "time_created" DESC LIMIT ? OFFSET ?',
-		filter['type'], filter['worker'], filter['worker'], filter['state'], pagesize, page*pagesize)
+		*[*condition_values, pagesize, page*pagesize])
	active_streams = query('SELECT lectures.*, \'course\' AS sep, courses.*, \'job\' AS sep, jobs.* FROM lectures \
		JOIN courses ON (courses.id = lectures.course_id) \
-		JOIN jobs ON (jobs.id = lectures.stream_job) WHERE lectures.stream_job')
+		JOIN jobs ON (jobs.id = lectures.stream_job) WHERE lectures.stream_job IS NOT NULL')
	for stream in active_streams:
		try:
			stream['destbase'] = json.loads((stream['job']['data'] or '{}')).get('destbase')
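Note: the jobs listing now builds the WHERE clause and its bound values as one unit, so the count query and the page query can never disagree about the filter, and the stream join spells out IS NOT NULL instead of relying on the truthiness of a nullable column. A standalone sketch of the pagination pattern (paged_jobs() is hypothetical; sqlite3 stands in for the project's query() helper):

import math
import sqlite3

def paged_jobs(conn: sqlite3.Connection, job_filter, page=0, pagesize=50):
	# Keep the condition string and its parameter list together so the count
	# query and the page query always apply the same filter.
	if job_filter['worker'] == '%':
		condition = 'WHERE (type LIKE ?) AND (worker IS NULL) AND (state LIKE ?)'
		values = [job_filter['type'], job_filter['state']]
	else:
		condition = 'WHERE (type LIKE ?) AND (worker LIKE ?) AND (state LIKE ?)'
		values = [job_filter['type'], job_filter['worker'], job_filter['state']]
	total = conn.execute(f'SELECT count(id) FROM jobs {condition}', values).fetchone()[0]
	rows = conn.execute(
		f'SELECT * FROM jobs {condition} ORDER BY time_created DESC LIMIT ? OFFSET ?',
		[*values, pagesize, page * pagesize]).fetchall()
	return math.ceil(total / pagesize), rows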
@@ -55,9 +62,15 @@ def jobs_overview():
@csrf_protect
def jobs_action(action, jobid=None):
	if action == 'clear_failed':
-		query('UPDATE jobs SET state = \'deleted\' WHERE state = \'failed\' AND (id = ? OR ? IS NULL)', jobid, jobid)
+		if jobid:
+			query('UPDATE jobs SET state = \'deleted\' WHERE state = \'failed\' AND id = ?', jobid)
+		else:
+			query('UPDATE jobs SET state = \'deleted\' WHERE state = \'failed\'')
	elif action == 'retry_failed':
-		query('UPDATE jobs SET state = \'ready\', canceled = false WHERE state = \'failed\' AND (id = ? OR ? IS NULL)', jobid, jobid)
+		if jobid:
+			query('UPDATE jobs SET state = \'ready\', canceled = false WHERE state = \'failed\' AND id = ?', jobid)
+		else:
+			query('UPDATE jobs SET state = \'ready\', canceled = false WHERE state = \'failed\'')
	elif action == 'copy' and jobid:
		query("INSERT INTO jobs (type, priority, queue, state, data, time_created) \
			SELECT type, priority, queue, 'ready', data, ? FROM jobs where id = ?",
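Note: the former (id = ? OR ? IS NULL) condition is split into two explicit statements, so a missing job id never travels into the SQL as a NULL placeholder. A small sketch of the same idea (mark_failed_jobs_deleted() is a hypothetical standalone helper):

import sqlite3

def mark_failed_jobs_deleted(conn: sqlite3.Connection, jobid=None):
	# One explicit statement per case; some engines are stricter than SQLite
	# about comparing columns against untyped NULL parameters.
	if jobid is not None:
		conn.execute("UPDATE jobs SET state = 'deleted' WHERE state = 'failed' AND id = ?", (jobid,))
	else:
		conn.execute("UPDATE jobs SET state = 'deleted' WHERE state = 'failed'")
	conn.commit()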
@@ -21,7 +21,7 @@ def stats():
STATS_QUERIES = {}
STATS_QUERIES['formats_views'] = "SELECT formats.description AS labels, count(DISTINCT log.id) AS \"values\" FROM log \
-	JOIN videos ON (videos.id = log.video) JOIN formats ON (formats.id = videos.video_format) GROUP BY formats.id"
+	JOIN videos ON (videos.id = log.video) JOIN formats ON (formats.id = videos.video_format) GROUP BY formats.id, formats.description"
STATS_QUERIES['course_count'] = 'SELECT semester AS x, count(id) AS y FROM courses WHERE semester != \'\' GROUP BY semester'
STATS_QUERIES['lectures_count'] = 'SELECT semester AS x, count(lectures.id) AS y FROM lectures \
	JOIN courses ON (courses.id = lectures.course_id) WHERE semester != \'\' GROUP BY semester'
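Note: this and the following GROUP BY hunks add every selected, non-aggregated column to the grouping list. SQLite accepts grouping by formats.id alone, but engines that enforce the standard grouping rules more strictly can reject selecting formats.description when it is neither grouped nor aggregated; listing it explicitly is the portable form. Illustration (the same query as above, laid out as a plain constant):

# Portable form: every selected, non-aggregated column also appears in GROUP BY.
FORMATS_VIEWS = '''
	SELECT formats.description AS labels, count(DISTINCT log.id) AS "values"
	FROM log
	JOIN videos ON (videos.id = log.video)
	JOIN formats ON (formats.id = videos.video_format)
	GROUP BY formats.id, formats.description
'''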
@@ -34,7 +34,7 @@ STATS_QUERIES['categories_lectures'] = "SELECT courses.subject AS labels, count(
STATS_QUERIES['lecture_views'] = "SELECT lectures.time AS x, count(DISTINCT log.id) AS y FROM log \
	JOIN videos ON (videos.id = log.video) \
	JOIN lectures ON (lectures.id = videos.lecture_id) \
-	WHERE (lectures.course_id = ?) GROUP BY lectures.id ORDER BY lectures.time"
+	WHERE (lectures.course_id = ?) GROUP BY lectures.id, lectures.time ORDER BY lectures.time"
STATS_QUERIES['live_views'] = "SELECT hlslog.segment AS x, COUNT(DISTINCT hlslog.id) AS y FROM hlslog WHERE hlslog.lecture = ? \
	GROUP BY hlslog.segment ORDER BY hlslog.segment"
STATS_QUERIES['lecture_totalviews'] = "SELECT 42"
@@ -73,7 +73,7 @@ def stats_generic(req, param=None):
def stats_viewsperday(req, param=""): #pylint: disable=too-many-locals
	update_expr = 'INSERT INTO logcache (req, param, trace, date, value) SELECT \'%s\', ?, trace, date, y FROM (%s) AS cachetmp WHERE date < ?'
	query_expr = 'SELECT date, trace, value AS y FROM logcache WHERE req = \'%s\' AND param = ? UNION SELECT * FROM (%s) AS cachetmp'
-	date_subexpr = 'SELECT CASE WHEN MAX(date) IS NULL THEN \'2000-00-00\' ELSE MAX(date) END AS t FROM "logcache" WHERE req = \'%s\' AND param = ?'
+	date_subexpr = 'SELECT CASE WHEN MAX(date) IS NULL THEN \'2000-01-01\' ELSE MAX(date) END AS t FROM "logcache" WHERE req = \'%s\' AND param = ?'
	queries = {
		'lecture': # views per day per lecture (split per format)
			'''SELECT log.date AS date, formats.description AS trace, COUNT(DISTINCT log.id) AS y
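Note: the fallback date also changes from '2000-00-00' to '2000-01-01'. Month and day zero do not form a valid calendar date, so anything that actually parses DATE values rejects the old sentinel, while a real date from 2000 still sorts before any logged view. Python's own parser makes the point:

from datetime import date

date.fromisoformat('2000-01-01')    # valid sentinel, sorts before any real log date
# date.fromisoformat('2000-00-00') raises ValueError: not a real calendar date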
@@ -81,7 +81,7 @@ def stats_viewsperday(req, param=""): #pylint: disable=too-many-locals
			JOIN videos ON videos.id = log.video
			JOIN formats ON formats.id = videos.video_format
			WHERE log.date > %T AND videos.lecture_id = ?
-			GROUP BY log.date, videos.video_format
+			GROUP BY log.date, formats.description, videos.video_format
			UNION
			SELECT log.date AS date, \'total\' AS trace, COUNT(DISTINCT log.id) AS y
			FROM log JOIN videos ON videos.id = log.video
@@ -93,7 +93,7 @@ def stats_viewsperday(req, param=""): #pylint: disable=too-many-locals
			JOIN lectures ON lectures.id = videos.lecture_id
			JOIN formats ON formats.id = videos.video_format
			WHERE log.date > %T AND lectures.course_id = ?
-			GROUP BY log.date, videos.video_format
+			GROUP BY log.date, formats.description, videos.video_format
			UNION
			SELECT log.date AS date, \'total\' AS trace, COUNT(DISTINCT log.id) AS y
			FROM log
@@ -107,7 +107,7 @@ def stats_viewsperday(req, param=""): #pylint: disable=too-many-locals
			FROM log
			JOIN videos ON videos.id = log.video
			JOIN formats ON formats.id = videos.video_format
-			WHERE log.date > %T GROUP BY log.date, videos.video_format
+			WHERE log.date > %T GROUP BY log.date, formats.description, videos.video_format
			UNION
			SELECT log.date AS date, \'total\' AS trace, COUNT(DISTINCT log.id) AS y
			FROM log
@@ -120,9 +120,9 @@ def stats_viewsperday(req, param=""): #pylint: disable=too-many-locals
			JOIN lectures ON lectures.id = videos.lecture_id
			JOIN courses ON courses.id = lectures.course_id
			WHERE log.date > %T
-			GROUP BY log.date, courses.id'''
+			GROUP BY log.date, courses.handle, courses.id'''
		}
-	expr = queries[req].replace('%T', '\''+query(date_subexpr%('viewsperday.'+req), param)[0]['t']+'\'')
+	expr = queries[req].replace('%T', '\''+str(query(date_subexpr%('viewsperday.'+req), param)[0]['t'])+'\'')
	params = [param]*expr.count('?')
	try:
		modify("BEGIN")
@@ -136,12 +136,15 @@ def stats_viewsperday(req, param=""): #pylint: disable=too-many-locals
	traces = set()
	data = {}
	for row in rows:
-		if not start or row['date'] < start:
-			start = row['date']
+		row_date = row['date']
+		if isinstance(row_date, datetime):
+			row_date = row_date.date()
+		if not start or row_date < start:
+			start = row_date
		traces.add(row['trace'])
-		if row['date'] not in data:
-			data[row['date']] = {}
-		data[row['date']][row['trace']] = row['y']
+		if row_date not in data:
+			data[row_date] = {}
+		data[row_date][row['trace']] = row['y']
	end = date.today()
	res = [{'name': trace, 'x': [], 'y': []} for trace in traces]
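Note: the last two hunks deal with DATE values coming back from the driver as different Python types: the cached MAX(date) is passed through str() before being spliced into the query text, and each row['date'] is reduced to a datetime.date before it is compared and used as a dictionary key. A small normalization helper in the same spirit (as_date() is hypothetical; the assumption is that one backend returns ISO strings while another returns date or datetime objects):

from datetime import date, datetime

def as_date(value):
	# Accept whatever the driver hands back for a DATE column: an ISO string,
	# a datetime.datetime, or already a datetime.date.
	if isinstance(value, datetime):
		return value.date()
	if isinstance(value, date):
		return value
	return date.fromisoformat(str(value)[:10])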