Skip to content
Snippets Groups Projects
Commit 743df080 authored by Simon Künzel's avatar Simon Künzel
Browse files

Ensure DB returns datetime with timezone set and fix more issues with timezones

parent 3b63d3c7
No related branches found
No related tags found
No related merge requests found
Showing
with 74 additions and 24 deletions
...@@ -69,8 +69,8 @@ def api_route_is_running(): ...@@ -69,8 +69,8 @@ def api_route_is_running():
def _db_execute_get_homepage(session: SessionDb, is_mod: bool): def _db_execute_get_homepage(session: SessionDb, is_mod: bool):
upcoming_start: date = date.today() upcoming_start: datetime = get_standard_datetime_now().replace(hour=0, minute=0, second=0, microsecond=0)
upcoming_end: date = upcoming_start + timedelta(days=7) upcoming_end: datetime = upcoming_start + timedelta(days=7)
upcoming_lectures = session.scalars( upcoming_lectures = session.scalars(
Lecture.select({ Lecture.select({
......
...@@ -169,8 +169,8 @@ class ApiDatetimeField[_O: "ApiObject"](ApiSimpleColumnField[_O]): ...@@ -169,8 +169,8 @@ class ApiDatetimeField[_O: "ApiObject"](ApiSimpleColumnField[_O]):
def post_init_check(self, context: FieldContext): def post_init_check(self, context: FieldContext):
super().post_init_check(context) super().post_init_check(context)
if not isinstance(self._column.type, sql.types.DateTime): # pragma: no cover if not isinstance(self._column.type, UTCTimestamp): # pragma: no cover
raise TypeError(f"SQL type for datetime field must be datetime, but is '{self._column.type}'") raise TypeError(f"SQL type for datetime field must be UTCTimestamp (our custom type), but is '{self._column.type}'")
@property @property
def may_be_none_allowed_for_config_fields(self) -> bool: def may_be_none_allowed_for_config_fields(self) -> bool:
......
...@@ -19,3 +19,6 @@ from .database import ( ...@@ -19,3 +19,6 @@ from .database import (
TransactionConflictError, TransactionConflictError,
Database, Database,
) )
from .utc_timestamp import (
UTCTimestamp
)
...@@ -10,6 +10,8 @@ from sqlalchemy.sql.base import _NoneName, ReadOnlyColumnCollection ...@@ -10,6 +10,8 @@ from sqlalchemy.sql.base import _NoneName, ReadOnlyColumnCollection
from sqlalchemy.sql.schema import ColumnCollectionConstraint, ForeignKey from sqlalchemy.sql.schema import ColumnCollectionConstraint, ForeignKey
from sqlalchemy.dialects import postgresql as postgresql from sqlalchemy.dialects import postgresql as postgresql
from .utc_timestamp import UTCTimestamp
# #
# This file attempts its best to detect any difference between the schema in the python files and the actual database. # This file attempts its best to detect any difference between the schema in the python files and the actual database.
...@@ -264,6 +266,9 @@ def _check_types_equal(actual: types.TypeEngine, schema: types.TypeEngine) -> bo ...@@ -264,6 +266,9 @@ def _check_types_equal(actual: types.TypeEngine, schema: types.TypeEngine) -> bo
if type(schema) is types.TIMESTAMP: if type(schema) is types.TIMESTAMP:
return isinstance(actual, types.TIMESTAMP) return isinstance(actual, types.TIMESTAMP)
if type(schema) is UTCTimestamp:
return isinstance(actual, types.TIMESTAMP)
if isinstance(schema, types.Enum): if isinstance(schema, types.Enum):
if not isinstance(actual, types.Enum): if not isinstance(actual, types.Enum):
return False return False
......
import datetime
# noinspection PyPep8Naming
from datetime import datetime as Datetime
import sqlalchemy as sql
class UTCTimestamp(sql.TypeDecorator[Datetime]):
    """Column type storing timezone-aware datetimes as UTC ``TIMESTAMP`` values.

    On write, values must be timezone-aware and are normalized to UTC before
    being handed to the driver. On read, naive values coming back from the
    database are assumed to be UTC and are tagged accordingly, so application
    code always sees aware datetimes.
    """

    impl = sql.TIMESTAMP()
    cache_ok = True

    @property
    def python_type(self) -> type[Datetime]:
        return Datetime

    def process_bind_param(self, value: Datetime | None, dialect: sql.Dialect) -> Datetime | None:
        """Normalize an outgoing value to UTC; reject naive datetimes.

        Raises:
            ValueError: if ``value`` is naive (no tzinfo, or a tzinfo whose
                ``utcoffset()`` is ``None`` — per the datetime docs that is
                still a naive datetime).
        """
        if value is None:
            return None
        # `if not value.tzinfo` would rely on truthiness and miss the case
        # where tzinfo is set but yields no offset; use the canonical
        # aware/naive check instead.
        if value.tzinfo is None or value.tzinfo.utcoffset(value) is None:
            raise ValueError(f"Missing timezone in datetime: {value}")
        return value.astimezone(datetime.timezone.utc)

    def process_result_value(self, value: Datetime | None, dialect: sql.Dialect) -> Datetime | None:
        """Tag naive values read from the database as UTC; pass through None."""
        if value is None:
            return None
        if value.tzinfo is None:
            value = value.replace(tzinfo=datetime.UTC)
        return value
...@@ -35,10 +35,17 @@ def pad_string(val: str, padding: str, length: int): ...@@ -35,10 +35,17 @@ def pad_string(val: str, padding: str, length: int):
return (padding * math.ceil( (length - len(val)) / len(padding) )) + val return (padding * math.ceil( (length - len(val)) / len(padding) )) + val
# Python doesn't have a formatter for milliseconds
# Don't just remove last three numbers with microseconds. See here: https://stackoverflow.com/a/35643540
def format_standard_datetime(dt: Datetime): def format_standard_datetime(dt: Datetime):
return dt.astimezone(datetime.UTC).strftime("%Y-%m-%dT%H:%M:%S.") + pad_string(str(dt.microsecond // 1000), "0", 3) + "Z" def zero_pad(val: int, length: int):
return pad_string(str(val), "0", length)
# Python's strftime can't handle milliseconds and is unreliable regarding the length (e.g. sometimes microseconds or
# years are not padded to full length) https://stackoverflow.com/a/35643540
# So we just format it ourselves
dt = dt.astimezone(datetime.UTC)
return (f"{zero_pad(dt.year, 4)}-{zero_pad(dt.month, 2)}-{zero_pad(dt.day, 2)}"
f"T{zero_pad(dt.hour, 2)}:{zero_pad(dt.minute, 2)}:{zero_pad(dt.second, 2)}"
f".{zero_pad(dt.microsecond // 1000, 3)}Z")
def parse_standard_datetime(val: str): def parse_standard_datetime(val: str):
......
...@@ -23,7 +23,7 @@ class ChangelogEntry(ApiObject, Base): ...@@ -23,7 +23,7 @@ class ChangelogEntry(ApiObject, Base):
} }
change_time: Mapped[datetime] = api_mapped( change_time: Mapped[datetime] = api_mapped(
mapped_column(TIMESTAMP(), nullable=False, index=True, server_default=sql.text("CURRENT_TIMESTAMP")), mapped_column(UTCTimestamp(), nullable=False, index=True, server_default=sql.text("CURRENT_TIMESTAMP")),
ApiDatetimeField( ApiDatetimeField(
include_in_data=True include_in_data=True
) )
......
...@@ -108,7 +108,7 @@ class Lecture(DeletableApiObject, VisibilityApiObject, ApiViewPermissionsObject, ...@@ -108,7 +108,7 @@ class Lecture(DeletableApiObject, VisibilityApiObject, ApiViewPermissionsObject,
) )
) )
time: Mapped[datetime] = api_mapped( time: Mapped[datetime] = api_mapped(
mapped_column(TIMESTAMP(), nullable=False, index=True), mapped_column(UTCTimestamp(), nullable=False, index=True),
ApiDatetimeField( ApiDatetimeField(
include_in_config=True, config_directly_modifiable=True, include_in_config=True, config_directly_modifiable=True,
include_in_data=True include_in_data=True
...@@ -154,7 +154,7 @@ class Lecture(DeletableApiObject, VisibilityApiObject, ApiViewPermissionsObject, ...@@ -154,7 +154,7 @@ class Lecture(DeletableApiObject, VisibilityApiObject, ApiViewPermissionsObject,
) )
) )
publish_time: Mapped[datetime] = api_mapped( publish_time: Mapped[datetime] = api_mapped(
mapped_column(TIMESTAMP(), nullable=True, index=True), mapped_column(UTCTimestamp(), nullable=True, index=True),
ApiDatetimeField( ApiDatetimeField(
include_in_data=True, data_only_mod=True include_in_data=True, data_only_mod=True
) )
......
...@@ -73,19 +73,19 @@ class Job(ApiObject, Base): ...@@ -73,19 +73,19 @@ class Job(ApiObject, Base):
) )
) )
creation_time: Mapped[datetime] = api_mapped( creation_time: Mapped[datetime] = api_mapped(
mapped_column(sql.DateTime(), nullable=False, server_default=sql.text("CURRENT_TIMESTAMP")), mapped_column(UTCTimestamp(), nullable=False, server_default=sql.text("CURRENT_TIMESTAMP")),
ApiDatetimeField( ApiDatetimeField(
include_in_data=True include_in_data=True
) )
) )
run_start_time: Mapped[datetime] = api_mapped( run_start_time: Mapped[datetime] = api_mapped(
mapped_column(sql.DateTime(), nullable=True), mapped_column(UTCTimestamp(), nullable=True),
ApiDatetimeField( ApiDatetimeField(
include_in_data=True include_in_data=True
) )
) )
run_end_time: Mapped[datetime] = api_mapped( run_end_time: Mapped[datetime] = api_mapped(
mapped_column(sql.DateTime(), nullable=True), mapped_column(UTCTimestamp(), nullable=True),
ApiDatetimeField( ApiDatetimeField(
include_in_data=True include_in_data=True
) )
......
...@@ -74,7 +74,7 @@ class SorterFile(DeletableApiObject, Base): ...@@ -74,7 +74,7 @@ class SorterFile(DeletableApiObject, Base):
) )
) )
file_modification_time: Mapped[datetime] = api_mapped( file_modification_time: Mapped[datetime] = api_mapped(
mapped_column(sql.DateTime, nullable=False), mapped_column(UTCTimestamp, nullable=False),
ApiDatetimeField( ApiDatetimeField(
include_in_data=True include_in_data=True
) )
...@@ -101,7 +101,7 @@ class SorterFile(DeletableApiObject, Base): ...@@ -101,7 +101,7 @@ class SorterFile(DeletableApiObject, Base):
) )
) )
update_time: Mapped[datetime] = api_mapped( update_time: Mapped[datetime] = api_mapped(
mapped_column(sql.DateTime, nullable=False), mapped_column(UTCTimestamp, nullable=False),
ApiDatetimeField( ApiDatetimeField(
include_in_data=True include_in_data=True
) )
......
...@@ -49,14 +49,14 @@ class Announcement(DeletableApiObject, VisibilityApiObject, Base): ...@@ -49,14 +49,14 @@ class Announcement(DeletableApiObject, VisibilityApiObject, Base):
) )
) )
publish_time: Mapped[datetime] = api_mapped( publish_time: Mapped[datetime] = api_mapped(
mapped_column(TIMESTAMP(), nullable=True), mapped_column(UTCTimestamp(), nullable=True),
ApiDatetimeField( ApiDatetimeField(
include_in_config=True, include_in_config=True,
include_in_data=True, data_only_mod=True include_in_data=True, data_only_mod=True
) )
) )
expiration_time: Mapped[datetime] = api_mapped( expiration_time: Mapped[datetime] = api_mapped(
mapped_column(TIMESTAMP(), nullable=True), mapped_column(UTCTimestamp(), nullable=True),
ApiDatetimeField( ApiDatetimeField(
include_in_config=True, include_in_config=True,
include_in_data=True, data_only_mod=True include_in_data=True, data_only_mod=True
......
...@@ -12,7 +12,7 @@ from videoag_common.miscellaneous import JsonSerializableEnum ...@@ -12,7 +12,7 @@ from videoag_common.miscellaneous import JsonSerializableEnum
class PublishMediumWatchLogEntry(Base): class PublishMediumWatchLogEntry(Base):
watch_id: Mapped[str] = mapped_column(String(length=64, collation=STRING_COLLATION), nullable=False, primary_key=True) watch_id: Mapped[str] = mapped_column(String(length=64, collation=STRING_COLLATION), nullable=False, primary_key=True)
timestamp: Mapped[datetime] = mapped_column(TIMESTAMP(), nullable=False, primary_key=True, timestamp: Mapped[datetime] = mapped_column(UTCTimestamp(), nullable=False, primary_key=True,
server_default=sql.text("CURRENT_TIMESTAMP")) server_default=sql.text("CURRENT_TIMESTAMP"))
# No foreign key to improve performance. Values are validated when aggregating # No foreign key to improve performance. Values are validated when aggregating
publish_medium_id: Mapped[int] = mapped_column(nullable=False) publish_medium_id: Mapped[int] = mapped_column(nullable=False)
......
...@@ -36,7 +36,7 @@ class User(ApiObject, Base): ...@@ -36,7 +36,7 @@ class User(ApiObject, Base):
) )
) )
last_login: Mapped[datetime] = mapped_column(TIMESTAMP(), nullable=True) last_login: Mapped[datetime] = mapped_column(UTCTimestamp(), nullable=True)
enable_mail_notifications: Mapped[bool] = mapped_column(nullable=False, default=True) enable_mail_notifications: Mapped[bool] = mapped_column(nullable=False, default=True)
notify_new_video: Mapped[bool] = mapped_column(nullable=False, default=True) notify_new_video: Mapped[bool] = mapped_column(nullable=False, default=True)
......
...@@ -2,7 +2,7 @@ import hashlib ...@@ -2,7 +2,7 @@ import hashlib
import logging import logging
import shutil import shutil
import time import time
from datetime import date, datetime from datetime import date, datetime, UTC
from pathlib import Path from pathlib import Path
import videoag_common import videoag_common
...@@ -40,7 +40,7 @@ def _check_file( ...@@ -40,7 +40,7 @@ def _check_file(
to_sort_file_db_paths: list[str], to_sort_file_db_paths: list[str],
file: Path file: Path
): ):
file_mod_time = datetime.fromtimestamp(file.lstat().st_mtime) file_mod_time = datetime.fromtimestamp(file.lstat().st_mtime, tz=UTC)
seconds_since_modification = time.time() - file.lstat().st_mtime seconds_since_modification = time.time() - file.lstat().st_mtime
error_message = None error_message = None
......
...@@ -17,7 +17,9 @@ class JobExecutionInfo(ABC): ...@@ -17,7 +17,9 @@ class JobExecutionInfo(ABC):
@abstractmethod @abstractmethod
def get_start_time(self) -> datetime or None: def get_start_time(self) -> datetime or None:
""" """
Returns the time when the job execution started or None if the execution has not started yet Returns the time when the job execution started or None if the execution has not started yet.
Always with UTC timezone
""" """
pass pass
...@@ -25,6 +27,8 @@ class JobExecutionInfo(ABC): ...@@ -25,6 +27,8 @@ class JobExecutionInfo(ABC):
def get_finish_time(self) -> datetime or None: def get_finish_time(self) -> datetime or None:
""" """
Only call this if is_success() did not return None. May be None only if the job failed Only call this if is_success() did not return None. May be None only if the job failed
Always with UTC timezone
""" """
pass pass
......
from datetime import datetime from datetime import datetime, UTC
import kubernetes as k8s import kubernetes as k8s
from kubernetes.client import ApiException from kubernetes.client import ApiException
...@@ -54,11 +54,15 @@ class K8sJobInfo(JobExecutionInfo): ...@@ -54,11 +54,15 @@ class K8sJobInfo(JobExecutionInfo):
def get_start_time(self) -> datetime or None: def get_start_time(self) -> datetime or None:
start_time = self._k8s_job.status.start_time start_time = self._k8s_job.status.start_time
assert start_time is None or isinstance(start_time, datetime) assert start_time is None or isinstance(start_time, datetime)
if start_time is not None:
start_time = start_time.astimezone(UTC)
return start_time return start_time
def get_finish_time(self) -> datetime: def get_finish_time(self) -> datetime | None:
finish_time = self._k8s_job.status.completion_time finish_time = self._k8s_job.status.completion_time
assert finish_time is None or isinstance(finish_time, datetime) assert finish_time is None or isinstance(finish_time, datetime)
if finish_time is not None:
finish_time = finish_time.astimezone(UTC)
return finish_time return finish_time
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment