diff --git a/.gitignore b/.gitignore
index 788441badae9e32489d52921035fb80351ca302c..0dbe1eac2b522bfeca9acc62197551b8d0280490 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,8 +1,10 @@
 .idea/
 
 venv/
+.venv/
 myvenv/
 __pycache__
 
 *.sqlite
 child-pipeline.yml
+.dockerfiles
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e3a982d745ef25a4f7921dc96e4ad4a5d6914ad7..13ab1f1ac0133eb0b578056663bf30ff0df77011 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -2,15 +2,26 @@ stages:
     - build
     - run
 
+variables:
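+    # Forwarded to the child pipeline so its jobs can fetch the generated Dockerfiles
+    # from this pipeline's generate-pipeline job artifacts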
+    PARENT_PIPELINE_ID: $CI_PIPELINE_ID
+
 generate-pipeline:
     stage: build
     image: python:3.13-slim
     script:
-        - python generate_ci_pipeline.py
+        - >-
+            python build_pipeline_generator.py
+            --ci-pipeline-dest child-pipeline.yml
+            --commit-sha "$CI_COMMIT_SHA"
+            --commit-tag "$CI_COMMIT_TAG"
+            api job_controller
+            $(find job_controller/jobs/ -mindepth 1 -maxdepth 1)
+
     artifacts:
         expire_in: 1 week
         paths:
             - child-pipeline.yml
+            - .dockerfiles
 
 run-pipeline:
     stage: run
diff --git a/api/Dockerfile b/api/Dockerfile
deleted file mode 100755
index 87d71a08638a4c2aa5eaff22639970395a912a3b..0000000000000000000000000000000000000000
--- a/api/Dockerfile
+++ /dev/null
@@ -1,27 +0,0 @@
-# Can be "development" or "production"
-ARG ENV_TYPE
-ARG GIT_COMMIT_SHA
-FROM registry.git.fsmpi.rwth-aachen.de/videoag/backend/${ENV_TYPE}_common_py:${GIT_COMMIT_SHA}
-
-ENV VIDEOAG_API_GIT_COMMIT_HASH $GIT_COMMIT_SHA
-
-COPY extra_requirements.txt ./
-RUN pip3 install -r extra_requirements.txt
-
-COPY docker_start.sh ./
-COPY .pylintrc ./
-COPY tests/ ./tests/
-COPY config/api_example_config.py ./config/
-COPY config/test_config_override.py ./config/
-COPY config/uwsgi_example.ini ./config/
-
-# The source has a symlink file at src/videoag_common
-# The actual files are already in the image at src/videoag_common
-# So we move the actual files temporarily, copy the src directory, remove the symlink and move the actual files back
-# In the future, COPY --exclude src/videoag_common might work (but right now it doesn't, some "failed to compute cache key")
-RUN mv src/videoag_common src/.temp
-COPY src/ ./src/
-RUN rm src/videoag_common
-RUN mv src/.temp src/videoag_common
-
-ENTRYPOINT ["/code/docker_start.sh"]
\ No newline at end of file
diff --git a/api/build_config.py b/api/build_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..8499e5cb2f4f38dab45d7580e4dc7a80fa1642cf
--- /dev/null
+++ b/api/build_config.py
@@ -0,0 +1,45 @@
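+# Build configuration for the API image, consumed by build_pipeline_generator.py
+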
+TARGET_IMAGE_NAME = "api"
+BUILD_DEPENDENCIES = ["../common_py/"]
+
+PIP_REQUIREMENTS_FILE = "requirements.txt"
+APT_RUNTIME_DEPENDENCIES = [
+    "libxml2",  # For uwsgi
+]
+APT_BUILD_DEPENDENCIES = [
+    "gcc", "python3-dev", "libxslt-dev", "libxml2-dev"
+]
+DOCKERFILE_EXTRA = """
+COPY $MODULE_DIR/docker_start.sh ./
+COPY $MODULE_DIR/.pylintrc ./
+COPY $MODULE_DIR/src/ ./src/
+COPY $MODULE_DIR/tests/ ./tests/
+COPY $MODULE_DIR/config/api_example_config.py ./config/
+COPY $MODULE_DIR/config/test_config_override.py ./config/
+COPY $MODULE_DIR/config/uwsgi_example.ini ./config/
+COPY $MODULE_DIR/config/db_test_data.sql ./config/
+
+ENTRYPOINT ["/code/docker_start.sh"]
+"""
+
+# stage, needs, and image are added automatically
+CI_TEST_JOB_TEMPLATE = """\
+timeout: 30m
+variables:
+    VIDEOAG_CONFIG: /code/config/api_example_config.py
+    COVERAGE_REPORT_DIR: $CI_PROJECT_DIR/coverage
+script:
+    - cd /code
+    - ./docker_start.sh -test
+artifacts:
+    paths:
+        - $CI_PROJECT_DIR/coverage/report.txt
+        - $CI_PROJECT_DIR/coverage/html/
+services:
+    - name: postgres:17
+      # So we can have the same host for local and CI testing
+      alias: host.docker.internal
+      variables:
+          POSTGRES_DB: videoag
+          POSTGRES_USER: videoag
+          POSTGRES_PASSWORD: videoag
+"""
\ No newline at end of file
diff --git a/api/config/api_example_config.py b/api/config/api_example_config.py
index b30711d18db37122e08ad14a4b5af8204fb95c5c..b96f67c7659d998c82a6b6ff998548a04a18c7f5 100644
--- a/api/config/api_example_config.py
+++ b/api/config/api_example_config.py
@@ -65,7 +65,7 @@ DATABASE = {
     "engine": "postgres",
     "postgres": {
         "host": "host.docker.internal",
-        "port": 9343,
+        "port": 5432,
         "user": "videoag",
         "password": "videoag",
         "database": "videoag",
diff --git a/api/config/test_config_override.py b/api/config/test_config_override.py
index 2593c79843ac729b7ba5b335ca101368a71bb1d4..26a5616f82272ea22b0ad9fa9c9d69c87e273bb4 100644
--- a/api/config/test_config_override.py
+++ b/api/config/test_config_override.py
@@ -8,10 +8,11 @@ DB_DATA = "../config/db_test_data.sql"
 
 DATABASE = {
     "postgres": {
-        "host": "ci-database",
-        "user": "videoagtest",
-        "password": "LetMeTest...",
-        "database": "videoagtest",
+        "host": "host.docker.internal",
+        "port": 5432,
+        "user": "videoag",
+        "password": "videoag",
+        "database": "videoag",
         "auto_migration": True
     },
     "log_all_statements": False
diff --git a/api/docker_start.sh b/api/docker_start.sh
index 09f181cb3d4690cd8c530694db90b7a05c11dfc4..f7d32b139b55350dfb282e960aaebe56595c9b35 100755
--- a/api/docker_start.sh
+++ b/api/docker_start.sh
@@ -6,18 +6,22 @@ mkdir -p ./log/
 
 if [ $# = 1 ] && [ $1 = "-test" ]; then
   echo "Running tests"
+
+  # Use an absolute default so the path stays valid after the cd below
+  export COVERAGE_REPORT_DIR=${COVERAGE_REPORT_DIR:=$(pwd)/coverage}
+  mkdir -p "$COVERAGE_REPORT_DIR"
+
   # We always execute from src (For consistency with uWSGI)
   cd src
   uname -a
   export VIDEOAG_TEST_CONFIG_OVERRIDE="../config/test_config_override.py"
-  python3 -V
-  python3 -m coverage run --data-file "../coverage/.data" run_tests.py ||
+  python -V
+  python -m coverage run --data-file "../coverage/.data" run_tests.py ||
     { echo "Test failed!"; exit 1; }
-  python3 -m coverage report --data-file "../coverage/.data" --include "./*" ||
+  python -m coverage report --data-file "../coverage/.data" --include "./*" ||
     { echo "Coverage report stdout failed"; exit 1; }
-  python3 -m coverage report --data-file "../coverage/.data" -m --include "./*" > ../coverage/report.txt ||
+  python -m coverage report --data-file "../coverage/.data" -m --include "./*" > "$COVERAGE_REPORT_DIR/report.txt" ||
     { echo "Coverage report report.txt failed"; exit 1; }
-  python3 -m coverage html -d "../coverage/html/" --data-file "../coverage/.data" --include "./*" ||
+  python -m coverage html -d "$COVERAGE_REPORT_DIR/html/" --data-file "../coverage/.data" --include "./*" ||
     { echo "Coverage report html failed"; exit 1; }
 else
   echo "Running uWSGI"
diff --git a/api/extra_requirements.txt b/api/requirements.txt
similarity index 79%
rename from api/extra_requirements.txt
rename to api/requirements.txt
index e874a7a5efad7a3e50ef9bb62ac6c4045a64077a..17cf5b58396b3829313d3f63a29fb65b75a01756 100644
--- a/api/extra_requirements.txt
+++ b/api/requirements.txt
@@ -1,4 +1,5 @@
 # The Api Dependencies (Versions picked in January 2025)
+# Note that this does NOT include the common_py dependencies!
 
 # required
 flask==3.1.0
diff --git a/build_pipeline_generator.py b/build_pipeline_generator.py
new file mode 100644
index 0000000000000000000000000000000000000000..00108a64fee2e1c4ba2a62581f15fcef1506250c
--- /dev/null
+++ b/build_pipeline_generator.py
@@ -0,0 +1,325 @@
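+# This script generates the Dockerfiles (under .dockerfiles/) for all given modules and, when
+# --ci-pipeline-dest is set, a GitLab CI child pipeline that builds, tests and deploys them.
+# Each module directory contains a build_config.py describing its image name, build dependencies,
+# pip/apt requirements and CI test job. A dynamically generated pipeline is needed since GitLab CI
+# is not quite flexible enough to handle the different "development" and "production" builds and to
+# automatically create all the jobs. For development (everything that is not a tag pipeline whose
+# tag starts with "v") the images are put into the registry with the "development" prefix. For
+# production the images get the "production" prefix, which is write-protected, so only maintainers
+# (or pipelines triggered by them) can push new images to production. Kubernetes directly uses
+# those images from the registry.
+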
+import re
+from pathlib import Path
+from argparse import ArgumentParser
+
+
+class ModuleBuildConfig:
+    
+    def __init__(self, context: "BuildContext", module_dir: Path):
+        super().__init__()
+        
+        self.context = context
+        self.module_dir = module_dir.resolve()
+        self.config_file = self.module_dir.joinpath("build_config.py")
+        self.name = str(self.module_dir.relative_to(context.build_dir))
+        
+        config_globals = {}
+        exec(self.config_file.read_text(), config_globals)
+        
+        self.target_image_name = config_globals.pop("TARGET_IMAGE_NAME", None)
+        if self.target_image_name is not None and not isinstance(self.target_image_name, str):
+            raise TypeError("TARGET_IMAGE_NAME must be a str (or absent)")
+        
+        context.add_module(self)
+        
+        self.dependencies: list[ModuleBuildConfig] = []
+        for dependency in config_globals.pop("BUILD_DEPENDENCIES", []):
+            if not isinstance(dependency, str):
+                raise TypeError("BUILD_DEPENDENCIES must be a list of str")
+            dependency_name = str(module_dir.joinpath(Path(dependency)).resolve().relative_to(context.build_dir))
+            self.dependencies.append(context.get_or_load_module(dependency_name))
+        
+        self.pip_req_file = None
+        pip_req_file_name = config_globals.pop("PIP_REQUIREMENTS_FILE", None)
+        if pip_req_file_name is not None:
+            assert isinstance(pip_req_file_name, str)
+            self.pip_req_file = self.module_dir.joinpath(Path(pip_req_file_name))
+            if not self.pip_req_file.is_file():
+                raise ValueError(f"Cannot find pip requirements file {self.pip_req_file}")
+        
+        self.apt_runtime_dependencies = config_globals.pop("APT_RUNTIME_DEPENDENCIES", [])
+        if any(not isinstance(r, str) for r in self.apt_runtime_dependencies):
+            raise TypeError("APT_RUNTIME_DEPENDENCIES must be a list of str")
+        
+        self.apt_build_dependencies = config_globals.pop("APT_BUILD_DEPENDENCIES", [])
+        if any(not isinstance(r, str) for r in self.apt_build_dependencies):
+            raise TypeError("APT_BUILD_DEPENDENCIES must be a list of str")
+        
+        self.dockerfile_extra = config_globals.pop("DOCKERFILE_EXTRA", "")
+        if not isinstance(self.dockerfile_extra, str):
+            raise TypeError("DOCKERFILE_EXTRA must be str (or absent)")
+        self.dockerfile_extra = self.dockerfile_extra.replace(
+            "$MODULE_DIR",
+            str(self.module_dir.relative_to(context.build_dir))
+        )
+        
+        self.ci_test_job_template: str | None = config_globals.pop("CI_TEST_JOB_TEMPLATE", None)
+        if self.ci_test_job_template is not None and not isinstance(self.ci_test_job_template, str):
+            raise TypeError("CI_TEST_JOB_TEMPLATE must be str (or absent)")
+        
+        for g in config_globals.keys():
+            assert isinstance(g, str)
+            if g.isupper():
+                raise ValueError(f"Unknown key {g} in config file")
+    
+    def check_cyclic_dependency(self, dependents_stack: list[str]):
+        if self.name in dependents_stack:
+            raise ValueError(f"Dependency cycle involving {self.name} detected")
+        dependents_stack = dependents_stack + [self.name]
+        for dependency in self.dependencies:
+            dependency.check_cyclic_dependency(dependents_stack)
+    
+    def collect_docker_dependencies(
+            self,
+            pip_requirement_files: list[Path],
+            apt_runtime_dependencies: list[str],
+            apt_build_dependencies: list[str],
+            dockerfile_extras: list[str]
+    ):
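+        # Depth-first: a dependency's pip requirements, apt packages and Dockerfile
+        # snippet are collected before this module's own, so base layers come first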
+        for dep in self.dependencies:
+            dep.collect_docker_dependencies(
+                pip_requirement_files,
+                apt_runtime_dependencies,
+                apt_build_dependencies,
+                dockerfile_extras
+            )
+        
+        if self.pip_req_file is not None:
+            pip_requirement_files.append(self.pip_req_file)
+        
+        apt_runtime_dependencies.extend(self.apt_runtime_dependencies)
+        apt_build_dependencies.extend(self.apt_build_dependencies)
+        dockerfile_extras.append(self.dockerfile_extra)
+    
+    def generate_dockerfile(self):
+        pip_requirement_files: list[Path] = []
+        apt_runtime_dependencies: list[str] = []
+        apt_build_dependencies: list[str] = []
+        dockerfile_extras: list[str] = []
+        self.collect_docker_dependencies(
+            pip_requirement_files,
+            apt_runtime_dependencies,
+            apt_build_dependencies,
+            dockerfile_extras
+        )
+        # Remove duplicates and ensure a deterministic order
+        apt_runtime_dependencies = sorted(set(apt_runtime_dependencies))
+        apt_build_dependencies = sorted(set(apt_build_dependencies))
+        
+        res = f"""\
+#####################################################################
+### WARNING: THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT EDIT ! ###
+#####################################################################
+FROM python:3.13-slim AS base
+
+RUN mkdir -p /code
+
+WORKDIR /code
+
+ENV PIP_CACHE_DIR=/tmp/pip-cache
+RUN apt-get update && apt-get --no-install-recommends install -y {' '.join(apt_runtime_dependencies)}
+"""
+        for i, path in enumerate(pip_requirement_files):
+            res += f"COPY {str(path.relative_to(self.context.build_dir))} /tmp/req-{i}.txt\n"
+        
+        pip_requirement_list_arg = " ".join(f"-r /tmp/req-{i}.txt" for i in range(len(pip_requirement_files)))
+        
+        res += f"""
+FROM base AS builder
+
+# This step builds (and installs) the requirements and also puts the build result into the pip cache
+RUN apt-get update && apt-get --no-install-recommends install -y {' '.join(apt_build_dependencies)}
+RUN pip install {pip_requirement_list_arg}
+
+FROM base AS final
+
+# Here we copy the pip cache with the built packages and install them again. Pip will use the cache and won't need the
+# apt build dependencies. This saves a lot of space, compared to leaving the build dependencies in the final image
+# (reduces the final image size by about half)
+COPY --from=builder /tmp/pip-cache /tmp/pip-cache
+RUN pip install {pip_requirement_list_arg}
+"""
+        for docker in dockerfile_extras:
+            res += "\n" + docker + "\n"
+        
+        return res
+    
+    def generate_ci_jobs(self):
+        self.context.ensure_in_ci()
+        if self.target_image_name is None:
+            raise ValueError("This module has no target image name and therefore cannot be built")
+        return (self._generate_ci_build_job()
+                + "\n" + self._generate_ci_test_job()
+                + "\n" + self._generate_ci_deploy_job())
+    
+    def output_dockerfile_path(self) -> Path:
+        return self.context.build_dir.joinpath(".dockerfiles").joinpath(self.target_image_name)
+    
+    def image_full_name(self):
+        return f"{self.context.env_type()}_{self.target_image_name}"
+    
+    def ci_build_job_name(self):
+        return f"build-{self.target_image_name}"
+    
+    @staticmethod
+    def _get_auth_echo():
+        return """\
+echo "{\\"auths\\":{\\"$CI_REGISTRY\\":{\\"username\\":\\"$CI_REGISTRY_USER\\",\\"password\\":\\"$CI_REGISTRY_PASSWORD\\"}}}" > /kaniko/.docker/config.json\
+"""
+
+    def _generate_ci_build_job(self):
+        kaniko_args = [
+            f"--context=git://git.fsmpi.rwth-aachen.de/videoag/backend.git#{self.context.commit_sha}",
+            f"--dockerfile={str(self.output_dockerfile_path().relative_to(self.context.build_dir))}",
+            f"--git recurse-submodules=true",
+            f"--destination=$CI_REGISTRY_IMAGE/{self.image_full_name()}:{self.context.commit_sha}",
+            f"--build-arg=GIT_COMMIT_SHA={self.context.commit_sha}",
+            f"--cache=true",
+            f"--cache-repo=$CI_REGISTRY_IMAGE/{self.context.env_type()}_cache",
+            f"--cache-copy-layers=true",
+            f"--cache-run-layers=true",
+            f"--verbosity=debug",
+        ]
+        
+        if self.context.commit_tag is not None:
+            kaniko_args.append(f"--build-arg=GIT_COMMIT_TAG={self.context.commit_tag}")
+        
+        return f"""
+{self.ci_build_job_name()}:
+    stage: build-and-test
+    timeout: 3h
+    needs:
+        - pipeline: $PARENT_PIPELINE_ID
+          job: generate-pipeline
+    image:
+        name: gcr.io/kaniko-project/executor:v1.23.2-debug
+        entrypoint: [""]
+    script:
+        - {self._get_auth_echo()}
+        - echo {self.context.commit_sha}
+        - >-
+            /kaniko/executor
+            {"\n            ".join(kaniko_args)}
+"""
+    
+    def _generate_ci_test_job(self):
+        if self.ci_test_job_template is None:
+            return ""
+        assert isinstance(self.ci_test_job_template, str)
+        res = f"""
+test-{self.target_image_name}:
+    stage: build-and-test
+    needs: [{self.ci_build_job_name()}]
+    image:
+        name: $CI_REGISTRY_IMAGE/{self.image_full_name()}:{self.context.commit_sha}
+        entrypoint: [""]
+"""
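+        # Indent the template by one level so it nests under the generated job key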
+        res += "    " + "\n    ".join(self.ci_test_job_template.splitlines()) + "\n"
+        return res
+    
+    def _generate_ci_deploy_job(self):
+        destination_args = [
+            f"--destination=$CI_REGISTRY_IMAGE/{self.image_full_name()}:latest"
+        ]
+        if self.context.is_production:
+            destination_args.append(f"--destination=$CI_REGISTRY_IMAGE/{self.image_full_name()}:{self.context.commit_tag}")
+        
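+        # Retag by building a trivial FROM-only image on top of the commit image,
+        # so kaniko can push the extra tags without a full rebuild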
+        return f"""
+deploy-{self.target_image_name}:
+    stage: deploy
+    timeout: 1h
+    image:
+        name: gcr.io/kaniko-project/executor:v1.23.2-debug
+        entrypoint: [""]
+    script:
+        - {self._get_auth_echo()}
+        - mkdir /workdir
+        - echo "FROM $CI_REGISTRY_IMAGE/{self.image_full_name()}:{self.context.commit_sha}" > /workdir/Dockerfile
+        - >-
+            /kaniko/executor
+            --context=dir:///workdir
+            {"\n            ".join(destination_args)}
+"""
+
+
+class BuildContext:
+    
+    def __init__(self, build_dir: Path, commit_sha: str | None, commit_tag: str | None):
+        super().__init__()
+        self.build_dir = build_dir
+        self.modules: dict[str, ModuleBuildConfig] = {}
+        self._target_image_names: set[str] = set()
+        self.commit_sha = commit_sha or None  # Normalize an empty string to None
+        self.commit_tag = commit_tag or None
+        self.is_production = commit_tag is not None and re.fullmatch("v.*", commit_tag) is not None
+    
+    def env_type(self) -> str:
+        return "production" if self.is_production else "development"
+    
+    def ensure_in_ci(self):
+        if self.commit_sha is None:
+            raise Exception("Not in GitLab CI. No commit sha given")
+    
+    def add_module(self, module: ModuleBuildConfig):
+        if module.target_image_name is not None:
+            if module.target_image_name in self._target_image_names:
+                raise ValueError(f"Duplicate target image name {module.target_image_name}")
+            self._target_image_names.add(module.target_image_name)
+        self.modules[module.name] = module
+    
+    def get_or_load_module(self, name: str):
+        if name in self.modules:
+            return self.modules[name]
+        module_dir = self.build_dir.joinpath(name)
+        try:
+            return ModuleBuildConfig(self, module_dir)
+        except Exception as e:
+            raise Exception(f"Exception while loading module {module_dir}") from e
+    
+    def generate_ci_pipeline(self):
+        self.ensure_in_ci()
+        
+        pipeline = """
+####################################################################
+##### AUTOMATICALLY GENERATED PIPELINE. DO NOT CHANGE MANUALLY! ####
+####################################################################
+
+stages:
+    - build-and-test
+    - deploy
+"""
+        for module in self.modules.values():
+            if module.target_image_name is not None:
+                pipeline += module.generate_ci_jobs()
+        
+        return pipeline
+
+
+def main():
+    parser = ArgumentParser()
+    parser.add_argument("--build-dir", type=Path, default=Path("."))
+    parser.add_argument("--commit-sha", type=str, required=False)
+    parser.add_argument("--commit-tag", type=str, required=False)
+    parser.add_argument("--ci-pipeline-dest", type=Path, required=False)
+    parser.add_argument("modules", nargs="+", type=Path)
+    
+    args = parser.parse_args()
+    
+    context = BuildContext(args.build_dir.resolve(), args.commit_sha, args.commit_tag)
+    for module in args.modules:
+        context.get_or_load_module(str(module.resolve().relative_to(context.build_dir)))
+    
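+    # Fail fast if the module build dependencies form a cycle
+    for module in context.modules.values():
+        module.check_cyclic_dependency([])
+    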
+    for module in context.modules.values():
+        if module.target_image_name is None:
+            continue
+        module.output_dockerfile_path().parent.mkdir(parents=True, exist_ok=True)
+        module.output_dockerfile_path().write_text(module.generate_dockerfile())
+    
+    if args.ci_pipeline_dest is not None:
+        args.ci_pipeline_dest.parent.mkdir(parents=True, exist_ok=True)
+        args.ci_pipeline_dest.write_text(context.generate_ci_pipeline())
+
+
+if __name__ == "__main__":
+    main()
diff --git a/common_py/Dockerfile b/common_py/Dockerfile
deleted file mode 100755
index 14602ba36a85f902fb18268e630bc61af705fdfe..0000000000000000000000000000000000000000
--- a/common_py/Dockerfile
+++ /dev/null
@@ -1,11 +0,0 @@
-FROM python:3.12
-
-WORKDIR /code
-
-RUN mkdir -p /code
-WORKDIR /code
-
-COPY requirements.txt /code
-RUN pip3 install -r requirements.txt
-
-COPY src/videoag_common /code/src/videoag_common
diff --git a/common_py/build_config.py b/common_py/build_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..36eafd4c8c85ae3d8efa169f4494cfb5402891fb
--- /dev/null
+++ b/common_py/build_config.py
@@ -0,0 +1,10 @@
+PIP_REQUIREMENTS_FILE = "requirements.txt"
+APT_RUNTIME_DEPENDENCIES = [
+    "libpq-dev",  # For psycopg
+]
+APT_BUILD_DEPENDENCIES = [
+    "gcc", "python3-dev",
+]
+DOCKERFILE_EXTRA = """
+COPY $MODULE_DIR/src ./src/
+"""
\ No newline at end of file
diff --git a/common_py/requirements.txt b/common_py/requirements.txt
index dd82d0575b3a7fc340cc65ee5a96ada9f2580c4c..c5c985edd26eaca5fc05162d63375b3310bd8a61 100644
--- a/common_py/requirements.txt
+++ b/common_py/requirements.txt
@@ -1,8 +1,8 @@
-# The common Py Dependencies (Versions picked in May 2024)
+# The common Py Dependencies (Versions picked in January 2025)
 
-psycopg[c]==3.1.19
-sqlalchemy==2.0.31
+psycopg[c]==3.2.4
+sqlalchemy==2.0.37
 
 # required for testing
-coverage==7.5.1
-pylint==3.2.0
+coverage==7.6.10
+pylint==3.3.4
\ No newline at end of file
diff --git a/dev/install_requirements.sh b/dev/install_requirements.sh
new file mode 100755
index 0000000000000000000000000000000000000000..91193258133baf24a65b36aebd330a9e2b368857
--- /dev/null
+++ b/dev/install_requirements.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
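+# Installs all backend Python dependencies for local development (run from within dev/)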
+pip install -r ../common_py/requirements.txt -r ../api/requirements.txt -r ../job_controller/requirements.txt
diff --git a/api/docker-compose.yaml b/docker-compose.yaml
similarity index 54%
rename from api/docker-compose.yaml
rename to docker-compose.yaml
index 072035d495718d9c2ce1b2449ca97317db51f1df..b792ee86933ca4a4693884d837d8db39209e8c15 100644
--- a/api/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -1,10 +1,10 @@
 services:
-  videoag_api:
-    build: .
+  api:
+    build:
+      context: .
+      dockerfile: .dockerfiles/api
     ports:
       - 5000:5000
-    volumes:
-      - ./:/code
     extra_hosts:
       host.docker.internal: host-gateway
     environment:
@@ -12,22 +12,39 @@ services:
       - VIDEOAG_API_LIVE_CONFIG=../config/live_config.json
       - VIDEOAG_UWSGI_CONFIG=/code/config/uwsgi_example.ini
     depends_on:
-      database:
+      db:
+        condition: service_healthy
+
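+  # Local test runner for the API image; enabled via the "test" profile
+  # (e.g. docker compose --profile test up api-test)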
+  api-test:
+    profiles: ["test"]
+    build:
+      context: .
+      dockerfile: .dockerfiles/api
+    extra_hosts:
+      host.docker.internal: host-gateway
+    environment:
+      - VIDEOAG_CONFIG=../config/api_example_config.py
+    volumes:
+      - ./api/coverage/:/code/coverage
+    command: -test
+    depends_on:
+      db:
         condition: service_healthy
-  database:
+
+  db:
     image: postgres:16-alpine
     environment:
       - POSTGRES_USER=videoag
       - POSTGRES_PASSWORD=videoag
     ports:
-      - "9343:5432"
+      - "5432:5432"
     volumes:
-      - .data/:/var/lib/postgresql/data
+      - db-data:/var/lib/postgresql/data
     healthcheck:
       test: ["CMD-SHELL", "pg_isready -U videoag"]
       interval: 2s
       timeout: 5s
       retries: 5
+
 volumes:
-  database:
-    driver: local
+  db-data:
diff --git a/generate_ci_pipeline.py b/generate_ci_pipeline.py
deleted file mode 100644
index f8b6f9792c9eec247e9521e4f60b47e9eb6c2d36..0000000000000000000000000000000000000000
--- a/generate_ci_pipeline.py
+++ /dev/null
@@ -1,234 +0,0 @@
-import os
-import re
-import traceback
-from dataclasses import dataclass
-from pathlib import Path
-
-
-# This file dynamically generates a GitLab CI pipeline. This is needed since the GitLab CI is not quite flexible enough
-# to handle the different "development" and "production" builds and to automatically build all the jobs.
-# For development (everything that is not a tag pipeline whose tag starts with v) the images are put into the registry
-# with the "development" prefix. For production the images are put into the registry with the "production" prefix and
-# that prefix is write-protected, so only maintainers (or pipelines triggered by them) can push new images to production.
-# Kubernetes directly uses those images from the registry.
-
-
-class BuildContext:
-    
-    def __init__(self, commit_sha: str, commit_tag: str or None):
-        super().__init__()
-        self.commit_sha = commit_sha
-        self.commit_tag = commit_tag
-        self.is_production = commit_tag is not None and re.fullmatch("v.*", commit_tag) is not None
-        self.targets: dict[str, "ImageTarget"] = {}
-    
-    def env_type(self) -> str:
-        return "production" if self.is_production else "development"
-    
-    def add_image_target(self,
-                         image_name: str,
-                         dependency_targets: list["ImageTarget"],
-                         context_sub_path: str,
-                         only_intermediate: bool
-                         ) -> "ImageTarget":
-        target = ImageTarget(
-            self,
-            image_name,
-            dependency_targets,
-            context_sub_path,
-            only_intermediate
-        )
-        self.targets[target.image_name] = target
-        return target
-
-
-@dataclass
-class ImageTarget:
-    context: BuildContext
-    image_name: str
-    dependency_targets: list["ImageTarget" or str]
-    context_sub_path: str
-    only_intermediate: bool
-    
-    def full_name(self):
-        return f"{self.context.env_type()}_{self.image_name}"
-    
-    def versioned_full_name(self):
-        return f"{self.full_name()}:{self.context.commit_sha}"
-    
-    def build_job_name(self):
-        return f"build-{self.image_name}"
-    
-    def deploy_job_name(self):
-        return f"deploy-{self.image_name}"
-    
-    def _validate(self):
-        for i in range(len(self.dependency_targets)):
-            target = self.dependency_targets[i]
-            if isinstance(target, ImageTarget):
-                if target not in self.context.targets.values():
-                    raise ValueError(f"Unknown target {target.image_name} (object not in context)")
-            else:
-                assert isinstance(target, str)
-                if target not in self.context.targets:
-                    raise ValueError(f"Unknown target {target}")
-                self.dependency_targets[i] = self.context.targets[target]
-    
-    def gen_jobs(self):
-        self._validate()
-        return self._gen_build_job() + self._gen_deploy_job()
-    
-    @staticmethod
-    def _get_auth_echo():
-        return """\
-echo "{\\"auths\\":{\\"$CI_REGISTRY\\":{\\"username\\":\\"$CI_REGISTRY_USER\\",\\"password\\":\\"$CI_REGISTRY_PASSWORD\\"}}}" > /kaniko/.docker/config.json\
-"""
-    
-    def _gen_build_job(self):
-        kaniko_args = [
-            f"--context=git://git.fsmpi.rwth-aachen.de/videoag/backend.git#{self.context.commit_sha}",
-            f"--context-sub-path={self.context_sub_path}",
-            f"--git recurse-submodules=true",
-            f"--destination=$CI_REGISTRY_IMAGE/{self.versioned_full_name()}",
-            f"--build-arg=GIT_COMMIT_SHA={self.context.commit_sha}",
-            f"--build-arg=ENV_TYPE={self.context.env_type()}",
-            f"--cache=true",
-        ]
-        
-        if self.context.commit_tag is not None:
-            kaniko_args.append(f"--build-arg=GIT_COMMIT_TAG={self.context.commit_tag}")
-        
-        return f"""
-{self.build_job_name()}:
-    stage: build-and-test
-    needs: [{",".join(t.build_job_name() for t in self.dependency_targets)}]
-    timeout: 1h
-    image:
-        name: gcr.io/kaniko-project/executor:v1.23.2-debug
-        entrypoint: [""]
-    script:
-        - {self._get_auth_echo()}
-        - echo {self.context.commit_sha}
-        - >-
-            /kaniko/executor
-            {"\n            ".join(kaniko_args)}
-"""
-    
-    def _gen_deploy_job(self):
-        if self.only_intermediate:
-            return ""
-        
-        destination_args = [
-            f"--destination=$CI_REGISTRY_IMAGE/{self.full_name()}:latest"
-        ]
-        if self.context.is_production:
-            destination_args.append(f"--destination=$CI_REGISTRY_IMAGE/{self.full_name()}:{self.context.commit_tag}")
-        
-        return f"""
-{self.deploy_job_name()}:
-    stage: deploy
-    timeout: 1h
-    image:
-        name: gcr.io/kaniko-project/executor:v1.23.2-debug
-        entrypoint: [""]
-    script:
-        - {self._get_auth_echo()}
-        - mkdir /workdir
-        - echo "FROM registry.git.fsmpi.rwth-aachen.de/videoag/backend/{self.versioned_full_name()} > /workdir/Dockerfile"
-        - echo {self.context.commit_sha}
-        - >-
-            /kaniko/executor
-            --context=dir:///workdir
-            {"\n            ".join(destination_args)}
-"""
-
-
-def gen_test_api(context: BuildContext) -> str:
-    return f"""
-run-api-tests:
-    stage: build-and-test
-    needs: [build-api]
-    timeout: 30m
-    variables:
-        VIDEOAG_CONFIG: /code/config/api_example_config.py
-    image:
-        name: registry.git.fsmpi.rwth-aachen.de/videoag/backend/{context.env_type()}_api:{context.commit_sha}
-        entrypoint: [""]
-    script:
-        - cd /code
-        - /code/docker_start.sh -test
-    artifacts:
-        paths:
-            - /code/coverage/report.txt
-            - /code/coverage/html/*
-    services:
-        - name: postgres:17
-          alias: ci-database
-          variables:
-              POSTGRES_USER: videoagtest
-              POSTGRES_PASSWORD: LetMeTest...
-
-"""
-
-
-def gen_pipeline(context: BuildContext) -> str:
-    pipeline = """
-stages:
-    - build-and-test
-    - deploy
-"""
-    
-    for target in context.targets.values():
-        pipeline += target.gen_jobs()
-    
-    pipeline += gen_test_api(context)
-    
-    return pipeline
-
-
-def main():
-    commit_sha = os.environ["CI_COMMIT_SHA"]
-    if not isinstance(commit_sha, str) or commit_sha == "":
-        raise ValueError("Empty or invalid commit sha")
-    
-    commit_tag = os.environ.get("CI_COMMIT_TAG", None)
-    if commit_tag == "":
-        commit_tag = None
-    if commit_tag is not None and not isinstance(commit_tag, str):
-        raise ValueError("Invalid commit tag")
-    
-    context = BuildContext(commit_sha, commit_tag)
-    
-    common_py = context.add_image_target("common_py", [], "common_py/", True)
-    context.add_image_target("api", [common_py], "api/", False)
-    context.add_image_target("job_controller", [common_py], "job_controller/", False)
-    
-    for job_dir in Path("job_controller/jobs").iterdir():
-        assert isinstance(job_dir, Path)
-        job_name = job_dir.name
-        
-        job_dir = Path("job_controller/jobs").joinpath(job_name)
-        dockerfile = job_dir.joinpath("Dockerfile")
-        pattern = "FROM registry\\.git\\.fsmpi\\.rwth-aachen\\.de\\/videoag\\/backend/\\$\\{ENV_TYPE}_([a-zA-Z0-9-_]+):\\$\\{GIT_COMMIT_SHA}"
-        matches = re.findall(pattern, dockerfile.read_text())
-        if len(matches) != 1:
-            raise Exception(f"{dockerfile}: Unable to determine base image for pipeline dependencies. Cannot find"
-                            f"special FROM instruction (Or found multiple) (See other job's images)")
-        base_image_name = matches[0]
-        
-        context.add_image_target(
-            f"job_{job_name}",
-            [base_image_name],
-            f"job_controller/jobs/{job_dir.name}",
-            not job_dir.joinpath("metadata.json").exists()
-        )
-    
-    Path("child-pipeline.yml").write_text(gen_pipeline(context))
-
-
-if __name__ == "__main__":
-    try:
-        main()
-    except Exception as e:
-        traceback.print_exception(e)
-        exit(-1)
diff --git a/generate_dockerfiles.sh b/generate_dockerfiles.sh
new file mode 100755
index 0000000000000000000000000000000000000000..49dd60c6eeb117125fc1f322a7bb5c5692985b58
--- /dev/null
+++ b/generate_dockerfiles.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
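+# Regenerates the Dockerfiles under .dockerfiles/ for local builds (no --ci-pipeline-dest,
+# so no CI pipeline is written)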
+python build_pipeline_generator.py api job_controller $(find job_controller/jobs/ -mindepth 1 -maxdepth 1)
diff --git a/job_controller/Dockerfile b/job_controller/Dockerfile
deleted file mode 100644
index b101e64d7566295e008df555c9e08d47be685c22..0000000000000000000000000000000000000000
--- a/job_controller/Dockerfile
+++ /dev/null
@@ -1,29 +0,0 @@
-# Can be "development" or "production"
-ARG ENV_TYPE
-ARG GIT_COMMIT_SHA
-FROM registry.git.fsmpi.rwth-aachen.de/videoag/backend/${ENV_TYPE}_common_py:${GIT_COMMIT_SHA}
-
-# Empty by default
-ARG GIT_COMMIT_TAG=
-
-ENV VIDEOAG_JOB_CONTROLLER_GIT_COMMIT_HASH $GIT_COMMIT_SHA
-ENV VIDEOAG_JOB_CONTROLLER_GIT_COMMIT_TAG $GIT_COMMIT_TAG
-
-COPY extra_requirements.txt ./
-RUN pip install -r extra_requirements.txt
-
-COPY jobs ./jobs/
-
-# The source has a symlink file at src/videoag_common
-# The actual files are already in the image at src/videoag_common
-# So we move the actual files temporarily, copy the src directory, remove the symlink and move the actual files back
-# In the future, COPY --exclude src/videoag_common might work (but right now it doesn't, some "failed to compute cache key")
-RUN mv src/videoag_common src/.temp
-COPY src/ ./src/
-RUN rm src/videoag_common
-RUN mv src/.temp src/videoag_common
-
-
-WORKDIR src
-
-CMD ["python", "run.py"]
diff --git a/job_controller/build_config.py b/job_controller/build_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..83718b17c46a7039c44595fe4039ee4481ab12b6
--- /dev/null
+++ b/job_controller/build_config.py
@@ -0,0 +1,12 @@
+TARGET_IMAGE_NAME = "job_controller"
+BUILD_DEPENDENCIES = ["../common_py/"]
+
+PIP_REQUIREMENTS_FILE = "requirements.txt"
+DOCKERFILE_EXTRA = """
+COPY $MODULE_DIR/src ./src/
+COPY $MODULE_DIR/jobs/ ./jobs/
+
+WORKDIR src
+
+CMD ["python", "run.py"]
+"""
\ No newline at end of file
diff --git a/job_controller/extra_requirements.txt b/job_controller/extra_requirements.txt
deleted file mode 100644
index 1f048bb3fc1b068454acc8d9088349fa4a12a851..0000000000000000000000000000000000000000
--- a/job_controller/extra_requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-requests
-kubernetes==27.2.0
diff --git a/job_controller/jobs/base/Dockerfile b/job_controller/jobs/base/Dockerfile
deleted file mode 100644
index 739fd186e059c630e993921f47d993c851878ad4..0000000000000000000000000000000000000000
--- a/job_controller/jobs/base/Dockerfile
+++ /dev/null
@@ -1,10 +0,0 @@
-# Can be "development" or "production"
-ARG ENV_TYPE
-ARG GIT_COMMIT_SHA
-FROM registry.git.fsmpi.rwth-aachen.de/videoag/backend/${ENV_TYPE}_common_py:${GIT_COMMIT_SHA}
-
-COPY . src/
-
-WORKDIR src/
-
-ENTRYPOINT ["python", "run.py"]
diff --git a/job_controller/jobs/base/build_config.py b/job_controller/jobs/base/build_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..ef232b9719f00b8cfa9bd4bc7d129f22bf2ff11d
--- /dev/null
+++ b/job_controller/jobs/base/build_config.py
@@ -0,0 +1,7 @@
+BUILD_DEPENDENCIES = ["../../../common_py/"]
+
+DOCKERFILE_EXTRA = """
+COPY $MODULE_DIR/ ./src/
+
+CMD ["python", "run.py"]
+"""
\ No newline at end of file
diff --git a/job_controller/jobs/ffmpeg_base/Dockerfile b/job_controller/jobs/ffmpeg_base/Dockerfile
deleted file mode 100644
index 3817a9e873f647af6531099cbbd0eac8362efe98..0000000000000000000000000000000000000000
--- a/job_controller/jobs/ffmpeg_base/Dockerfile
+++ /dev/null
@@ -1,9 +0,0 @@
-# We have this image to avoid installing ffmpeg multiple times
-
-# Can be "development" or "production"
-ARG ENV_TYPE
-ARG GIT_COMMIT_SHA
-FROM registry.git.fsmpi.rwth-aachen.de/videoag/backend/${ENV_TYPE}_job_base:${GIT_COMMIT_SHA}
-
-RUN apt-get update && apt-get install -y ffmpeg
-
diff --git a/job_controller/jobs/media_process_scheduler/Dockerfile b/job_controller/jobs/media_process_scheduler/Dockerfile
deleted file mode 100644
index 62c1502bf773250f8201fc3d16b6b52140549072..0000000000000000000000000000000000000000
--- a/job_controller/jobs/media_process_scheduler/Dockerfile
+++ /dev/null
@@ -1,6 +0,0 @@
-# Can be "development" or "production"
-ARG ENV_TYPE
-ARG GIT_COMMIT_SHA
-FROM registry.git.fsmpi.rwth-aachen.de/videoag/backend/${ENV_TYPE}_job_base:${GIT_COMMIT_SHA}
-
-COPY . .
diff --git a/job_controller/jobs/media_process_scheduler/build_config.py b/job_controller/jobs/media_process_scheduler/build_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..e73ed5960bc7ac739b51ee252e3f074f3a11b9aa
--- /dev/null
+++ b/job_controller/jobs/media_process_scheduler/build_config.py
@@ -0,0 +1,6 @@
+TARGET_IMAGE_NAME = "job_media_process_scheduler"
+BUILD_DEPENDENCIES = ["../base/"]
+
+DOCKERFILE_EXTRA = """
+COPY $MODULE_DIR/job.py ./src/
+"""
\ No newline at end of file
diff --git a/job_controller/jobs/sample_thumbnail/Dockerfile b/job_controller/jobs/sample_thumbnail/Dockerfile
deleted file mode 100644
index 4a7248d245c12cf539aec8288fb1f1024a259aaa..0000000000000000000000000000000000000000
--- a/job_controller/jobs/sample_thumbnail/Dockerfile
+++ /dev/null
@@ -1,6 +0,0 @@
-# Can be "development" or "production"
-ARG ENV_TYPE
-ARG GIT_COMMIT_SHA
-FROM registry.git.fsmpi.rwth-aachen.de/videoag/backend/${ENV_TYPE}_job_ffmpeg_base:${GIT_COMMIT_SHA}
-
-COPY . /app/src
diff --git a/job_controller/jobs/sample_thumbnail/build_config.py b/job_controller/jobs/sample_thumbnail/build_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3d83b260044ce6b52d23831f34b16ee09fdde5d
--- /dev/null
+++ b/job_controller/jobs/sample_thumbnail/build_config.py
@@ -0,0 +1,7 @@
+TARGET_IMAGE_NAME = "job_sample_thumbnail"
+BUILD_DEPENDENCIES = ["../base/"]
+
+APT_RUNTIME_DEPENDENCIES = ["ffmpeg"]
+DOCKERFILE_EXTRA = """
+COPY $MODULE_DIR/job.py ./src/
+"""
\ No newline at end of file
diff --git a/job_controller/jobs/source_file_sorter/Dockerfile b/job_controller/jobs/source_file_sorter/Dockerfile
deleted file mode 100644
index 4a7248d245c12cf539aec8288fb1f1024a259aaa..0000000000000000000000000000000000000000
--- a/job_controller/jobs/source_file_sorter/Dockerfile
+++ /dev/null
@@ -1,6 +0,0 @@
-# Can be "development" or "production"
-ARG ENV_TYPE
-ARG GIT_COMMIT_SHA
-FROM registry.git.fsmpi.rwth-aachen.de/videoag/backend/${ENV_TYPE}_job_ffmpeg_base:${GIT_COMMIT_SHA}
-
-COPY . /app/src
diff --git a/job_controller/jobs/source_file_sorter/build_config.py b/job_controller/jobs/source_file_sorter/build_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..14c9d521d6af7af32b7f00486841e7e0eca0658b
--- /dev/null
+++ b/job_controller/jobs/source_file_sorter/build_config.py
@@ -0,0 +1,7 @@
+TARGET_IMAGE_NAME = "job_source_file_sorter"
+BUILD_DEPENDENCIES = ["../base/"]
+
+APT_RUNTIME_DEPENDENCIES = ["ffmpeg"]
+DOCKERFILE_EXTRA = """
+COPY $MODULE_DIR/job.py ./src/
+"""
\ No newline at end of file
diff --git a/job_controller/requirements.txt b/job_controller/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..a60cc87931a5ef4d8c171ce479f9ab8c876b9291
--- /dev/null
+++ b/job_controller/requirements.txt
@@ -0,0 +1,5 @@
+# The Job Controller Dependencies (Versions picked in February 2025)
+# Note that this does NOT include the common_py dependencies!
+
+requests==2.32.3
+kubernetes==32.0.1