diff --git a/.gitignore b/.gitignore
index c45b68fee4b24637ad479902870e07ddc6066f21..2716dd5328320636fbd1a575c4e99b0c471b4f78 100644
--- a/.gitignore
+++ b/.gitignore
@@ -355,4 +355,12 @@ poetry.toml
 # LSP config files
 pyrightconfig.json
 
-# End of https://www.toptal.com/developers/gitignore/api/jetbrains,python,pycharm+all,jupyternotebooks
\ No newline at end of file
+# End of https://www.toptal.com/developers/gitignore/api/jetbrains,python,pycharm+all,jupyternotebooks
+
+# helm build artifacts
+*.tgz
+
+
+*.maed
+*.zip
+*.sqlite
\ No newline at end of file
diff --git a/.idea/superset-mitm-service.iml b/.idea/superset-mitm-service.iml
index 1b52fbbcf02bf1e98b69b7d254edbdb9f307a4e3..f8f3d905955451af5409069ea036cafc3baed028 100644
--- a/.idea/superset-mitm-service.iml
+++ b/.idea/superset-mitm-service.iml
@@ -3,8 +3,9 @@
   <component name="NewModuleRootManager">
     <content url="file://$MODULE_DIR$">
       <sourceFolder url="file://$MODULE_DIR$/test" isTestSource="true" />
+      <excludeFolder url="file://$MODULE_DIR$/.venv" />
     </content>
-    <orderEntry type="jdk" jdkName="Poetry (superset-mitm-service)" jdkType="Python SDK" />
+    <orderEntry type="jdk" jdkName="uv (superset-mitm-service)" jdkType="Python SDK" />
     <orderEntry type="sourceFolder" forTests="false" />
   </component>
 </module>
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index c68aeb1506959a21a88b661bb7b9dc97972136cd..5af1cfaf30e6504d5e0e7d9a311f0db222e5ef77 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -28,6 +28,6 @@ RUN mkdir /code/db
 # CMD ["python", "-m", "/code/app"]
 # escape=\
 # apparently allergic to env vars "\$API_PORT", "--root-path", "\$API_PREFIX"]
-CMD "/code/startup.sh"
+CMD ["/code/startup.sh"]
 # CMD ["sh", "-c", "fastapi run app/main.py"]
 # CMD "fastapi run app/main.py --port $API_PORT"
\ No newline at end of file
diff --git a/app/db/adapters.py b/app/db/adapters.py
index 2fdc0d107617fa67d4ffc6d9f2e6f94beee0d5b7..20a1af967e581766ddb05a62aafb37262ae500c8 100644
--- a/app/db/adapters.py
+++ b/app/db/adapters.py
@@ -11,7 +11,7 @@ class PydanticType(sa.types.TypeDecorator, Generic[T]):
     """Pydantic type.
     SAVING:
     - Uses SQLAlchemy JSON type under the hood.
-    - Acceps the pydantic model and converts it to a dict on save.
+    - Accepts the pydantic model and converts it to a dict on save.
     - SQLAlchemy engine JSON-encodes the dict to a string.
     RETRIEVING:
     - Pulls the string from the database.
diff --git a/app/db/logic.py b/app/db/logic.py
new file mode 100644
index 0000000000000000000000000000000000000000..476280e6cecedaa0f41a64bf0fd920b6f038f4b6
--- /dev/null
+++ b/app/db/logic.py
@@ -0,0 +1,4 @@
+from app.db.models import AddTrackedMitMDataset
+from app.routes.mitm_dataset.requests import AddTrackedMitMDatasetRequest
+
+
diff --git a/app/routers/__init__.py b/app/db/migration/__init__.py
similarity index 100%
rename from app/routers/__init__.py
rename to app/db/migration/__init__.py
diff --git a/app/db/migration/migrate_schema.py b/app/db/migration/migrate_schema.py
new file mode 100644
index 0000000000000000000000000000000000000000..c756dd11520516707612ff8dfb6cf7b61bba9c09
--- /dev/null
+++ b/app/db/migration/migrate_schema.py
@@ -0,0 +1,112 @@
+import os
+import tempfile
+from alembic.config import Config
+from alembic import command
+from sqlalchemy import create_engine
+import sqlalchemy as sa
+
+DATABASE_URL = "postgresql://user:password@host:port/dbname"
+
+def run_dynamic_migration(sa_url: sa.URL, target_metadata: sa.MetaData):
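+    """Autogenerate an Alembic revision against the given database and apply it, using a temporary one-off Alembic environment."""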
+    # Create a temporary directory to hold a minimal Alembic environment
+    with tempfile.TemporaryDirectory() as temp_dir:
+        # Create a temporary alembic.ini file
+        alembic_ini_path = os.path.join(temp_dir, "alembic.ini")
+        with open(alembic_ini_path, "w") as f:
+            f.write(f"""
+[alembic]
+script_location = {temp_dir}/migrations
+sqlalchemy.url = {sa_url.render_as_string(hide_password=False)}
+
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+propagate = 0
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+propagate = 0
+""")
+        # Set up a minimal migrations directory with an env.py
+        migrations_dir = os.path.join(temp_dir, "migrations")
+        os.makedirs(migrations_dir, exist_ok=True)
+        versions_dir = os.path.join(migrations_dir, "versions")
+        os.makedirs(versions_dir, exist_ok=True)
+
+        # Create an env.py file that tells Alembic about your target metadata
+        env_py_path = os.path.join(migrations_dir, "env.py")
+        with open(env_py_path, "w") as f:
+            f.write(f"""
+from alembic import context
+from sqlalchemy import engine_from_config, pool
+from logging.config import fileConfig
+import sys
+import os
+sys.path.insert(0, os.getcwd())
+
+# Import your target metadata (adjust the import as necessary)
+from app.db.models import SQLModel
+target_metadata = SQLModel.metadata
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+fileConfig(config.config_file_name)
+
+target_metadata = target_metadata
+
+def run_migrations_offline():
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(url=url, target_metadata=target_metadata, literal_binds=True)
+    with context.begin_transaction():
+        context.run_migrations()
+
+def run_migrations_online():
+    connectable = engine_from_config(
+        config.get_section(config.config_ini_section),
+        prefix="sqlalchemy.",
+        poolclass=pool.NullPool)
+    with connectable.connect() as connection:
+        context.configure(connection=connection, target_metadata=target_metadata)
+        with context.begin_transaction():
+            context.run_migrations()
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
+""")
+        # Create an Alembic configuration object
+        alembic_cfg = Config(alembic_ini_path)
+        # Set the target metadata programmatically
+        alembic_cfg.attributes['target_metadata'] = target_metadata
+
+        # Autogenerate a new revision (i.e. compute the diff)
+        # Note: This creates a new revision file under the temporary migrations/versions folder.
+        command.revision(alembic_cfg, message="Dynamic migration", autogenerate=True)
+
+        # Apply the migration to upgrade the DB schema to head (i.e. the newly generated revision)
+        command.upgrade(alembic_cfg, "head")
+
+# Call the function at runtime when needed
+if __name__ == "__main__":
+    from app.db.models import SQLModel
+    engine = create_engine(DATABASE_URL)  # optionally, verify connectivity here
+    run_dynamic_migration(engine.url, SQLModel.metadata)
diff --git a/app/db/models.py b/app/db/models.py
index 6dc3dd28018980af1e9f15b7d5f4062a56e40ec9..c1182252a3e47d9809d2519250673d0a117e23c7 100644
--- a/app/db/models.py
+++ b/app/db/models.py
@@ -1,14 +1,18 @@
 import uuid
+from datetime import datetime
 from uuid import UUID
 
 import pydantic
+import sqlmodel
 from mitm_tooling.representation import Header, SQLRepresentationSchema, mk_sql_rep_schema
-from mitm_tooling.transformation.superset.common import SupersetDBConnectionInfo
-from mitm_tooling.transformation.superset.definitions.mitm_dataset import MitMDatasetIdentifier
+from mitm_tooling.transformation.superset.asset_bundles import MitMDatasetIdentifierBundle, DatasourceIdentifierBundle
+from mitm_tooling.transformation.superset.common import DBConnectionInfo
+from mitm_tooling.transformation.superset.definitions.mitm_dataset import MitMDatasetIdentifier, SupersetMitMDatasetDef
 from pydantic import AnyUrl, BaseModel
 from sqlmodel import SQLModel, Field
 
 from app.db.adapters import PydanticType, StrType
+from app.utils.response_utils import FromBaseMixin
 
 APPLICATION_DB_SCHEMA = 'main'  # 'APPLICATION_DB'
 
@@ -20,7 +24,8 @@ class AddTrackedMitMDataset(BaseModel):
     sql_alchemy_uri: AnyUrl
     mitm_header: Header
 
-class TrackedMitMDataset(SQLModel, table=True):
+
+class TrackedMitMDataset(FromBaseMixin, AddTrackedMitMDataset, SQLModel, table=True):
     model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)
     __tablename__ = 'uploaded_mitm_datasets'
     __table_args__ = {'schema': APPLICATION_DB_SCHEMA}
@@ -29,18 +34,26 @@ class TrackedMitMDataset(SQLModel, table=True):
     uuid: UUID = Field(primary_key=True, default_factory=uuid.uuid4)
     dataset_name: str = Field()
     schema_name: str = Field()
-    is_local: bool = Field(default=True, nullable=False)
     sql_alchemy_uri: AnyUrl = Field(sa_type=StrType.wrap(AnyUrl))
+    is_managed_locally: bool = Field(default=True)
+    last_edited: datetime = Field(sa_type=sqlmodel.DateTime, default_factory=datetime.now)
+
     mitm_header: Header = Field(sa_type=PydanticType.wrap(Header), repr=False)
+    identifier_bundle: MitMDatasetIdentifierBundle = Field(sa_type=PydanticType.wrap(MitMDatasetIdentifierBundle), repr=False)
 
     @property
     def identifier(self) -> MitMDatasetIdentifier:
-        return MitMDatasetIdentifier(dataset_name=self.dataset_name, uuid=self.uuid)
+        return self.identifier_bundle.mitm_dataset
 
     @property
-    def superset_connection_info(self) -> SupersetDBConnectionInfo:
-        return SupersetDBConnectionInfo(sql_alchemy_uri=self.sql_alchemy_uri, schema_name=self.schema_name)
+    def datasource_identifiers(self) -> DatasourceIdentifierBundle:
+        return self.identifier_bundle
+
+    @property
+    def superset_connection_info(self) -> DBConnectionInfo:
+        return DBConnectionInfo(sql_alchemy_uri=self.sql_alchemy_uri, schema_name=self.schema_name)
 
     @property
     def sql_rep_schema(self) -> SQLRepresentationSchema:
         return mk_sql_rep_schema(self.mitm_header, override_schema=self.schema_name)
+
diff --git a/app/db/setup.py b/app/db/setup.py
index 95abed8e48c714ebb2742f0741a5564e10c8ec37..81ada95e434d397cb0b1f861a12b77e87fb5416b 100644
--- a/app/db/setup.py
+++ b/app/db/setup.py
@@ -2,7 +2,7 @@ import logging
 
 import sqlalchemy as sa
 from mitm_tooling.utilities.python_utils import pick_from_mapping
-from sqlalchemy import create_engine, inspect
+from sqlalchemy import create_engine, inspect, Engine
 from sqlalchemy.orm import Session
 
 from .utils import create_schema
@@ -28,5 +28,4 @@ def init_db():
             conn.commit()
         SQLModel.metadata.create_all(conn, checkfirst=True)
         conn.commit()
-   # with Session(engine) as session:
-
+   # with Session(engine) as session:
\ No newline at end of file
diff --git a/app/db/utils.py b/app/db/utils.py
index 300acd5b2fa196c7adea488a508dd1497e1d7868..269b425375e11fe0d02e2d55c75a4326dd7dcfd1 100644
--- a/app/db/utils.py
+++ b/app/db/utils.py
@@ -11,12 +11,12 @@ from .models import TrackedMitMDataset
 
 
 def infer_uploaded_mitm_dataset_schema(engine: sa.Engine, mitm_dataset_uuid: UUID) -> DBMetaInfo | None:
-    upload_info = None
     with sqlmodel.Session(engine) as session:
-        upload_info = session.get(TrackedMitMDataset, (mitm_dataset_uuid,))
-    if upload_info is not None:
-        meta, _ = connect_and_reflect(engine, allowed_schemas={upload_info.schema_name})
-        return DBMetaInfo.from_sa_meta(meta, default_schema=upload_info.schema_name)
+        model = session.get(TrackedMitMDataset, (mitm_dataset_uuid,))
+        if model is not None:
+            model.datasource_identifiers
+            meta, _ = connect_and_reflect(engine, allowed_schemas={model.schema_name})
+            return DBMetaInfo.from_sa_meta(meta, default_schema=model.schema_name)
 
 
 def create_schema(conn_or_sess: sa.Connection | Session, unique_schema_name: str) -> None:
@@ -25,3 +25,4 @@ def create_schema(conn_or_sess: sa.Connection | Session, unique_schema_name: str
         conn_or_sess.execute(sa.text(f"ATTACH DATABASE ':memory:' AS {unique_schema_name}"))
     else:
         conn_or_sess.execute(CreateSchema(unique_schema_name, if_not_exists=False))
+
diff --git a/app/routes/definitions/generate.py b/app/routes/definitions/generate.py
new file mode 100644
index 0000000000000000000000000000000000000000..3437db8077f5fe53f7a4908c2f4390174f111da7
--- /dev/null
+++ b/app/routes/definitions/generate.py
@@ -0,0 +1,31 @@
+from mitm_tooling.representation import Header
+from mitm_tooling.transformation.superset import mk_superset_mitm_dataset_bundle, MAEDVisualizationType, \
+    mk_superset_datasource_bundle
+from mitm_tooling.transformation.superset.asset_bundles import MitMDatasetIdentifierBundle, SupersetMitMDatasetBundle, \
+    SupersetDatasourceBundle
+from mitm_tooling.transformation.superset.common import DBConnectionInfo
+
+
+def mk_datasource_bundle(mitm_header: Header,
+                         db_conn_info: DBConnectionInfo,
+                         identifiers: MitMDatasetIdentifierBundle | None = None,
+                         ) -> SupersetDatasourceBundle:
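+    """Thin wrapper around mk_superset_datasource_bundle."""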
+    datasource_bundle = mk_superset_datasource_bundle(mitm_header,
+                                                      db_conn_info,
+                                                      identifiers)
+    return datasource_bundle
+
+
+def mk_mitm_dataset_bundle(mitm_header: Header,
+                           db_conn_info: DBConnectionInfo,
+                           dataset_name: str,
+                           identifiers: MitMDatasetIdentifierBundle | None = None,
+                           include_visualizations: bool = False) -> SupersetMitMDatasetBundle:
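+    """Thin wrapper around mk_superset_mitm_dataset_bundle; optionally requests the baseline MAED visualizations."""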
+    visualization_types = [MAEDVisualizationType.Baseline] if include_visualizations else None
+    mitm_dataset_bundle = mk_superset_mitm_dataset_bundle(mitm_header,
+                                                          db_conn_info,
+                                                          dataset_name,
+                                                          identifiers,
+                                                          visualization_types=visualization_types)
+    return mitm_dataset_bundle
diff --git a/app/routes/definitions/requests.py b/app/routes/definitions/requests.py
index 08cf67ebb9afa5d1e5285d27e97cfa750d47d0e3..875c630ed4b66b19d522ee45dcbf9603d95442ea 100644
--- a/app/routes/definitions/requests.py
+++ b/app/routes/definitions/requests.py
@@ -1,10 +1,13 @@
 import pydantic
 from mitm_tooling.representation import Header
-from mitm_tooling.transformation.superset.common import SupersetDBConnectionInfo
+from mitm_tooling.transformation.superset.asset_bundles import MitMDatasetIdentifierBundle
+from mitm_tooling.transformation.superset.common import DBConnectionInfo
+from mitm_tooling.transformation.superset.definitions import StrUUID
 from mitm_tooling.transformation.superset.definitions.mitm_dataset import MitMDatasetIdentifier
 
 
 class GenerateMitMDatasetDefinitionRequest(pydantic.BaseModel):
+    dataset_name: str
     mitm_header: Header
-    dataset_identifier: MitMDatasetIdentifier
-    db_conn_info: SupersetDBConnectionInfo
+    db_conn_info: DBConnectionInfo
+    identifiers: MitMDatasetIdentifierBundle | None = None
diff --git a/app/routes/definitions/responses.py b/app/routes/definitions/responses.py
index 1869bc23da2f9aa5918a2bd468c62c110e5309df..96e466ace046a2d8231254f7686818d1ceb30ae6 100644
--- a/app/routes/definitions/responses.py
+++ b/app/routes/definitions/responses.py
@@ -1,8 +1,13 @@
+from typing import Self
+
 from mitm_tooling.transformation.superset.definitions import SupersetMitMDatasetImport
-from mitm_tooling.transformation.superset.definition_bundles import SupersetMitMDatasetBundle
+from mitm_tooling.transformation.superset.asset_bundles import SupersetMitMDatasetBundle
+
+from app.utils.response_utils import FromBaseMixin
+
 
-class MitMDatasetBundleResponse(SupersetMitMDatasetBundle):
+class MitMDatasetBundleResponse(FromBaseMixin, SupersetMitMDatasetBundle):
     pass
 
-class MitMDatasetImportResponse(SupersetMitMDatasetImport):
+class MitMDatasetImportResponse(FromBaseMixin, SupersetMitMDatasetImport):
     pass
\ No newline at end of file
diff --git a/app/routes/definitions/router.py b/app/routes/definitions/router.py
index e0678aefc7b9f2d6e25aebc73914531bc88c104d..491e57be758f8b164acc661ee554c8684361889a 100644
--- a/app/routes/definitions/router.py
+++ b/app/routes/definitions/router.py
@@ -8,30 +8,28 @@ from starlette.responses import StreamingResponse
 from app.dependencies.orm import TrackedMitMDatasetDependency
 from app.routes.definitions.requests import GenerateMitMDatasetDefinitionRequest
 from app.routes.definitions.responses import MitMDatasetBundleResponse, MitMDatasetImportResponse
+from .generate import mk_mitm_dataset_bundle
 
 router = APIRouter(prefix='/definitions', tags=['definitions'])
 logger = logging.getLogger(__name__)
 
-from mitm_tooling.transformation.superset.interface import mk_superset_mitm_dataset_bundle, \
-    mk_superset_visualization_bundle
-
 
 @router.post('/mitm_dataset')
 def generate_mitm_dataset_bundle(request: GenerateMitMDatasetDefinitionRequest,
                                  include_visualizations: bool = False) -> MitMDatasetBundleResponse:
-    mitm_dataset_bundle = mk_superset_mitm_dataset_bundle(request.mitm_header, request.dataset_identifier,
-                                                          request.db_conn_info)
-    if include_visualizations:
-        mitm_dataset_bundle.visualization_bundle = mk_superset_visualization_bundle(request.mitm_header,
-                                                                                    mitm_dataset_bundle.datasource_bundle)
-    return MitMDatasetBundleResponse(**mitm_dataset_bundle.model_dump())
+    mitm_dataset_bundle = mk_mitm_dataset_bundle(request.mitm_header,
+                                                 request.db_conn_info,
+                                                 request.dataset_name,
+                                                 identifiers=request.identifiers,
+                                                 include_visualizations=include_visualizations)
+    return MitMDatasetBundleResponse.from_base(mitm_dataset_bundle)
 
 
 @router.post('/mitm_dataset/import')
 def generate_mitm_dataset_import(request: GenerateMitMDatasetDefinitionRequest,
                                  include_visualizations: bool = False) -> MitMDatasetImportResponse:
-    return MitMDatasetImportResponse(
-        **generate_mitm_dataset_bundle(request, include_visualizations).to_import().model_dump())
+    return MitMDatasetImportResponse.from_base(generate_mitm_dataset_bundle(request,
+                                                                            include_visualizations).to_import())
 
 
 @router.post('/mitm_dataset/import/zip', response_class=StreamingResponse,
@@ -49,11 +47,11 @@ def generate_mitm_dataset_import_zip(request: GenerateMitMDatasetDefinitionReque
 def generate_tracked_mitm_dataset_bundle(tracked_dataset: TrackedMitMDatasetDependency,
                                          include_visualizations: bool = False) -> MitMDatasetBundleResponse:
     request = GenerateMitMDatasetDefinitionRequest(mitm_header=tracked_dataset.mitm_header,
-                                                   dataset_identifier=tracked_dataset.identifier,
-                                                   db_conn_info=tracked_dataset.superset_connection_info)
-    return MitMDatasetBundleResponse(
-        **generate_mitm_dataset_bundle(request,
-                                       include_visualizations=include_visualizations).model_dump())
+                                                   dataset_name=tracked_dataset.dataset_name,
+                                                   db_conn_info=tracked_dataset.superset_connection_info,
+                                                   identifiers=tracked_dataset.identifier_bundle)
+    return MitMDatasetBundleResponse.from_base(generate_mitm_dataset_bundle(request,
+                                                                            include_visualizations=include_visualizations))
 
 
 @router.get('/mitm_dataset/{uuid}/import')
@@ -61,7 +59,7 @@ def generate_tracked_mitm_dataset_import(tracked_dataset: TrackedMitMDatasetDepe
                                          include_visualizations: bool = False) -> MitMDatasetImportResponse:
     mitm_dataset_bundle = generate_tracked_mitm_dataset_bundle(tracked_dataset,
                                                                include_visualizations=include_visualizations)
-    return MitMDatasetImportResponse(**mitm_dataset_bundle.to_import().model_dump())
+    return MitMDatasetImportResponse.from_base(mitm_dataset_bundle.to_import())
 
 
 @router.get('/mitm_dataset/{uuid}/import/zip', response_class=StreamingResponse,
@@ -74,3 +72,17 @@ def generate_tracked_mitm_dataset_import_zip(tracked_dataset: TrackedMitMDataset
     write_superset_import_as_zip(bio, assets)
     bio.seek(0)
     return StreamingResponse(bio, media_type='application/zip')
+
+
+@router.get('/mitm_dataset/{uuid}')
+def generate_tracked_mitm_dataset_bundle(tracked_dataset: TrackedMitMDatasetDependency,
+                                         include_visualizations: bool = False) -> MitMDatasetBundleResponse:
+    request = GenerateMitMDatasetDefinitionRequest(mitm_header=tracked_dataset.mitm_header,
+                                                   dataset_name=tracked_dataset.dataset_name,
+                                                   db_conn_info=tracked_dataset.superset_connection_info,
+                                                   identifiers=tracked_dataset.identifier_bundle)
+    return MitMDatasetBundleResponse.from_base(
+        generate_mitm_dataset_bundle(request,
+                                     include_visualizations=include_visualizations))
+
+# TODO to generate "follow up" visualizations, datasource uuids have to be stored or transmitted
diff --git a/app/routes/mitm_dataset/register.py b/app/routes/mitm_dataset/register.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a208167b14639ac2c39b78bfdc4b55bb252066c
--- /dev/null
+++ b/app/routes/mitm_dataset/register.py
@@ -0,0 +1,21 @@
+from mitm_tooling.transformation.superset.asset_bundles import MitMDatasetIdentifierBundle
+from mitm_tooling.transformation.superset.common import DBConnectionInfo
+from mitm_tooling.transformation.superset.definitions import MitMDatasetIdentifier
+
+from app.db.models import TrackedMitMDataset, AddTrackedMitMDataset
+from app.dependencies.db import ORMSessionDependency
+from app.routes.definitions.generate import mk_mitm_dataset_bundle
+
+
+def register_mitm_dataset(session: ORMSessionDependency, request: AddTrackedMitMDataset) -> TrackedMitMDataset:
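+    """Derive the Superset identifier bundle for the dataset and persist it as a TrackedMitMDataset."""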
+    db_conn_info = DBConnectionInfo(sql_alchemy_uri=request.sql_alchemy_uri, schema_name=request.schema_name)
+    identifiers = MitMDatasetIdentifierBundle(mitm_dataset=MitMDatasetIdentifier(dataset_name=request.dataset_name, uuid=request.uuid))
+    definition = mk_mitm_dataset_bundle(request.mitm_header, db_conn_info, request.dataset_name, identifiers=identifiers)
+    identifier_bundle = definition.identifiers
+
+    model = TrackedMitMDataset.from_base(request, identifier_bundle=identifier_bundle)
+
+    session.add(model)
+    session.commit()
+    session.refresh(model)
+    return model
diff --git a/app/routes/mitm_dataset/register_external.py b/app/routes/mitm_dataset/register_external.py
index 2d52f82f8c3b2bbff5abcaf6a6edca29a3cc8af0..0d794321baad779681624a346109cc1cc10e4ab8 100644
--- a/app/routes/mitm_dataset/register_external.py
+++ b/app/routes/mitm_dataset/register_external.py
@@ -11,7 +11,7 @@ from app.routes.mitm_dataset.requests import RegisterExternalMitMDatasetRequest
 from app.routes.mitm_dataset.responses import RegisterMitMResponse
 
 
-def register_mitm_dataset(
+def register_external_mitm_dataset(
         session: ORMSessionDependency,
         request: RegisterExternalMitMDatasetRequest,
 ) -> RegisterMitMResponse:
@@ -30,7 +30,7 @@ def register_mitm_dataset(
     with Session(remote_engine) as session:
         for mapping in request.mappings:
             hep, from_clause = mapping.apply(db_metas)
-            header_entries.extend(hep.from_session(session))
+            header_entries.extend(hep.apply_session(session))
             concept_table_name = mk_concept_table_name(mapping.mitm, mapping.concept)
             mapped_vvs.append(VirtualView.from_from_clause(concept_table_name, from_clause, result_meta))
 
diff --git a/app/routes/mitm_dataset/router.py b/app/routes/mitm_dataset/router.py
index 29b8bcb929a496f3fda462fc78b9ce1d13f4795d..0b02b5c2c956a5c5a1124618fa70c4583d035384 100644
--- a/app/routes/mitm_dataset/router.py
+++ b/app/routes/mitm_dataset/router.py
@@ -1,26 +1,21 @@
 import logging
-from codecs import StreamWriter
 from typing import Sequence
 
 import sqlmodel
 from fastapi import UploadFile, File, HTTPException
 from fastapi.routing import APIRouter
 from mitm_tooling.definition import MITM
-from mitm_tooling.extraction.sql.data_models import DBMetaInfo
-from mitm_tooling.io import read_zip
-from mitm_tooling.representation import mk_sql_rep_schema, insert_mitm_data
-from mitm_tooling.transformation.superset.common import name_plus_uuid
-from mitm_tooling.transformation.superset.factories.utils import mk_uuid
-from mitm_tooling.utilities.sql_utils import sa_url_into_any_url
+from mitm_tooling.utilities.identifiers import mk_uuid
+from pydantic import ValidationError
 from starlette.responses import StreamingResponse
 
 from app.db.models import TrackedMitMDataset
-from app.db.utils import create_schema
 from app.dependencies.db import DBEngineDependency, ORMSessionDependency
 from app.dependencies.orm import TrackedMitMDatasetDependency
 from .export import export_via_mapping
 from .requests import AddTrackedMitMDatasetRequest, RegisterExternalMitMDatasetRequest
 from .responses import UploadMitMResponse, RegisterMitMResponse, TrackedMitMEntry
+from .upload import upload_mitm_file
 
 router = APIRouter(prefix='/mitm_dataset', tags=['mitm_dataset'])
 logger = logging.getLogger(__name__)
@@ -32,25 +27,8 @@ def upload_mitm_dataset(
         dataset_name: str,
         mitm: MITM = MITM.MAED,
         mitm_zip: UploadFile = File(media_type='application/zip')) -> UploadMitMResponse:
-    mitm_data = read_zip(mitm_zip.file, mitm)
-
-    uuid = mk_uuid()
-    unique_schema_name = name_plus_uuid(dataset_name, uuid, sep='_')
-
-    sql_rep_schema = mk_sql_rep_schema(mitm_data.header, override_schema=unique_schema_name, view_generators=None)
-
     try:
-        with engine.connect() as connection:
-            create_schema(connection, unique_schema_name)
-            insert_mitm_data(connection, sql_rep_schema, mitm_data)
-            connection.commit()
-
-        model = TrackedMitMDataset(uuid=uuid, dataset_name=dataset_name, schema_name=unique_schema_name,
-                                   sql_alchemy_uri=sa_url_into_any_url(engine.url), mitm_header=mitm_data.header)
-        with sqlmodel.Session(engine) as session:
-            session.add(model)
-            session.commit()
-            session.refresh(model)
+        model = upload_mitm_file(mitm, mitm_zip.file, dataset_name=dataset_name, uuid=mk_uuid(), engine=engine)
 
         return UploadMitMResponse(status='success', tracked_mitm_dataset=model)
     except Exception as e:
@@ -58,15 +36,25 @@ def upload_mitm_dataset(
         raise HTTPException(500, str(e))
         # return UploadMitMResponse(status='failure', msg=str(e))
 
+
 @router.post('/register')
-def register_mitm_dataset(session: ORMSessionDependency, request: RegisterExternalMitMDatasetRequest) -> RegisterMitMResponse:
-    from .register_external import register_mitm_dataset
-    return register_mitm_dataset(session, request)
+def register_mitm_dataset(session: ORMSessionDependency,
+                          request: RegisterExternalMitMDatasetRequest) -> RegisterMitMResponse:
+    from .register_external import register_external_mitm_dataset
+    return register_external_mitm_dataset(session, request)
+
 
 @router.post('/')
-def post_mitm_dataset(session: ORMSessionDependency, new_mitm_dataset: AddTrackedMitMDatasetRequest) -> None:
-    new = TrackedMitMDataset.model_validate(**new_mitm_dataset.model_dump())
-    session.add(new)
+def post_mitm_dataset(session: ORMSessionDependency,
+                      new_mitm_dataset: AddTrackedMitMDatasetRequest) -> TrackedMitMDataset:
+    try:
+        new = TrackedMitMDataset.model_validate(new_mitm_dataset.model_dump())
+        session.add(new)
+        session.commit()
+        session.refresh(new)
+        return new
+    except ValidationError as exc:
+        raise HTTPException(400, f'TrackedMitMDataset creation failed: {str(exc)}') from exc
 
 
 @router.get('/{uuid}')
@@ -83,10 +71,11 @@ def get_mitm_datasets(session: ORMSessionDependency) -> Sequence[TrackedMitMData
 @router.delete('/{uuid}')
 def delete_mitm_dataset(session: ORMSessionDependency, tracked_dataset: TrackedMitMDatasetDependency) -> None:
     session.delete(tracked_dataset)
+    session.commit()
 
 
 @router.post('/export/{uuid}', response_class=StreamingResponse,
-            responses={200: {'content': {'application/zip': {}}}})
+             responses={200: {'content': {'application/zip': {}}}})
 def export_mitm_dataset(engine: DBEngineDependency, tracked_dataset: TrackedMitMDatasetDependency) -> StreamingResponse:
     remote_engine, exportable = export_via_mapping(tracked_dataset)
     import sqlalchemy as sa
diff --git a/app/routes/mitm_dataset/upload.py b/app/routes/mitm_dataset/upload.py
new file mode 100644
index 0000000000000000000000000000000000000000..1ff088b3a67573d18f55328c67115b61ce3eced4
--- /dev/null
+++ b/app/routes/mitm_dataset/upload.py
@@ -0,0 +1,41 @@
+from uuid import UUID
+
+import sqlmodel
+from mitm_tooling.definition import MITM
+from mitm_tooling.io import read_zip
+from mitm_tooling.representation import mk_sql_rep_schema, MITMData, insert_mitm_data
+from mitm_tooling.transformation.superset.common import MitMDatasetInfo
+from mitm_tooling.utilities.identifiers import name_plus_uuid, mk_uuid
+from mitm_tooling.utilities.io_utils import DataSource
+from mitm_tooling.utilities.sql_utils import sa_url_into_any_url
+from sqlalchemy import Engine
+
+from app.db.models import TrackedMitMDataset
+from app.db.utils import create_schema
+from app.dependencies.db import get_engine
+
+
+def upload_mitm_file(mitm: MITM, mitm_zip: DataSource, dataset_name: str, uuid: UUID | None = None, engine: Engine | None = None) -> TrackedMitMDataset:
+    mitm_data = read_zip(mitm_zip, mitm)
+    return upload_mitm_data(mitm_data, dataset_name, uuid=uuid, engine=engine)
+
+def upload_mitm_data(mitm_data: MITMData, dataset_name: str, uuid: UUID | None = None, engine: Engine | None = None) -> TrackedMitMDataset:
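+    """Insert the MitM data into a fresh, uniquely named schema and record it as a TrackedMitMDataset."""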
+    engine = engine if engine is not None else get_engine()
+    uuid = uuid or mk_uuid()
+    unique_schema_name = name_plus_uuid(dataset_name, uuid, sep='_')
+
+    sql_rep_schema = mk_sql_rep_schema(mitm_data.header, override_schema=unique_schema_name, view_generators=None)
+
+    with engine.connect() as connection:
+        create_schema(connection, unique_schema_name)
+        insert_mitm_data(connection, sql_rep_schema, mitm_data)
+        connection.commit()
+
+    model = TrackedMitMDataset(uuid=uuid, dataset_name=dataset_name, schema_name=unique_schema_name,
+                               sql_alchemy_uri=sa_url_into_any_url(engine.url), mitm_header=mitm_data.header)
+    with sqlmodel.Session(engine) as session:
+        session.add(model)
+        session.commit()
+        session.refresh(model)
+
+    return model
\ No newline at end of file
diff --git a/app/utils/response_utils.py b/app/utils/response_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa02c3dc5eeae1818a94d182d1171381559b5741
--- /dev/null
+++ b/app/utils/response_utils.py
@@ -0,0 +1,14 @@
+from typing import Self
+
+import pydantic
+
+
+class FromBaseMixin:
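+    """Mixin for pydantic models; from_base() copies another model's fields and applies keyword overrides."""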
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+
+    @classmethod
+    def from_base(cls, base_obj: pydantic.BaseModel, **kwargs) -> Self:
+        kwargs = base_obj.model_dump(round_trip=True) | kwargs
+        return cls(**kwargs)
\ No newline at end of file
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 3f863a98be761734e5ce14cbe964219915b2c7ac..81dd76adaea2d6c59d21fc48643bcc4993e267f4 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -21,7 +21,7 @@ services:
     container_name: test_mitm_db
     restart: unless-stopped
     ports:
-      - 5432:5432
+      - "5432:5432"
     env_file: docker/.env
     volumes:
       - mitm_db_home:/var/lib/postgresql/data
diff --git a/helm/superset-mitm-service/.helmignore b/helm/superset-mitm-service/.helmignore
new file mode 100644
index 0000000000000000000000000000000000000000..0e8a0eb36f4ca2c939201c0d54b5d82a1ea34778
--- /dev/null
+++ b/helm/superset-mitm-service/.helmignore
@@ -0,0 +1,23 @@
+# Patterns to ignore when building packages.
+# This supports shell glob matching, relative path matching, and
+# negation (prefixed with !). Only one pattern per line.
+.DS_Store
+# Common VCS dirs
+.git/
+.gitignore
+.bzr/
+.bzrignore
+.hg/
+.hgignore
+.svn/
+# Common backup files
+*.swp
+*.bak
+*.tmp
+*.orig
+*~
+# Various IDEs
+.project
+.idea/
+*.tmproj
+.vscode/
diff --git a/helm/superset-mitm-service/Chart.lock b/helm/superset-mitm-service/Chart.lock
new file mode 100644
index 0000000000000000000000000000000000000000..a1e50591889db5f9ed3964d49f3a0912e63e22b8
--- /dev/null
+++ b/helm/superset-mitm-service/Chart.lock
@@ -0,0 +1,6 @@
+dependencies:
+- name: postgresql
+  repository: https://charts.bitnami.com/bitnami
+  version: 16.5.0
+digest: sha256:5d40b9c5bdd615dc56524d8ee9cba1d2d79ff4c711b44f3c5507e378823f1323
+generated: "2025-03-17T17:12:51.0781795+01:00"
diff --git a/helm/superset-mitm-service/Chart.yaml b/helm/superset-mitm-service/Chart.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..88e9b01dafee61200466c731bfccecddcd0f55ad
--- /dev/null
+++ b/helm/superset-mitm-service/Chart.yaml
@@ -0,0 +1,30 @@
+apiVersion: v2
+name: superset-mitm-service
+description: A Helm chart for the superset-mitm-service that includes a TimescaleDB-backed PostgreSQL database.
+
+# A chart can be either an 'application' or a 'library' chart.
+#
+# Application charts are a collection of templates that can be packaged into versioned archives
+# to be deployed.
+#
+# Library charts provide useful utilities or functions for the chart developer. They're included as
+# a dependency of application charts to inject those utilities and functions into the rendering
+# pipeline. Library charts do not define any templates and therefore cannot be deployed.
+type: application
+
+# This is the chart version. This version number should be incremented each time you make changes
+# to the chart and its templates, including the app version.
+# Versions are expected to follow Semantic Versioning (https://semver.org/)
+version: 0.1.1
+
+# This is the version number of the application being deployed. This version number should be
+# incremented each time you make changes to the application. Versions are not expected to
+# follow Semantic Versioning. They should reflect the version the application is using.
+# It is recommended to use it with quotes.
+appVersion: "0.1.0"
+
+dependencies:
+  - name: postgresql
+    version: 16.5.0
+    repository: https://charts.bitnami.com/bitnami
+    alias: mitm-postgresql
\ No newline at end of file
diff --git a/helm/superset-mitm-service/templates/NOTES.txt b/helm/superset-mitm-service/templates/NOTES.txt
new file mode 100644
index 0000000000000000000000000000000000000000..51be08517009418fb7ab7751935f5290290782a6
--- /dev/null
+++ b/helm/superset-mitm-service/templates/NOTES.txt
@@ -0,0 +1,22 @@
+1. Get the application URL by running these commands:
+{{- if .Values.ingress.enabled }}
+{{- range $host := .Values.ingress.hosts }}
+  {{- range .paths }}
+  http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ .path }}
+  {{- end }}
+{{- end }}
+{{- else if contains "NodePort" .Values.service.type }}
+  export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "superset-mitm-service.fullname" . }})
+  export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
+  echo http://$NODE_IP:$NODE_PORT
+{{- else if contains "LoadBalancer" .Values.service.type }}
+     NOTE: It may take a few minutes for the LoadBalancer IP to be available.
+           You can watch its status by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "superset-mitm-service.fullname" . }}'
+  export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "superset-mitm-service.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
+  echo http://$SERVICE_IP:{{ .Values.service.port }}
+{{- else if contains "ClusterIP" .Values.service.type }}
+  export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "superset-mitm-service.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
+  export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
+  echo "Visit http://127.0.0.1:8080 to use your application"
+  kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
+{{- end }}
diff --git a/helm/superset-mitm-service/templates/_helpers.tpl b/helm/superset-mitm-service/templates/_helpers.tpl
new file mode 100644
index 0000000000000000000000000000000000000000..03aa3ad9275e47c0c9c220972b5ae1ef7fb715b9
--- /dev/null
+++ b/helm/superset-mitm-service/templates/_helpers.tpl
@@ -0,0 +1,62 @@
+{{/*
+Expand the name of the chart.
+*/}}
+{{- define "superset-mitm-service.name" -}}
+{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
+{{- end }}
+
+{{/*
+Create a default fully qualified app name.
+We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
+If release name contains chart name it will be used as a full name.
+*/}}
+{{- define "superset-mitm-service.fullname" -}}
+{{- if .Values.fullnameOverride }}
+{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
+{{- else }}
+{{- $name := default .Chart.Name .Values.nameOverride }}
+{{- if contains $name .Release.Name }}
+{{- .Release.Name | trunc 63 | trimSuffix "-" }}
+{{- else }}
+{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
+{{- end }}
+{{- end }}
+{{- end }}
+
+{{/*
+Create chart name and version as used by the chart label.
+*/}}
+{{- define "superset-mitm-service.chart" -}}
+{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
+{{- end }}
+
+{{/*
+Common labels
+*/}}
+{{- define "superset-mitm-service.labels" -}}
+helm.sh/chart: {{ include "superset-mitm-service.chart" . }}
+{{ include "superset-mitm-service.selectorLabels" . }}
+{{- if .Chart.AppVersion }}
+app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
+{{- end }}
+app.kubernetes.io/managed-by: {{ .Release.Service }}
+{{- end }}
+
+{{/*
+Selector labels
+*/}}
+{{- define "superset-mitm-service.selectorLabels" -}}
+app.kubernetes.io/name: {{ include "superset-mitm-service.name" . }}
+app.kubernetes.io/instance: {{ .Release.Name }}
+{{- end }}
+
+{{/*
+Create the name of the service account to use
+*/}}
+{{- define "superset-mitm-service.serviceAccountName" -}}
+{{- if .Values.serviceAccount.create }}
+{{- default (include "superset-mitm-service.fullname" .) .Values.serviceAccount.name }}
+{{- else }}
+{{- default "default" .Values.serviceAccount.name }}
+{{- end }}
+{{- end }}
diff --git a/helm/superset-mitm-service/templates/deployment.yaml b/helm/superset-mitm-service/templates/deployment.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..017f50f0a5f918261234aedb4368b1233abf263a
--- /dev/null
+++ b/helm/superset-mitm-service/templates/deployment.yaml
@@ -0,0 +1,90 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: {{ include "superset-mitm-service.fullname" . }}
+  labels:
+    {{- include "superset-mitm-service.labels" . | nindent 4 }}
+spec:
+  {{- if not .Values.autoscaling.enabled }}
+  replicas: {{ .Values.replicaCount }}
+  {{- end }}
+  selector:
+    matchLabels:
+      {{- include "superset-mitm-service.selectorLabels" . | nindent 6 }}
+  template:
+    metadata:
+      {{- with .Values.podAnnotations }}
+      annotations:
+        {{- toYaml . | nindent 8 }}
+      {{- end }}
+      labels:
+        {{- include "superset-mitm-service.labels" . | nindent 8 }}
+        {{- with .Values.podLabels }}
+        {{- toYaml . | nindent 8 }}
+        {{- end }}
+    spec:
+      {{- with .Values.imagePullSecrets }}
+      imagePullSecrets:
+        {{- toYaml . | nindent 8 }}
+      {{- end }}
+      serviceAccountName: {{ include "superset-mitm-service.serviceAccountName" . }}
+      {{- with .Values.podSecurityContext }}
+      securityContext:
+        {{- toYaml . | nindent 8 }}
+      {{- end }}
+      containers:
+        - name: {{ .Chart.Name }}
+          {{- with .Values.securityContext }}
+          securityContext:
+            {{- toYaml . | nindent 12 }}
+          {{- end }}
+          image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
+          imagePullPolicy: {{ .Values.image.pullPolicy }}
+          ports:
+            - name: http
+              containerPort: {{ .Values.service.port }}
+              protocol: TCP
+          envFrom:
+            - secretRef:
+                name: {{ tpl .Values.envFromSecret . | quote }}
+          env:
+            - name: API_PORT
+              value: {{ .Values.service.port | quote }}
+            - name: CORS_ORIGIN
+              value: {{ tpl .Values.connections.origin . | quote}}
+            {{- range $key, $value := .Values.apiConfig }}
+            - name: {{ $key | quote}}
+              value: {{ $value | quote }}
+            {{- end }}
+          {{- with .Values.livenessProbe }}
+          livenessProbe:
+            {{- toYaml . | nindent 12 }}
+          {{- end }}
+          {{- with .Values.readinessProbe }}
+          readinessProbe:
+            {{- toYaml . | nindent 12 }}
+          {{- end }}
+          {{- with .Values.resources }}
+          resources:
+            {{- toYaml . | nindent 12 }}
+          {{- end }}
+          {{- with .Values.volumeMounts }}
+          volumeMounts:
+            {{- toYaml . | nindent 12 }}
+          {{- end }}
+      {{- with .Values.volumes }}
+      volumes:
+        {{- toYaml . | nindent 8 }}
+      {{- end }}
+      {{- with .Values.nodeSelector }}
+      nodeSelector:
+        {{- toYaml . | nindent 8 }}
+      {{- end }}
+      {{- with .Values.affinity }}
+      affinity:
+        {{- toYaml . | nindent 8 }}
+      {{- end }}
+      {{- with .Values.tolerations }}
+      tolerations:
+        {{- toYaml . | nindent 8 }}
+      {{- end }}
diff --git a/helm/superset-mitm-service/templates/hpa.yaml b/helm/superset-mitm-service/templates/hpa.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..f00d495c5779fe377dcde73ee052da29726a909b
--- /dev/null
+++ b/helm/superset-mitm-service/templates/hpa.yaml
@@ -0,0 +1,32 @@
+{{- if .Values.autoscaling.enabled }}
+apiVersion: autoscaling/v2
+kind: HorizontalPodAutoscaler
+metadata:
+  name: {{ include "superset-mitm-service.fullname" . }}
+  labels:
+    {{- include "superset-mitm-service.labels" . | nindent 4 }}
+spec:
+  scaleTargetRef:
+    apiVersion: apps/v1
+    kind: Deployment
+    name: {{ include "superset-mitm-service.fullname" . }}
+  minReplicas: {{ .Values.autoscaling.minReplicas }}
+  maxReplicas: {{ .Values.autoscaling.maxReplicas }}
+  metrics:
+    {{- if .Values.autoscaling.targetCPUUtilizationPercentage }}
+    - type: Resource
+      resource:
+        name: cpu
+        target:
+          type: Utilization
+          averageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }}
+    {{- end }}
+    {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }}
+    - type: Resource
+      resource:
+        name: memory
+        target:
+          type: Utilization
+          averageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }}
+    {{- end }}
+{{- end }}
diff --git a/helm/superset-mitm-service/templates/postgresql-auth-secret.yaml b/helm/superset-mitm-service/templates/postgresql-auth-secret.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..969822066d4ab7eed0a3aefcd2df56e0a4af0633
--- /dev/null
+++ b/helm/superset-mitm-service/templates/postgresql-auth-secret.yaml
@@ -0,0 +1,33 @@
+{{/*
+
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+*/}}
+apiVersion: v1
+kind: Secret
+metadata:
+  name: postgresql-auth-env
+  labels:
+      {{- include "superset-mitm-service.labels" . | nindent 4 }}
+  namespace: {{ .Release.Namespace }}
+type: Opaque
+stringData:
+  MITM_DATABASE_DIALECT: {{.Values.mitmDB.mitm_database_dialect  | quote }}
+  MITM_DATABASE_HOST: {{ tpl .Values.mitmDB.mitm_database_host .  | quote }}
+  MITM_DATABASE_PORT: {{.Values.mitmDB.mitm_database_port  | quote }}
+  MITM_DATABASE_USER: {{.Values.mitmDB.mitm_database_user  | quote }}
+  MITM_DATABASE_PASSWORD: {{.Values.mitmDB.mitm_database_password  | quote }}
+  MITM_DATABASE_DB: {{.Values.mitmDB.mitm_database_db  | quote }}
diff --git a/helm/superset-mitm-service/templates/secret-env.yaml b/helm/superset-mitm-service/templates/secret-env.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..a0c99920853dfffb74c543c1f17a8c496392082f
--- /dev/null
+++ b/helm/superset-mitm-service/templates/secret-env.yaml
@@ -0,0 +1,33 @@
+{{/*
+
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+*/}}
+apiVersion: v1
+kind: Secret
+metadata:
+  name: {{ include "superset-mitm-service.fullname" . }}-env
+  labels:
+      {{- include "superset-mitm-service.labels" . | nindent 4 }}
+  namespace: {{ .Release.Namespace }}
+type: Opaque
+stringData:
+  MITM_DATABASE_DIALECT: {{.Values.mitmDB.mitm_database_dialect  | quote }}
+  MITM_DATABASE_USER: {{.Values.mitmDB.mitm_database_user  | quote }}
+  MITM_DATABASE_PASSWORD: {{.Values.mitmDB.mitm_database_password  | quote }}
+  MITM_DATABASE_HOST: {{ tpl .Values.mitmDB.mitm_database_host . | quote }}
+  MITM_DATABASE_PORT: {{.Values.mitmDB.mitm_database_port  | quote }}
+  MITM_DATABASE_DB: {{ .Values.mitmDB.mitm_database_db  | quote }}
diff --git a/helm/superset-mitm-service/templates/service.yaml b/helm/superset-mitm-service/templates/service.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..840397c06e5f4ba22cd460d01b553bca761a6c7c
--- /dev/null
+++ b/helm/superset-mitm-service/templates/service.yaml
@@ -0,0 +1,15 @@
+apiVersion: v1
+kind: Service
+metadata:
+  name: {{ include "superset-mitm-service.fullname" . }}
+  labels:
+    {{- include "superset-mitm-service.labels" . | nindent 4 }}
+spec:
+  type: {{ .Values.service.type }}
+  ports:
+    - port: {{ .Values.service.port }}
+      targetPort: http
+      protocol: TCP
+      name: http
+  selector:
+    {{- include "superset-mitm-service.selectorLabels" . | nindent 4 }}
diff --git a/helm/superset-mitm-service/templates/serviceaccount.yaml b/helm/superset-mitm-service/templates/serviceaccount.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..51ca2cbea018b4132ebbff1e0e5e7824d3e4ea6a
--- /dev/null
+++ b/helm/superset-mitm-service/templates/serviceaccount.yaml
@@ -0,0 +1,13 @@
+{{- if .Values.serviceAccount.create -}}
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  name: {{ include "superset-mitm-service.serviceAccountName" . }}
+  labels:
+    {{- include "superset-mitm-service.labels" . | nindent 4 }}
+  {{- with .Values.serviceAccount.annotations }}
+  annotations:
+    {{- toYaml . | nindent 4 }}
+  {{- end }}
+automountServiceAccountToken: {{ .Values.serviceAccount.automount }}
+{{- end }}
diff --git a/helm/superset-mitm-service/templates/tests/test-connection.yaml b/helm/superset-mitm-service/templates/tests/test-connection.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..4a8e4c07d8dedd3ea12a498e9506118c011b21cf
--- /dev/null
+++ b/helm/superset-mitm-service/templates/tests/test-connection.yaml
@@ -0,0 +1,15 @@
+apiVersion: v1
+kind: Pod
+metadata:
+  name: "{{ include "superset-mitm-service.fullname" . }}-test-connection"
+  labels:
+    {{- include "superset-mitm-service.labels" . | nindent 4 }}
+  annotations:
+    "helm.sh/hook": test
+spec:
+  containers:
+    - name: wget
+      image: busybox
+      command: ['wget']
+      args: ['{{ include "superset-mitm-service.fullname" . }}:{{ .Values.service.port }}']
+  restartPolicy: Never
diff --git a/helm/superset-mitm-service/values.yaml b/helm/superset-mitm-service/values.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..a5100a9473bf87bf73c067c284f0cbaed3c5f2f1
--- /dev/null
+++ b/helm/superset-mitm-service/values.yaml
@@ -0,0 +1,181 @@
+# Default values for superset-mitm-service.
+# This is a YAML-formatted file.
+# Declare variables to be passed into your templates.
+
+# This will set the replicaset count more information can be found here: https://kubernetes.io/docs/concepts/workloads/controllers/replicaset/
+replicaCount: 1
+
+# This sets the container image more information can be found here: https://kubernetes.io/docs/concepts/containers/images/
+image:
+  repository: registry.git-ce.rwth-aachen.de/machine-data/superset-mitm-service
+  # This sets the pull policy for images.
+  pullPolicy: Always
+  # Overrides the image tag whose default is the chart appVersion.
+  tag: "latest"
+
+# This is for the secrets for pulling an image from a private repository more information can be found here: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/
+imagePullSecrets: [ ]
+# This is to override the chart name.
+nameOverride: ""
+fullnameOverride: ""
+
+ingress:
+  enabled: false
+
+# This section builds out the service account more information can be found here: https://kubernetes.io/docs/concepts/security/service-accounts/
+serviceAccount:
+  # Specifies whether a service account should be created
+  create: true
+  # Automatically mount a ServiceAccount's API credentials?
+  automount: true
+  # Annotations to add to the service account
+  annotations: { }
+  # The name of the service account to use.
+  # If not set and create is true, a name is generated using the fullname template
+  name: ""
+
+# This is for setting Kubernetes Annotations to a Pod.
+# For more information checkout: https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/
+podAnnotations: { }
+# This is for setting Kubernetes Labels to a Pod.
+# For more information checkout: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/
+podLabels: { }
+
+podSecurityContext: { }
+# fsGroup: 2000
+
+securityContext: { }
+  # capabilities:
+  #   drop:
+  #   - ALL
+  # readOnlyRootFilesystem: true
+# runAsNonRoot: true
+# runAsUser: 1000
+
+# This is for setting up a service more information can be found here: https://kubernetes.io/docs/concepts/services-networking/service/
+service:
+  # This sets the service type more information can be found here: https://kubernetes.io/docs/concepts/services-networking/service/#publishing-services-service-types
+  type: ClusterIP
+  # This sets the ports more information can be found here: https://kubernetes.io/docs/concepts/services-networking/service/#field-spec-ports
+  port: 8180
+
+resources: { }
+  # We usually recommend not to specify default resources and to leave this as a conscious
+  # choice for the user. This also increases chances charts run on environments with little
+  # resources, such as Minikube. If you do want to specify resources, uncomment the following
+  # lines, adjust them as necessary, and remove the curly braces after 'resources:'.
+  # limits:
+  #   cpu: 100m
+  #   memory: 128Mi
+  # requests:
+#   cpu: 100m
+#   memory: 128Mi
+
+startupProbe:
+  httpGet:
+    path: /health
+    port: http
+  initialDelaySeconds: 5
+  timeoutSeconds: 1
+  failureThreshold: 60
+  periodSeconds: 5
+  successThreshold: 1
+livenessProbe:
+  httpGet:
+    path: /health
+    port: http
+  initialDelaySeconds: 5
+  timeoutSeconds: 1
+  failureThreshold: 3
+  periodSeconds: 5
+  successThreshold: 1
+readinessProbe:
+  httpGet:
+    path: /health
+    port: http
+  initialDelaySeconds: 5
+  timeoutSeconds: 1
+  failureThreshold: 3
+  periodSeconds: 5
+  successThreshold: 1
+
+# This section is for setting up autoscaling more information can be found here: https://kubernetes.io/docs/concepts/workloads/autoscaling/
+autoscaling:
+  enabled: false
+  minReplicas: 1
+  maxReplicas: 2
+  targetCPUUtilizationPercentage: 80
+  # targetMemoryUtilizationPercentage: 80
+
+# Additional volumes on the output Deployment definition.
+volumes: [ ]
+# - name: foo
+#   secret:
+#     secretName: mysecret
+#     optional: false
+
+# Additional volumeMounts on the output Deployment definition.
+volumeMounts: [ ]
+# - name: foo
+#   mountPath: "/etc/foo"
+#   readOnly: true
+
+nodeSelector: { }
+
+tolerations: [ ]
+
+affinity: { }
+
+envFromSecret: '{{ template "superset-mitm-service.fullname" . }}-env'
+
+mitmDB:
+  mitm_database_dialect: "postgresql"
+  mitm_database_user: "mitm-pg-user"
+  mitm_database_password: "superweirdpasswordpleasedonotcrack"
+  mitm_database_host: "{{ .Release.Name }}-postgresql"
+  mitm_database_port: "5432"
+  mitm_database_db: "mitm-db"
+
+connections:
+  origin: "http://localhost:8080" # can be overridden with a template string
+
+apiConfig:
+  export_dir: "exports/"
+  upload_dir: "uploads/"
+
+global:
+  security:
+    allowInsecureImages: true
+
+mitm-postgresql:
+  image:
+    registry: docker.io
+    repository: timescale/timescaledb
+    tag: latest-pg16
+
+  auth:
+    existingSecret: "postgresql-auth-env"
+
+  primary:
+    ##
+    ## Persistent Volume Storage configuration.
+    ## ref: https://kubernetes.io/docs/user-guide/persistent-volumes
+    persistence:
+      ##
+      ## Enable PostgreSQL persistence using Persistent Volume Claims.
+      enabled: true
+      ##
+      ## Persistent class
+      # storageClass: classname
+      ##
+      ## Access modes:
+      accessModes:
+        - ReadWriteOnce
+    ## PostgreSQL port
+    service:
+      ports:
+        postgresql: 5432
+
+  initScripts:
+    timescaledb: |
+      CREATE EXTENSION IF NOT EXISTS timescaledb;
\ No newline at end of file
diff --git a/justfile b/justfile
index cece37e56932983884cbcc4900d14190bb885e86..4098ffaa502c383905ece9d9d0dfc6c7523431ec 100644
--- a/justfile
+++ b/justfile
@@ -4,21 +4,28 @@ default:
     @just --list
 
 lock:
-    poetry lock
+    uv lock
 
-update:
-    poetry update
+sync:
+    uv sync
 
 requirements:
-    poetry export --without-hashes -f requirements.txt > requirements.txt --without-hashes
+    uv export --no-hashes > requirements.txt
 
 preflight: lock requirements
 
 schema:
-    python schema/gen_open_api_schema.py
+    uv run schema/gen_open_api_schema.py
 
 up:
     docker compose up
 
 down:
-    docker compose down
\ No newline at end of file
+    docker compose down
+
+helm-package:
+    helm package helm/superset-mitm-service -d helm/
+
+helm-push a: helm-package
+    # helm registry login registry-1.docker.io -u leahtgu
+    helm push {{a}} oci://registry-1.docker.io/leahtgu
diff --git a/pyproject.toml b/pyproject.toml
index 89f387605be69fc2f2dd8b656d20c9c6882e50df..2fbcc44e37d3c66f3cd03aa039047274827b4e5d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,35 +1,39 @@
-[tool.poetry]
+[project]
 name = "superset-mitm-service"
-version = "0.1.0"
+version = "0.1.1"
 description = ""
-authors = ["Leah Tacke genannt Unterberg <leah.tgu@pads.rwth-aachen.de>"]
+authors = [{ name = "Leah Tacke genannt Unterberg", email = "l.tgu@pads.rwth-aachen.de" }]
+requires-python = ">=3.12,<3.14"
 readme = "README.md"
-package-mode = false
+dependencies = [
+    "python-dotenv>=1.0.1",
+    "pyyaml",
+    "pydantic",
+    "fastapi[standard]>=0.115.8",
+    "uvicorn>=0.34.0",
+    "python-multipart",
+    "psycopg2",
+    "sqlalchemy>=2.0.38",
+    "alembic",
+    "sqlmodel>=0.0.24",
+    "mitm-tooling",
+]
 
-[tool.poetry.dependencies]
-python = "^3.12,<3.14"
-python-dotenv = "^1.0.1"
-pyyaml = "*"
+[dependency-groups]
+dev = ["datamodel-code-generator"]
 
-pydantic = "*"
-fastapi = { version = "^0.115.8", extras = ["standard"] }
-uvicorn = "^0.34.0"
-python-multipart = "*"
-psycopg2 = "*"
-sqlalchemy = "^2.0.38"
-alembic = "*"
-sqlmodel = "^0.0.24"
-mitm-tooling = { version = "*" }
-# mitm-tooling = { version = "*", source = "local", develop = true }
+[tool.uv]
+package = false
 
-[tool.poetry.group.dev.dependencies]
-datamodel-code-generator = "*"
-
-[[tool.poetry.source]]
+[[tool.uv.index]]
 name = "local"
 url = "file:///C:/Users/leah/PycharmProjects/mitm-tooling"
-priority = "explicit"
+explicit = true
+
+[tool.uv.sources]
+# mitm-tooling = { path = "file:///C:/Users/leah/PycharmProjects/mitm-tooling", editable = true }
+mitm-tooling = { git = "https://git-ce.rwth-aachen.de/machine-data/mitm-tooling.git", branch = "master" }
 
 [build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
+requires = ["hatchling"]
+build-backend = "hatchling.build"
diff --git a/startup.sh b/startup.sh
index d302c287f32681b57426d3a99485e4af45dcc66e..64bef21589a1f1a741178478b3703ebfb7bda202 100644
--- a/startup.sh
+++ b/startup.sh
@@ -1 +1,2 @@
+#!/bin/bash
 fastapi run app/main.py --reload --port "${API_PORT:-8180}"
\ No newline at end of file
diff --git a/test/definitions.http b/test/definitions.http
index c855f0343132321cf3dfe4eacc5d84592c9ac903..d87f9c313cf6e6f3f1e885329dcccf1de1cf16da 100644
--- a/test/definitions.http
+++ b/test/definitions.http
@@ -3,12 +3,12 @@ Accept: application/json
 
 ###
 
-GET http://localhost:{{port}}/definitions/mitm_dataset/{{uuid}}/import?include_visualizations=True
+GET http://localhost:{{port}}/definitions/mitm_dataset/{{uuid}}/import?include_visualizations=False
 Accept: application/json
 
 ###
 
-GET http://localhost:{{port}}/definitions/uploaded_mitm_dataset/{{uuid}}/import/zip?include_visualizations=True
+GET http://localhost:{{port}}/definitions/mitm_dataset/{{uuid}}/import/zip?include_visualizations=False
 Accept: application/zip
 
 ###
\ No newline at end of file
diff --git a/test/export.http b/test/export.http
new file mode 100644
index 0000000000000000000000000000000000000000..f7041dbed8da01dc2938a7549f4aec85b3db73f6
--- /dev/null
+++ b/test/export.http
@@ -0,0 +1,6 @@
+###
+
+// @name Export
+POST http://localhost:{{port}}/mitm_dataset/export/{{uuid}}
+
+###
\ No newline at end of file
diff --git a/test/http-client.env.json b/test/http-client.env.json
index a0942cb43f09cec870667f1705efc6450c4631be..5139377f525e03ca026d850a27c154277c327978 100644
--- a/test/http-client.env.json
+++ b/test/http-client.env.json
@@ -1,10 +1,14 @@
 {
   "dev": {
     "port": "8181",
-    "uuid": "ebf7b689-ce39-4de6-acad-af01ccf76f75"
+    "uuid": "ebf7b689-ce39-4de6-acad-af01ccf76f75",
   },
   "docker": {
     "port": "8180",
     "uuid": "8f09a527-3e69-4839-82e9-8db3dd38268c"
+  },
+  "superset": {
+    "port": "8180",
+    "uuid":  "b4004d6a-bcaa-4a48-aa54-271b074109ca"
   }
 }
\ No newline at end of file
diff --git a/test/upload.http b/test/upload.http
index efad6babfc99f665a9a211069b9783cd44e6ad8f..90bb54474792864e7a398d8d0a58ca14f27f3ad3 100644
--- a/test/upload.http
+++ b/test/upload.http
@@ -36,6 +36,10 @@ GET http://localhost:{{port}}/mitm_dataset/{{uuid}}
 
 ###
 
-DELETE http://localhost:{{port}}/mitm_dataset/{{uuid}}
+GET http://localhost:{{port}}/mitm_dataset/b4004d6a-bcaa-4a48-aa54-271b074109ca
+
+###
+
+DELETE http://localhost:{{port}}/mitm_dataset/39f207b3-2588-484d-a63c-91d219d7513d
 
 ###
\ No newline at end of file