diff --git a/.local.env b/.local.env
index a2973f3c8ccfbe1364eafede774ca757dcb8f335..6b5700d4e6d28455a1e8aeaea513cf32d0bfcf50 100644
--- a/.local.env
+++ b/.local.env
@@ -1,8 +1,7 @@
-API_BASE=http://localhost
-API_PORT=8180
+API_PORT=8181
 EXPORT_DIR=exports/
 UPLOAD_DIR=uploads/
-CORS_ORIGIN=http://localhost:8080
+CORS_ORIGIN=http://localhost:8081
 MITM_DATABASE_DIALECT=
 MITM_DATABASE_USER=
 MITM_DATABASE_PASSWORD=
diff --git a/app/db/adapters.py b/app/db/adapters.py
index 8599568956a4fcf6e3d7c65b50dd93eeaa3b74a5..2fdc0d107617fa67d4ffc6d9f2e6f94beee0d5b7 100644
--- a/app/db/adapters.py
+++ b/app/db/adapters.py
@@ -3,6 +3,7 @@ from typing import Type, Generic, TypeVar, Any
 import pydantic
 import sqlalchemy as sa
 from fastapi.encoders import jsonable_encoder
+from sqlalchemy.dialects.postgresql import JSONB
 
 T = TypeVar('T', bound=pydantic.BaseModel)
 
@@ -24,7 +25,7 @@ class PydanticType(sa.types.TypeDecorator, Generic[T]):
     def load_dialect_impl(self, dialect):
         # Use JSONB for PostgreSQL and JSON for other databases.
         if dialect.name == "postgresql":
-            return dialect.type_descriptor(sa.JSONB())
+            return dialect.type_descriptor(JSONB())
         else:
             return dialect.type_descriptor(sa.JSON())
 
diff --git a/app/db/models.py b/app/db/models.py
index 07183a3e0c5c3c5bc1596f9015df06c90ed506fc..6dc3dd28018980af1e9f15b7d5f4062a56e40ec9 100644
--- a/app/db/models.py
+++ b/app/db/models.py
@@ -20,7 +20,6 @@ class AddTrackedMitMDataset(BaseModel):
     sql_alchemy_uri: AnyUrl
     mitm_header: Header
 
-
 class TrackedMitMDataset(SQLModel, table=True):
     model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)
     __tablename__ = 'uploaded_mitm_datasets'
diff --git a/app/db/setup.py b/app/db/setup.py
index 3621b7ce09f846f8adca0e84dbaa48b9633c4799..95abed8e48c714ebb2742f0741a5564e10c8ec37 100644
--- a/app/db/setup.py
+++ b/app/db/setup.py
@@ -22,9 +22,9 @@ engine = create_engine(MITM_DATABASE_URL, execution_options=execution_options)
 def init_db():
     from .models import SQLModel, APPLICATION_DB_SCHEMA
     logger.info(f'Setting up MITM DB @ {MITM_DATABASE_URL}')
-    with Session(engine) as session:
+    with engine.connect() as conn:
         if APPLICATION_DB_SCHEMA not in inspect(engine).get_schema_names():
-            create_schema(session, APPLICATION_DB_SCHEMA)
-            session.commit()
-        SQLModel.metadata.create_all(session.connection(), checkfirst=True)
-        session.commit()
\ No newline at end of file
+            create_schema(conn, APPLICATION_DB_SCHEMA)
+            conn.commit()
+        SQLModel.metadata.create_all(conn, checkfirst=True)
+        conn.commit()
diff --git a/app/db/utils.py b/app/db/utils.py
index cd496d72bb6be602235e268f23c448e9a17ef8eb..300acd5b2fa196c7adea488a508dd1497e1d7868 100644
--- a/app/db/utils.py
+++ b/app/db/utils.py
@@ -4,6 +4,7 @@ import sqlalchemy as sa
 import sqlmodel
 from mitm_tooling.extraction.sql.data_models import DBMetaInfo
 from mitm_tooling.extraction.sql.db import connect_and_reflect
+from sqlalchemy.orm.session import Session
 from sqlalchemy.sql.ddl import CreateSchema
 
 from .models import TrackedMitMDataset
@@ -18,8 +19,9 @@ def infer_uploaded_mitm_dataset_schema(engine: sa.Engine, mitm_dataset_uuid: UUI
         return DBMetaInfo.from_sa_meta(meta, default_schema=upload_info.schema_name)
 
 
-def create_schema(conn: sa.Connection | sa.orm.session.Session, unique_schema_name: str) -> None:
-    if conn.dialect.name == 'sqlite':
-        conn.execute(sa.text(f"ATTACH DATABASE ':memory:' AS {unique_schema_name}"))
+def create_schema(conn_or_sess: sa.Connection | Session, unique_schema_name: str) -> None:
+    dialect = conn_or_sess.bind.dialect if isinstance(conn_or_sess, Session) else conn_or_sess.dialect
+    if dialect.name == 'sqlite':
+        conn_or_sess.execute(sa.text(f"ATTACH DATABASE ':memory:' AS {unique_schema_name}"))
     else:
-        conn.execute(CreateSchema(unique_schema_name, if_not_exists=False))
+        conn_or_sess.execute(CreateSchema(unique_schema_name, if_not_exists=False))
diff --git a/app/routes/definitions/router.py b/app/routes/definitions/router.py
index ee649798be69d492f6579b7aa6c44a3347e450c6..e0678aefc7b9f2d6e25aebc73914531bc88c104d 100644
--- a/app/routes/definitions/router.py
+++ b/app/routes/definitions/router.py
@@ -49,8 +49,8 @@ def generate_mitm_dataset_import_zip(request: GenerateMitMDatasetDefinitionReque
 def generate_tracked_mitm_dataset_bundle(tracked_dataset: TrackedMitMDatasetDependency,
                                          include_visualizations: bool = False) -> MitMDatasetBundleResponse:
     request = GenerateMitMDatasetDefinitionRequest(mitm_header=tracked_dataset.mitm_header,
-                                                   dataset_identifier=tracked_dataset.dataset_identifier,
-                                                   db_conn_info=tracked_dataset.db_conn_info)
+                                                   dataset_identifier=tracked_dataset.identifier,
+                                                   db_conn_info=tracked_dataset.superset_connection_info)
     return MitMDatasetBundleResponse(
         **generate_mitm_dataset_bundle(request,
                                        include_visualizations=include_visualizations).model_dump())
diff --git a/app/routes/mitm_dataset/export.py b/app/routes/mitm_dataset/export.py
new file mode 100644
index 0000000000000000000000000000000000000000..59686a64c6241680597e5d3b1c0d9b67a3850b08
--- /dev/null
+++ b/app/routes/mitm_dataset/export.py
@@ -0,0 +1,40 @@
+import logging
+
+import sqlalchemy as sa
+from mitm_tooling.extraction.sql.data_models import DBMetaInfo, SourceDBType
+from mitm_tooling.extraction.sql.mapping import MappingExport, Exportable
+from mitm_tooling.extraction.sql.mapping.mapping import ConceptMappingException
+from mitm_tooling.transformation.sql.into_exportable import sql_rep_into_exportable
+from mitm_tooling.transformation.sql.into_mappings import sql_rep_into_mappings
+from mitm_tooling.utilities.sql_utils import create_sa_engine
+
+from app.db.models import TrackedMitMDataset
+
+logger = logging.getLogger(__name__)
+
+
+def export_via_mapping(tracked_dataset: TrackedMitMDataset) -> tuple[sa.Engine, Exportable]:
+    sql_alchemy_uri = tracked_dataset.sql_alchemy_uri
+    sql_rep_schema = tracked_dataset.sql_rep_schema
+    schema_name = tracked_dataset.schema_name
+    header = tracked_dataset.mitm_header
+
+    cms = sql_rep_into_mappings(header, sql_rep_schema)
+
+    db_meta_info = DBMetaInfo.from_sa_meta(sql_rep_schema.meta, default_schema=schema_name)
+    db_metas = {SourceDBType.OriginalDB: db_meta_info}
+    remote_engine = create_sa_engine(sql_alchemy_uri)
+
+    try:
+        exportable = MappingExport(mitm=header.mitm, mapped_concepts=cms, filename='export.zip').apply(db_metas)
+        return remote_engine, exportable
+    except ConceptMappingException as exc:
+        logger.error(f'Concept Mapping failed due to: {repr(exc)}')
+        raise exc
+
+
+def export_directly(tracked_dataset: TrackedMitMDataset) -> tuple[sa.Engine, Exportable]:
+    sql_alchemy_uri = tracked_dataset.sql_alchemy_uri
+    sql_rep_schema = tracked_dataset.sql_rep_schema
+    header = tracked_dataset.mitm_header
+    return create_sa_engine(sql_alchemy_uri), sql_rep_into_exportable(header, sql_rep_schema)
diff --git a/app/routes/mitm_dataset/refresh.py b/app/routes/mitm_dataset/refresh.py
new file mode 100644
index 0000000000000000000000000000000000000000..84e5da3451d4fb68386493402bf703ef22fc3d12
--- /dev/null
+++ b/app/routes/mitm_dataset/refresh.py
@@ -0,0 +1,19 @@
+from abc import ABC
+from typing import Literal, Any
+
+import pydantic
+from mitm_tooling.data_types import MITMDataType
+from mitm_tooling.definition import ConceptName, TypeName
+from mitm_tooling.representation import Header, SQLRepresentationSchema, HeaderEntry
+from mitm_tooling.representation.intermediate.deltas import diff_header
+from pydantic import ConfigDict
+
+from app.db.models import TrackedMitMDataset
+
+
+
+def refresh(tracked_dataset: TrackedMitMDataset, new_header: Header):
+    old_header = tracked_dataset.mitm_header
+    header_delta = diff_header(old_header, new_header)
+
+    row_delta = ...
\ No newline at end of file
diff --git a/app/routes/mitm_dataset/responses.py b/app/routes/mitm_dataset/responses.py
index 1d36eee007425674d56d246f96b9119330d5dcf4..ad9b36d3ad06b0e23810a3ef9a0922e6bcc99d5d 100644
--- a/app/routes/mitm_dataset/responses.py
+++ b/app/routes/mitm_dataset/responses.py
@@ -1,4 +1,5 @@
 from typing import Literal
+from uuid import UUID
 
 import pydantic
 
@@ -17,3 +18,7 @@ class UploadMitMResponse(TrackMitMResponse):
 
 class RegisterMitMResponse(TrackMitMResponse):
     pass
+
+class TrackedMitMEntry(pydantic.BaseModel):
+    dataset_name: str
+    uuid: UUID
\ No newline at end of file
diff --git a/app/routes/mitm_dataset/router.py b/app/routes/mitm_dataset/router.py
index 51438a523ab5e29bc0233b5538a49ab7689d4d18..29b8bcb929a496f3fda462fc78b9ce1d13f4795d 100644
--- a/app/routes/mitm_dataset/router.py
+++ b/app/routes/mitm_dataset/router.py
@@ -1,22 +1,25 @@
 import logging
 from typing import Sequence
 
 import sqlmodel
 from fastapi import UploadFile, File, HTTPException
 from fastapi.routing import APIRouter
 from mitm_tooling.definition import MITM
+from mitm_tooling.extraction.sql.data_models import DBMetaInfo
 from mitm_tooling.io import read_zip
 from mitm_tooling.representation import mk_sql_rep_schema, insert_mitm_data
 from mitm_tooling.transformation.superset.common import name_plus_uuid
 from mitm_tooling.transformation.superset.factories.utils import mk_uuid
 from mitm_tooling.utilities.sql_utils import sa_url_into_any_url
+from starlette.responses import StreamingResponse
 
 from app.db.models import TrackedMitMDataset
 from app.db.utils import create_schema
 from app.dependencies.db import DBEngineDependency, ORMSessionDependency
 from app.dependencies.orm import TrackedMitMDatasetDependency
-from app.routes.mitm_dataset.requests import AddTrackedMitMDatasetRequest, RegisterExternalMitMDatasetRequest
-from app.routes.mitm_dataset.responses import UploadMitMResponse, RegisterMitMResponse
+from .export import export_via_mapping
+from .requests import AddTrackedMitMDatasetRequest, RegisterExternalMitMDatasetRequest
+from .responses import UploadMitMResponse, RegisterMitMResponse, TrackedMitMEntry
 
 router = APIRouter(prefix='/mitm_dataset', tags=['mitm_dataset'])
 logger = logging.getLogger(__name__)
@@ -66,16 +70,27 @@ def post_mitm_dataset(session: ORMSessionDependency, new_mitm_dataset: AddTracke
 
 
 @router.get('/{uuid}')
-def get_mitm_dataset(uploaded_dataset: TrackedMitMDatasetDependency) -> TrackedMitMDataset:
-    return uploaded_dataset
+def get_mitm_dataset(tracked_dataset: TrackedMitMDatasetDependency) -> TrackedMitMDataset:
+    return tracked_dataset
 
 
-@router.get('/', response_model=list[TrackedMitMDataset])
+@router.get('/', response_model=list[TrackedMitMEntry])
 def get_mitm_datasets(session: ORMSessionDependency) -> Sequence[TrackedMitMDataset]:
     sequence = session.exec(sqlmodel.select(TrackedMitMDataset)).all()
     return sequence
 
 
 @router.delete('/{uuid}')
-def delete_mitm_dataset(session: ORMSessionDependency, uploaded_dataset: TrackedMitMDatasetDependency) -> None:
-    session.delete(uploaded_dataset)
+def delete_mitm_dataset(session: ORMSessionDependency, tracked_dataset: TrackedMitMDatasetDependency) -> None:
+    session.delete(tracked_dataset)
+
+
+@router.post('/export/{uuid}', response_class=StreamingResponse,
+            responses={200: {'content': {'application/zip': {}}}})
+def export_mitm_dataset(engine: DBEngineDependency, tracked_dataset: TrackedMitMDatasetDependency) -> StreamingResponse:
+    remote_engine, exportable = export_via_mapping(tracked_dataset)
+    import sqlalchemy as sa
+    with sa.orm.Session(remote_engine) as session:
+        ze = exportable.export_to_memory(session)
+        buf = ze.to_buffer()
+        return StreamingResponse(buf, media_type='application/zip')
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 42c555e9164800762ee71a44e8d3f8d786ad54ad..3f863a98be761734e5ce14cbe964219915b2c7ac 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -2,6 +2,8 @@ services:
   mitm-service:
     build:
       context: .
+      pull: true
+    pull_policy: always
     container_name: test_mitm_service
     volumes:
       - ./exports:/exports
diff --git a/justfile b/justfile
index 510899fb77ec7f935e06d78ce4ad9a40116bfa6d..cece37e56932983884cbcc4900d14190bb885e86 100644
--- a/justfile
+++ b/justfile
@@ -4,7 +4,7 @@ default:
     @just --list
 
 lock:
-    poetry lock --no-update
+    poetry lock
 
 update:
     poetry update
diff --git a/pyproject.toml b/pyproject.toml
index 6d397174cf0283c96b213b52a225eafd04c6afe1..89f387605be69fc2f2dd8b656d20c9c6882e50df 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -17,6 +17,7 @@ uvicorn = "^0.34.0"
 python-multipart = "*"
 psycopg2 = "*"
 sqlalchemy = "^2.0.38"
+alembic = "*"
 sqlmodel = "^0.0.24"
 mitm-tooling = { version = "*" }
 # mitm-tooling = { version = "*", source = "local", develop = true }
diff --git a/test/definitions.http b/test/definitions.http
index 2603ba6b1a52864029fb16fc5f447dbe5880d96f..c855f0343132321cf3dfe4eacc5d84592c9ac903 100644
--- a/test/definitions.http
+++ b/test/definitions.http
@@ -1,14 +1,14 @@
-GET http://localhost:8180/definitions/mitm_dataset/{{uuid}}?include_visualizations=True
+GET http://localhost:{{port}}/definitions/mitm_dataset/{{uuid}}?include_visualizations=True
 Accept: application/json
 
 ###
 
-GET http://localhost:8180/definitions/mitm_dataset/{{uuid}}/import?include_visualizations=True
+GET http://localhost:{{port}}/definitions/mitm_dataset/{{uuid}}/import?include_visualizations=True
 Accept: application/json
 
 ###
 
-GET http://localhost:8180/definitions/uploaded_mitm_dataset/{{uuid}}/import/zip?include_visualizations=True
+GET http://localhost:{{port}}/definitions/uploaded_mitm_dataset/{{uuid}}/import/zip?include_visualizations=True
 Accept: application/zip
 
 ###
\ No newline at end of file
diff --git a/test/http-client.env.json b/test/http-client.env.json
index 13d17c5f26c314b70dec886075c8d9eca84d27b6..a0942cb43f09cec870667f1705efc6450c4631be 100644
--- a/test/http-client.env.json
+++ b/test/http-client.env.json
@@ -1,5 +1,10 @@
 {
   "dev": {
-    "uuid": "4259f878-78d1-4fa4-bdc2-ae27b30d44bd"
+    "port": "8181",
+    "uuid": "ebf7b689-ce39-4de6-acad-af01ccf76f75"
+  },
+  "docker": {
+    "port": "8180",
+    "uuid": "8f09a527-3e69-4839-82e9-8db3dd38268c"
   }
 }
\ No newline at end of file
diff --git a/test/upload.http b/test/upload.http
index 6f94014b1aa59fce558759c5757ee98ac8f4bd7b..efad6babfc99f665a9a211069b9783cd44e6ad8f 100644
--- a/test/upload.http
+++ b/test/upload.http
@@ -1,4 +1,4 @@
-POST http://localhost:8180/mitm_dataset/upload?dataset_name=myname_0&mitm=MAED
+POST http://localhost:{{port}}/mitm_dataset/upload?dataset_name=myname_0&mitm=MAED
 Accept: application/json
 Content-Type: multipart/form-data; boundary=WebAppBoundary
 
@@ -11,11 +11,11 @@ Content-Type: application/zip
 
 ###
 
-GET http://localhost:8180/mitm_dataset/
+GET http://localhost:{{port}}/mitm_dataset/
 
 ###
 
-POST http://localhost:8180/mitm_dataset/upload?dataset_name=myname_1&mitm=MAED
+POST http://localhost:{{port}}/mitm_dataset/upload?dataset_name=myname_1&mitm=MAED
 Accept: application/json
 Content-Type: multipart/form-data; boundary=WebAppBoundary
 
@@ -28,10 +28,14 @@ Content-Type: application/zip
 
 ###
 
-GET http://localhost:8180/mitm_dataset/
+GET http://localhost:{{port}}/mitm_dataset/
 
 ###
 
-GET http://localhost:8180/mitm_dataset/64b8601b-6edb-493d-a9a1-484d346e0a02
+GET http://localhost:{{port}}/mitm_dataset/{{uuid}}
+
+###
+
+DELETE http://localhost:{{port}}/mitm_dataset/{{uuid}}
 
 ###
\ No newline at end of file