diff --git a/app/db/models/__init__.py b/app/db/models/__init__.py
index 47e5579d16d0dedc795ba2c61361fe78cdd069f5..932a01fac10d150d3bd96da2750d0df4dae0890d 100644
--- a/app/db/models/__init__.py
+++ b/app/db/models/__init__.py
@@ -1,4 +1,4 @@
 from .common import FromPydanticModelsMixin, APPLICATION_DB_SCHEMA
-from .tracked_mitm_dataset import BaseTrackedMitMDataset, AddTrackedMitMDataset, TrackedMitMDataset, ExternalTrackedMitMDataset
+from .tracked_mitm_dataset import BaseTrackedMitMDataset, AddTrackedMitMDataset, TrackedMitMDataset, TrackedMappedMitMDataset
 from .mapped_sources import MappedDB, MappedDBSource, MappedDBPull
 from .presentation import ListTrackedMitMDataset
diff --git a/app/db/models/mapped_sources.py b/app/db/models/mapped_sources.py
index 40d6107b64df3e9d6d1e0cd9596b085296d9940f..486b72e9b7db4ee34bdd28f3ec2c69db57827480 100644
--- a/app/db/models/mapped_sources.py
+++ b/app/db/models/mapped_sources.py
@@ -2,6 +2,7 @@ from __future__ import annotations
 
 import uuid
 from datetime import datetime
+from typing import TYPE_CHECKING
 from uuid import UUID
 
 import pydantic
@@ -11,10 +12,10 @@ from mitm_tooling.extraction.sql.data_models import CompiledVirtualView
 from mitm_tooling.extraction.sql.data_models.db_meta import DBMetaInfoBase
 from mitm_tooling.extraction.sql.data_models.db_probe import DBProbeBase
 from mitm_tooling.extraction.sql.mapping import ConceptMapping
-from mitm_tooling.representation import Header
+from mitm_tooling.representation.intermediate import Header
+from mitm_tooling.representation.sql import SQLRepInsertionResult
 from pydantic import BaseModel, AnyUrl
-from sqlalchemy.orm import relationship
-from sqlmodel import Field, SQLModel
+from sqlmodel import Field, SQLModel, Relationship
 
 from .common import APPLICATION_DB_SCHEMA
 from ..adapters import PydanticType, StrType
@@ -35,27 +36,8 @@ class DBInfo(BaseModel):
     db_probe: DBProbeBase
 
 
-class MappedDBPull(SQLModel, table=True):
-    model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)
-    __tablename__ = 'mapped_db_pulls'
-    __table_args__ = {'schema': APPLICATION_DB_SCHEMA}
-
-    id: int = Field(primary_key=True, sa_column_kwargs={'autoincrement': True})
-    tracked_mitm_dataset_id: int = Field(nullable=False,
-                                         foreign_key=f'{APPLICATION_DB_SCHEMA}.tracked_mitm_datasets.id')
-    mapped_db_source_id: int = Field(nullable=False, foreign_key=f'{APPLICATION_DB_SCHEMA}.mapped_db_sources.id')
-
-    time: datetime = Field(sa_type=sqlmodel.DateTime, default_factory=datetime.now)
-    instances_imported: int = Field(default=0)
-    rows_created: int = Field(default=0)
-
-    @property
-    def tracked_mitm_dataset(self) -> 'ExternalTrackedMitMDataset':
-        return relationship('ExternalTrackedMitMDataset', foreign_keys='tracked_mitm_dataset_id', lazy='joined')
-
-    @property
-    def mapped_db_source(self) -> MappedDBSource:
-        return relationship('MappedDBSource', foreign_keys='mapped_db_source_id', lazy='joined')
+if TYPE_CHECKING:
+    from .tracked_mitm_dataset import TrackedMappedMitMDataset
 
 
 class MappedDBSource(SQLModel, table=True):
@@ -69,17 +51,45 @@ class MappedDBSource(SQLModel, table=True):
     mitm_mapping: MappedDB = Field(sa_type=PydanticType.wrap(MappedDB), repr=False)
     mitm_header: Header = Field(sa_type=PydanticType.wrap(Header), repr=False)
 
+    @property
+    def tracked_mitm_datasets(self) -> list[TrackedMappedMitMDataset]:
+        from .tracked_mitm_dataset import TrackedMappedMitMDataset
+        return Relationship(back_populates='mapped_db_source', sa_relationship_args=(TrackedMappedMitMDataset,))
+
     @property
     def pulls(self) -> list[MappedDBPull]:
-        return relationship('MappedDBPull')
+        return Relationship(back_populates='mapped_db_source', sa_relationship_args=(MappedDBPull,))
 
-    # @hybridproperty
     @property
     def last_pulled(self) -> datetime | None:
-        from sqlalchemy import select, func
-        stmt = select(func.max(MappedDBPull.time)).where(MappedDBPull.mapped_db_source_id == self.id)
-        return stmt.scalar_subquery()
+        return max((p.time_complete for p in self.pulls), default=None)
+
+
+class MappedDBPull(SQLModel, table=True):
+    model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)
+    __tablename__ = 'mapped_db_pulls'
+    __table_args__ = {'schema': APPLICATION_DB_SCHEMA}
+
+    id: int = Field(primary_key=True, sa_column_kwargs={'autoincrement': True})
+    mapped_mitm_dataset_id: int = Field(nullable=False,
+                                        foreign_key=f'{APPLICATION_DB_SCHEMA}.mapped_mitm_datasets.id')
+    mapped_db_source_id: int = Field(nullable=False, foreign_key=f'{APPLICATION_DB_SCHEMA}.mapped_db_sources.id')
+
+    time_start: datetime = Field(sa_type=sqlmodel.DateTime, default_factory=datetime.now)
+    time_complete: datetime = Field(sa_type=sqlmodel.DateTime, default_factory=datetime.now)
+    instances_imported: int = Field(default=0)
+    rows_created: int = Field(default=0)
+    insertion_result: SQLRepInsertionResult = Field(sa_type=PydanticType.wrap(SQLRepInsertionResult))
+
+    @property
+    def mapped_mitm_dataset(self) -> TrackedMappedMitMDataset:
+        from .tracked_mitm_dataset import TrackedMappedMitMDataset
+        return Relationship(back_populates='pulls',
+                            sa_relationship_args=(TrackedMappedMitMDataset,),
+                            sa_relationship_kwargs=dict(foreign_keys='mapped_mitm_dataset_id'))
 
     @property
-    def tracked_mitm_datasets(self) -> list['ExternalTrackedMitMDataset']:
-        return relationship('ExternalTrackedMitMDataset', back_populates='mapped_db_source')
+    def mapped_db_source(self) -> MappedDBSource:
+        return Relationship(back_populates='pulls',
+                            sa_relationship_args=(MappedDBSource,),
+                            sa_relationship_kwargs=dict(foreign_keys='mapped_db_source_id'))
diff --git a/app/db/models/tracked_mitm_dataset.py b/app/db/models/tracked_mitm_dataset.py
index dba89717856e6feb48cd5531d06e5e7cd7bd95bc..3a1f6c451a973859936e1faa03e3f7341e587c7e 100644
--- a/app/db/models/tracked_mitm_dataset.py
+++ b/app/db/models/tracked_mitm_dataset.py
@@ -1,47 +1,56 @@
-from __future__ import annotations
+#from __future__ import annotations
 
 import uuid
-from abc import ABC
 from datetime import datetime
-from typing import TYPE_CHECKING
 from uuid import UUID
 
 import pydantic
 import sqlmodel
 from mitm_tooling.definition import MITM
-from mitm_tooling.representation import Header, SQLRepresentationSchema, mk_sql_rep_schema
+from mitm_tooling.representation.intermediate import Header
+from mitm_tooling.representation.sql import SQLRepresentationSchema, mk_sql_rep_schema
+from mitm_tooling.representation.sql import SchemaName
 from mitm_tooling.transformation.superset.asset_bundles import MitMDatasetIdentifierBundle, DatasourceIdentifierBundle
 from mitm_tooling.transformation.superset.common import DBConnectionInfo
 from mitm_tooling.transformation.superset.definitions import MitMDatasetIdentifier
 from pydantic import BaseModel, AnyUrl
-from sqlalchemy.orm import relationship
-from sqlmodel import SQLModel, Field
+from sqlmodel import SQLModel, Field, Relationship
 
 from app.db.adapters import StrType, PydanticType
 from .common import FromPydanticModelsMixin, APPLICATION_DB_SCHEMA
+# if TYPE_CHECKING:
+from .mapped_sources import MappedDBSource, MappedDBPull, MappedDB
 
-if TYPE_CHECKING:
-    from .mapped_sources import MappedDBSource, MappedDBPull
 
-
-class BaseTrackedMitMDataset(BaseModel, ABC):
+class BaseTrackedMitMDataset(BaseModel):
     uuid: UUID
     dataset_name: str
 
 
-class AddTrackedMitMDataset(BaseModel):
+class SlimTrackedMitMDataset(BaseTrackedMitMDataset):
+    mitm: MITM
+
+
+class AddTrackedMitMDataset(FromPydanticModelsMixin, BaseModel):
+    model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)
     uuid: UUID | None = None
     dataset_name: str
-    schema_name: str
+    schema_name: SchemaName
     sql_alchemy_uri: AnyUrl
     mitm_header: Header
 
 
-class TrackedMitMDataset(FromPydanticModelsMixin, AddTrackedMitMDataset, BaseTrackedMitMDataset, SQLModel, table=True):
+class GetTrackedMitMDataset(AddTrackedMitMDataset):
+    id: int
+    uuid: UUID
+    lives_on_mitm_db: bool
+    schema_under_external_control: bool
+    header_changed: datetime
+    data_changed: datetime
+
+
+class TrackedMitMDataset(GetTrackedMitMDataset, BaseTrackedMitMDataset, SQLModel):
     model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)
-    __tablename__ = 'tracked_mitm_datasets'
-    __table_args__ = {'schema': APPLICATION_DB_SCHEMA}
-    __mapper_args__ = {'polymorphic_on': 'type', 'polymorphic_identity': 'local'}
 
     id: int = Field(primary_key=True, sa_column_kwargs={'autoincrement': True})
     uuid: UUID = Field(default_factory=uuid.uuid4, index=True, unique=True, nullable=False)
@@ -51,8 +60,12 @@ class TrackedMitMDataset(FromPydanticModelsMixin, AddTrackedMitMDataset, BaseTra
     schema_name: str = Field()
     sql_alchemy_uri: AnyUrl = Field(sa_type=StrType.wrap(AnyUrl))
 
-    is_managed_locally: bool = Field(default=True)
-    last_edited: datetime = Field(sa_type=sqlmodel.DateTime, default_factory=datetime.now)
+    lives_on_mitm_db: bool = Field(default=True)
+    schema_under_external_control: bool = Field(default=False)
+
+    header_changed: datetime = Field(sa_type=sqlmodel.DateTime, default_factory=datetime.now)
+    data_changed: datetime = Field(sa_type=sqlmodel.DateTime, default_factory=datetime.now)
+
     mitm_header: Header = Field(sa_type=PydanticType.wrap(Header), repr=False)
     identifier_bundle: MitMDatasetIdentifierBundle = Field(sa_type=PydanticType.wrap(MitMDatasetIdentifierBundle),
                                                            repr=False)
@@ -75,39 +88,94 @@ class TrackedMitMDataset(FromPydanticModelsMixin, AddTrackedMitMDataset, BaseTra
     def sql_rep_schema(self) -> SQLRepresentationSchema:
         return mk_sql_rep_schema(self.mitm_header, override_schema=self.schema_name)
 
-    # @pydantic.computed_field()
     @property
     def mitm(self) -> MITM:
         return self.mitm_header.mitm
 
 
-class ExternalTrackedMitMDataset(TrackedMitMDataset):
+class LocalMitMDataset(TrackedMitMDataset, table=True):
     model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)
-    __mapper_args__ = {'polymorphic_identity': 'mapped'}
+    __tablename__ = 'local_mitm_datasets'
+    __table_args__ = {'schema': APPLICATION_DB_SCHEMA}
+    # __mapper_args__ = {'polymorphic_on': 'type', 'polymorphic_identity': 'local'}
+    id: int = Field(primary_key=True, sa_column_kwargs={'autoincrement': True})
 
-    type: str = Field(default='mapped', nullable=False)
-    sql_alchemy_uri: AnyUrl = Field(sa_type=StrType.wrap(AnyUrl))
-    is_managed_locally: bool = Field(default=False)
-    last_edited: datetime = Field(sa_type=sqlmodel.DateTime, default_factory=datetime.now)
-    mitm_header: Header = Field(sa_type=PydanticType.wrap(Header), repr=False)
-    identifier_bundle: MitMDatasetIdentifierBundle = Field(sa_type=PydanticType.wrap(MitMDatasetIdentifierBundle),
-                                                           repr=False)
 
-    mapped_db_source_id: int = Field(foreign_key=f'{APPLICATION_DB_SCHEMA}.mapped_db_sources.id', nullable=False)
+class AddExternalMitMDataset(FromPydanticModelsMixin, BaseModel):
+    model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)
+    uuid: UUID | None = None
+    dataset_name: str
+    schema_name: SchemaName
+    sql_alchemy_uri: AnyUrl
+
+
+class GetExternalMitMDataset(GetTrackedMitMDataset):
+    pass
+
+
+class TrackedExternalMitMDataset(TrackedMitMDataset, table=True):
+    model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)
+    __tablename__ = 'external_mitm_datasets'
+    __table_args__ = {'schema': APPLICATION_DB_SCHEMA}
+    # __mapper_args__ = {'polymorphic_identity': 'external'}
+    id: int = Field(primary_key=True, sa_column_kwargs={'autoincrement': True})
+
+    lives_on_mitm_db: bool = Field(default=False)
+    schema_under_external_control: bool = Field(default=True)
+
+
+class LastPulledMixin(BaseModel):
+    pulls: list[MappedDBPull]
+
+    @property
+    def last_pulled(self) -> datetime | None:
+        return max((p.time_complete for p in self.pulls), default=None)
+
+    # @last_pulled.inplace.expression
+    # @classmethod
+    # def last_pulled_exp(cls) -> ColumnElement[datetime]:
+    #    from sqlalchemy import select, func
+    #    stmt = select(func.max('MappedDBPull.time')).where('MappedDBPull.tracked_mitm_dataset_id' == cls.id)
+    #    return stmt.scalar_subquery()
+
+
+class AddMappedMitMDataset(BaseModel):
+    model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)
+    uuid: UUID | None = None
+    dataset_name: str
+    sql_alchemy_uri: AnyUrl
+    mapped_db: MappedDB
+
+
+class GetMappedMitMDataset(GetTrackedMitMDataset):
+    # @property
+    # def mapped_db_source(self) -> MappedDBSource | None:
+    #    pass
+    mapped_db_source: MappedDBSource | None
+    pulls: list[MappedDBPull]
+
+
+class TrackedMappedMitMDataset(TrackedMitMDataset, table=True):
+    model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)
+    __table_args__ = {'schema': APPLICATION_DB_SCHEMA}
+    __tablename__ = 'mapped_mitm_datasets'
+    # __mapper_args__ = {'polymorphic_identity': 'mapped'}
+    id: int = Field(primary_key=True, sa_column_kwargs={'autoincrement': True})
+
+    lives_on_mitm_db: bool = Field(default=True)
+    schema_under_external_control: bool = Field(default=True)
+
+    mapped_db_source_id: int | None = Field(foreign_key=f'{APPLICATION_DB_SCHEMA}.mapped_db_sources.id')
 
     @property
-    def mapped_db_source(self) -> MappedDBSource:
-        return relationship('MappedDBSource',
-                            back_populates='tracked_mitm_datasets',
-                            foreign_keys='mapped_db_source_id')
+    def mapped_db_source(self) -> MappedDBSource | None:
+        return Relationship(back_populates='tracked_mitm_datasets',
+                            sa_relationship_kwargs=dict(foreign_keys='mapped_db_source_id'))
 
     @property
     def pulls(self) -> list[MappedDBPull]:
-        return relationship('MappedDBPull', foreign_keys='tracked_mitm_dataset_id')
+        return Relationship(back_populates='mapped_mitm_dataset')
 
-    # @hybridproperty
     @property
     def last_pulled(self) -> datetime | None:
-        from sqlalchemy import select, func
-        stmt = select(func.max('MappedDBPull.time')).where('MappedDBPull.tracked_mitm_dataset_id' == self.id)
-        return stmt.scalar_subquery()
+        return max((p.time_complete for p in self.pulls), default=None)
diff --git a/app/db/setup.py b/app/db/setup.py
index a4d2bc2c60f95bf88c09f4b83a69cc6c535503cd..80ab35ed5975f0f99aa5be17401b598af63359d6 100644
--- a/app/db/setup.py
+++ b/app/db/setup.py
@@ -10,7 +10,7 @@ from ..config import app_cfg
 logger = logging.getLogger(__name__)
 MITM_DATABASE_URL = sa.engine.URL.create(*pick_from_mapping(app_cfg, ['MITM_DATABASE_DIALECT', 'MITM_DATABASE_USER',
                                                                       'MITM_DATABASE_PASSWORD', 'MITM_DATABASE_HOST',
-                                                                      'MITM_DATABASE_PORT', 'MITM_DATABASE_DB'], flatten=True))
+                                                                      'MITM_DATABASE_PORT', 'MITM_DATABASE_DB']))
 
 execution_options = {}
 if MITM_DATABASE_URL.get_dialect().name == 'sqlite':
@@ -19,7 +19,7 @@ if MITM_DATABASE_URL.get_dialect().name == 'sqlite':
 engine = create_engine(MITM_DATABASE_URL, execution_options=execution_options)
 
 def init_db():
-    from .models import TrackedMitMDataset, ExternalTrackedMitMDataset, MappedDBSource, MappedDBPull
+    from .models import TrackedMitMDataset, TrackedMappedMitMDataset, MappedDBSource, MappedDBPull
     from .models import APPLICATION_DB_SCHEMA
     from sqlmodel import SQLModel
     from .utils import create_schema
diff --git a/app/db/utils.py b/app/db/utils.py
index f0c089cd8297135ac7af97c0a83ef3711f438626..6a0ad4da66055623536d95aa76ad8c2b8556d434 100644
--- a/app/db/utils.py
+++ b/app/db/utils.py
@@ -1,51 +1,44 @@
 import contextlib
 import logging
 from typing import Generator
-from uuid import UUID
 
 import sqlalchemy as sa
 import sqlmodel
 from mitm_tooling.extraction.sql.data_models import DBMetaInfo, DBProbe
 from mitm_tooling.extraction.sql.db import connect_and_reflect
+from mitm_tooling.extraction.sql.db import create_db_probe
 from sqlalchemy import Engine
-from sqlalchemy.exc import OperationalError, SQLAlchemyError
-from sqlalchemy.orm import Session
+from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy.orm.session import Session
 from sqlalchemy.sql.ddl import CreateSchema, DropSchema
 from sqlmodel import Session as ORMSession
 
 from .models import TrackedMitMDataset
 
-from mitm_tooling.extraction.sql.db import create_db_probe
-
 logger = logging.getLogger(__name__)
 
+
 @contextlib.contextmanager
 def mk_session(engine: Engine) -> Generator[Session, None, None]:
     with Session(engine) as session:
         yield session
 
+
 @contextlib.contextmanager
 def mk_orm_session(engine: Engine) -> Generator[ORMSession, None, None]:
     with sqlmodel.Session(engine) as session:
         yield session
 
 
-def infer_tracked_mitm_dataset_schema(engine: sa.Engine, uuid: UUID) -> DBMetaInfo | None:
-    with mk_session(engine) as session:
-        model = session.query(TrackedMitMDataset).filter(TrackedMitMDataset.uuid == uuid).one_or_none()
-        if model is not None:
-            meta, _ = connect_and_reflect(engine, allowed_schemas={model.schema_name})
-            return DBMetaInfo.from_sa_meta(meta, default_schema=model.schema_name)
-        return None
+def infer_tracked_mitm_dataset_schema(engine: sa.Engine, tracked_mitm_dataset: TrackedMitMDataset) -> DBMetaInfo:
+    meta, _ = connect_and_reflect(engine, allowed_schemas={tracked_mitm_dataset.schema_name})
+    return DBMetaInfo.from_sa_meta(meta, default_schema=tracked_mitm_dataset.schema_name)
 
 
-def probe_tracked_mitm_dataset_schema(engine: sa.Engine, uuid: UUID) -> DBProbe | None:
-    db_meta = infer_tracked_mitm_dataset_schema(engine, uuid)
-    if db_meta is not None:
-        with mk_session(engine) as session:
-            return create_db_probe(session, db_meta, sample_size=1000)
-    return None
+def probe_tracked_mitm_dataset_schema(engine: sa.Engine, tracked_mitm_dataset: TrackedMitMDataset) -> DBProbe:
+    db_meta = infer_tracked_mitm_dataset_schema(engine, tracked_mitm_dataset)
+    with mk_session(engine) as session:
+        return create_db_probe(session, db_meta, sample_size=1000)
 
 
 def create_schema(conn_or_sess: sa.Connection | Session, unique_schema_name: str) -> bool:
diff --git a/app/dependencies/orm.py b/app/dependencies/orm.py
index 66281981e65f8c3de04e6451b94f2b1cb0797a3a..375df7bbd9aca26240188185d92ef64e8ea983a9 100644
--- a/app/dependencies/orm.py
+++ b/app/dependencies/orm.py
@@ -1,17 +1,66 @@
-from typing import Annotated
+from typing import Annotated, Literal, Set
 from uuid import UUID
 
 import fastapi
 from fastapi import HTTPException, Depends
 
 from .db import ORMSessionDependency
-from ..db.models import TrackedMitMDataset
+from ..db.models import TrackedMappedMitMDataset
+from ..db.models.tracked_mitm_dataset import LocalMitMDataset, TrackedExternalMitMDataset
 
+AnyMitMDataset = LocalMitMDataset | TrackedExternalMitMDataset | TrackedMappedMitMDataset
 
-def get_uploaded_dataset(session: ORMSessionDependency, uuid: UUID = fastapi.Path()) -> TrackedMitMDataset:
-    o = session.query(TrackedMitMDataset).filter(TrackedMitMDataset.uuid == uuid).one_or_none()
+
+def get_tracked_dataset(session: ORMSessionDependency, uuid: UUID = fastapi.Path()) -> AnyMitMDataset:
+    o = session.query(LocalMitMDataset).filter(LocalMitMDataset.uuid == uuid).one_or_none()
+    if o is None:
+        o = session.query(TrackedExternalMitMDataset).filter(TrackedExternalMitMDataset.uuid == uuid).one_or_none()
+    if o is None:
+        o = session.query(TrackedMappedMitMDataset).filter(TrackedMappedMitMDataset.uuid == uuid).one_or_none()
     if o is None:
         raise HTTPException(status_code=404, detail='Referenced MitM Dataset does not exist.')
     return o
 
-TrackedMitMDatasetDependency = Annotated[TrackedMitMDataset, Depends(get_uploaded_dataset)]
\ No newline at end of file
+
+def get_tracked_local_dataset(session: ORMSessionDependency,
+                              uuid: UUID = fastapi.Path()) -> LocalMitMDataset:
+    o = session.query(LocalMitMDataset).filter(LocalMitMDataset.uuid == uuid).one_or_none()
+    if o is None:
+        raise HTTPException(status_code=404, detail='Referenced MitM Dataset does not exist.')
+    return o
+
+
+def get_tracked_external_dataset(session: ORMSessionDependency,
+                                 uuid: UUID = fastapi.Path()) -> TrackedExternalMitMDataset:
+    o = session.query(TrackedExternalMitMDataset).filter(TrackedExternalMitMDataset.uuid == uuid).one_or_none()
+    if o is None:
+        raise HTTPException(status_code=404, detail='Referenced MitM Dataset does not exist.')
+    return o
+
+
+def get_tracked_mapped_dataset(session: ORMSessionDependency,
+                               uuid: UUID = fastapi.Path()) -> TrackedMappedMitMDataset:
+    o = session.query(TrackedMappedMitMDataset).filter(TrackedMappedMitMDataset.uuid == uuid).one_or_none()
+    if o is None:
+        raise HTTPException(status_code=404, detail='Referenced MitM Dataset does not exist.')
+    return o
+
+
+TrackedMitMDatasetDependency = Annotated[AnyMitMDataset, Depends(get_tracked_dataset)]
+TrackedLocalMitMDatasetDependency = Annotated[LocalMitMDataset, Depends(get_tracked_local_dataset)]
+TrackedExternalMitMDatasetDependency = Annotated[TrackedExternalMitMDataset, Depends(get_tracked_external_dataset)]
+TrackedMappedMitMDatasetDependency = Annotated[TrackedMappedMitMDataset, Depends(get_tracked_mapped_dataset)]
+
+
+def get_tracked_datasets(session: ORMSessionDependency,
+                         types: Set[Literal['local', 'external', 'mapped']] = frozenset(('local', 'external',
+                                                                                         'mapped'))) -> list[
+    AnyMitMDataset]:
+    os = []
+    if 'local' in types:
+        os.extend(session.query(LocalMitMDataset).all())
+    if 'external' in types:
+        os.extend(session.query(TrackedExternalMitMDataset).all())
+    if 'mapped' in types:
+        os.extend(session.query(TrackedMappedMitMDataset).all())
+    return os
diff --git a/app/models/__init__.py b/app/logic/__init__.py
similarity index 100%
rename from app/models/__init__.py
rename to app/logic/__init__.py
diff --git a/app/logic/append.py b/app/logic/append.py
new file mode 100644
index 0000000000000000000000000000000000000000..4645f31c9f1e3e34d588c246491c7cc373597ab2
--- /dev/null
+++ b/app/logic/append.py
@@ -0,0 +1,40 @@
+import logging
+from typing import Callable
+
+from mitm_tooling.extraction.sql.mapping import Exportable
+from mitm_tooling.representation.df import TypedMitMDataFrameStream
+from mitm_tooling.representation.sql import SQLRepInsertionResult
+from mitm_tooling.representation.sql.sql_insertion import append_data
+from mitm_tooling.transformation.df import exportable_to_typed_mitm_dataframes_stream
+from mitm_tooling.utilities.sql_utils import create_sa_engine
+from pydantic import AnyUrl
+
+from app.db.models import TrackedMitMDataset
+
+logger = logging.getLogger(__name__)
+
+
+def append_exportable(source: AnyUrl,
+                      exportable: Exportable,
+                      tracked_mitm_dataset: TrackedMitMDataset) -> SQLRepInsertionResult:
+    source_engine = create_sa_engine(source)
+
+    def get_instances():
+        return exportable_to_typed_mitm_dataframes_stream(source_engine, exportable, stream_data=False)
+
+    return append_instances(get_instances, tracked_mitm_dataset)
+
+
+def append_instances(
+        gen_instances: Callable[[], TypedMitMDataFrameStream],
+        tracked_mitm_dataset: TrackedMitMDataset,
+) -> SQLRepInsertionResult:
+    sql_rep_schema = tracked_mitm_dataset.sql_rep_schema
+    target_engine = create_sa_engine(tracked_mitm_dataset.sql_alchemy_uri)
+
+    with target_engine.begin() as conn:
+        insertion_result = append_data(conn, lambda: sql_rep_schema, gen_instances)
+        logger.info(f'Appended MitM Data into schema {tracked_mitm_dataset.schema_name}: {insertion_result}')
+        # engine.begin() commits automatically on successful exit
+
+        return insertion_result
diff --git a/app/logic/definitions.py b/app/logic/definitions.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a9388214c48a2b80ce449c41f55d793d77fe696
--- /dev/null
+++ b/app/logic/definitions.py
@@ -0,0 +1,57 @@
+from datetime import datetime
+from typing import Sequence
+
+from mitm_tooling.definition import MITM
+from mitm_tooling.representation.intermediate import Header
+from mitm_tooling.transformation.superset import VisualizationType, MAEDVisualizationType, \
+    mk_superset_datasource_bundle, mk_superset_mitm_dataset_bundle
+from mitm_tooling.transformation.superset.asset_bundles import MitMDatasetIdentifierBundle, SupersetDatasourceBundle, \
+    SupersetMitMDatasetBundle, SupersetVisualizationBundle
+from mitm_tooling.transformation.superset.common import DBConnectionInfo
+from app.db.utils import ORMSession
+from app.db.models import TrackedMitMDataset
+
+
+def get_default_visualization_types(mitm: MITM) -> list[VisualizationType]:
+    if mitm == MITM.MAED:
+        return [MAEDVisualizationType.Baseline]
+    else:
+        return []
+
+
+def mk_datasource_bundle(mitm_header: Header,
+                         db_conn_info: DBConnectionInfo,
+                         identifiers: MitMDatasetIdentifierBundle | None = None
+                         ) -> SupersetDatasourceBundle:
+    datasource_bundle = mk_superset_datasource_bundle(mitm_header,
+                                                      db_conn_info,
+                                                      identifiers)
+    return datasource_bundle
+
+
+def mk_mitm_dataset_bundle(mitm_header: Header,
+                           db_conn_info: DBConnectionInfo,
+                           dataset_name: str,
+                           identifiers: MitMDatasetIdentifierBundle | None = None,
+                           include_default_visualizations: bool = False,
+                           visualization_types: Sequence[VisualizationType] | None = None) -> SupersetMitMDatasetBundle:
+    mitm_dataset_bundle = mk_superset_mitm_dataset_bundle(mitm_header,
+                                                          db_conn_info,
+                                                          dataset_name,
+                                                          identifiers,
+                                                          visualization_types=(get_default_visualization_types(
+                                                              mitm_header.mitm) if include_default_visualizations else []) + (
+                                                                                      visualization_types or [])
+                                                          )
+    return mitm_dataset_bundle
+
+
+def track_visualizations(orm_session: ORMSession,
+                         tracked_dataset: TrackedMitMDataset,
+                         visualization_bundle: SupersetVisualizationBundle) -> TrackedMitMDataset:
+    viz_id_map = visualization_bundle.viz_identifier_map
+    tracked_dataset.identifier_bundle = tracked_dataset.identifier_bundle.with_visualizations(viz_id_map)
+    tracked_dataset.header_changed = datetime.now()
+    orm_session.commit()
+    orm_session.refresh(tracked_dataset)
+    return tracked_dataset
diff --git a/app/routes/mitm_dataset/export.py b/app/logic/export.py
similarity index 91%
rename from app/routes/mitm_dataset/export.py
rename to app/logic/export.py
index 83185bd4dd29e07c7f2a58d2b6e8a2ea079efe2b..690fc7f9b25afa538218a70ed7988eec593e2efc 100644
--- a/app/routes/mitm_dataset/export.py
+++ b/app/logic/export.py
@@ -9,7 +9,6 @@ from mitm_tooling.transformation.sql.into_mappings import sql_rep_into_mappings
 from mitm_tooling.utilities.sql_utils import create_sa_engine
 
 from app.db.models import TrackedMitMDataset
-from app.routes.mitm_dataset.mapped_db import mk_exportable
 
 logger = logging.getLogger(__name__)
 
@@ -24,7 +23,7 @@ def export_via_mapping(tracked_dataset: TrackedMitMDataset) -> tuple[sa.Engine,
 
     logger.info('Preparing to export CMs:\n' + '\n'.join((str(cm) for cm in cms)))
 
-    db_meta_info = DBMetaInfo.from_sa_meta(sql_rep_schema.meta, default_schema=schema_name)
+    db_meta_info = DBMetaInfo.from_sa_meta(sql_rep_schema.sa_meta, default_schema=schema_name)
     db_metas = {SourceDBType.OriginalDB: db_meta_info}
     remote_engine = create_sa_engine(sql_alchemy_uri)
 
diff --git a/app/routes/mitm_dataset/mapped_db.py b/app/logic/mapped_db.py
similarity index 100%
rename from app/routes/mitm_dataset/mapped_db.py
rename to app/logic/mapped_db.py
diff --git a/app/logic/pull_mapped.py b/app/logic/pull_mapped.py
new file mode 100644
index 0000000000000000000000000000000000000000..222c30e20733b487c332833571e6f240470ca1b7
--- /dev/null
+++ b/app/logic/pull_mapped.py
@@ -0,0 +1,48 @@
+from datetime import datetime
+
+from mitm_tooling.utilities.sql_utils import create_sa_engine
+
+from app.db.models.mapped_sources import MappedDBPull
+from app.db.models.tracked_mitm_dataset import TrackedMappedMitMDataset
+from app.dependencies.db import ORMSessionDependency
+from .append import append_exportable
+from .mapped_db import mk_exportable
+from mitm_tooling.utilities.sql_utils import create_sa_engine
+
+from app.db.models.mapped_sources import MappedDBPull
+from app.db.models.tracked_mitm_dataset import TrackedMappedMitMDataset
+from app.dependencies.db import ORMSessionDependency
+from .append import append_exportable
+from .mapped_db import mk_exportable
+
+
+def pull_mapped_mitm_dataset(session: ORMSessionDependency,
+                             tracked_mapped_dataset: TrackedMappedMitMDataset) -> MappedDBPull:
+    db_source = tracked_mapped_dataset.mapped_db_source
+
+    remote_engine = create_sa_engine(db_source.sql_alchemy_uri)
+
+    time_start = datetime.now()
+
+    exportable = mk_exportable(remote_engine, db_source.mitm_mapping)
+
+    ir = append_exportable(db_source.sql_alchemy_uri, exportable, tracked_mapped_dataset)
+
+    time_complete = datetime.now()
+    tracked_mapped_dataset.header_changed = time_complete
+    tracked_mapped_dataset.data_changed = time_complete
+
+    pull_model = MappedDBPull(mapped_mitm_dataset_id=tracked_mapped_dataset.id,
+                              mapped_db_source_id=db_source.id,
+                              instances_imported=ir.inserted_instances,
+                              rows_created=ir.inserted_rows,
+                              insertion_result=ir,
+                              time_start=time_start,
+                              time_complete=time_complete
+                              )
+
+    session.add(pull_model)
+    session.flush()
+    session.refresh(pull_model)
+
+    return pull_model
diff --git a/app/logic/refresh.py b/app/logic/refresh.py
new file mode 100644
index 0000000000000000000000000000000000000000..274765b248a61fea2d57b4eac90df612797e06a4
--- /dev/null
+++ b/app/logic/refresh.py
@@ -0,0 +1,30 @@
+import datetime
+
+from mitm_tooling.representation.intermediate import Header
+from mitm_tooling.representation.intermediate.deltas import diff_header
+from mitm_tooling.transformation.sql import mitm_db_into_header
+from mitm_tooling.utilities.sql_utils import create_sa_engine
+
+from app.db.models import TrackedMitMDataset
+from app.db.models.tracked_mitm_dataset import AddExternalMitMDataset
+from app.dependencies.db import ORMSession
+
+
+def pull_header(tracked_dataset: TrackedMitMDataset | AddExternalMitMDataset) -> Header | None:
+    remote_engine = create_sa_engine(tracked_dataset.sql_alchemy_uri)
+    return mitm_db_into_header(remote_engine, override_schema=tracked_dataset.schema_name)
+
+
+def update_header(session: ORMSession, model: TrackedMitMDataset, header: Header) -> TrackedMitMDataset:
+    model.mitm_header = header
+    model.header_changed = datetime.datetime.now()
+    session.add(model)
+    session.commit()
+    return model
+
+
+def refresh(tracked_dataset: TrackedMitMDataset, new_header: Header):
+    old_header = tracked_dataset.mitm_header
+    header_delta = diff_header(old_header, new_header)
+    row_delta = ...
+    raise NotImplementedError
diff --git a/app/logic/register.py b/app/logic/register.py
new file mode 100644
index 0000000000000000000000000000000000000000..02158917b9feb7068eff3429576c7ed36135bf38
--- /dev/null
+++ b/app/logic/register.py
@@ -0,0 +1,33 @@
+from typing import TypeVar
+
+from mitm_tooling.transformation.superset.asset_bundles import MitMDatasetIdentifierBundle
+from mitm_tooling.transformation.superset.common import DBConnectionInfo
+from mitm_tooling.transformation.superset.definitions import MitMDatasetIdentifier
+
+from app.db.models import TrackedMitMDataset, AddTrackedMitMDataset
+from app.db.models.tracked_mitm_dataset import LocalMitMDataset
+from app.dependencies.db import ORMSessionDependency
+from app.logic.definitions import mk_mitm_dataset_bundle
+
+T = TypeVar('T')
+
+
+def register_mitm_dataset(session: ORMSessionDependency,
+                          add_model: AddTrackedMitMDataset,
+                          model_cls: type[T] = LocalMitMDataset,
+                          **kwargs) -> T:
+    db_conn_info = DBConnectionInfo(sql_alchemy_uri=add_model.sql_alchemy_uri, schema_name=add_model.schema_name)
+    identifiers = MitMDatasetIdentifierBundle(mitm_dataset=MitMDatasetIdentifier(dataset_name=add_model.dataset_name,
+                                                                                 uuid=add_model.uuid))
+    definition = mk_mitm_dataset_bundle(add_model.mitm_header,
+                                        db_conn_info,
+                                        add_model.dataset_name,
+                                        identifiers=identifiers)
+    identifier_bundle = definition.identifiers
+
+    model = model_cls.from_models(add_model, identifier_bundle=identifier_bundle, **kwargs)
+
+    session.add(model)
+    session.commit()
+    session.refresh(model)
+    return model
diff --git a/app/logic/register_external.py b/app/logic/register_external.py
new file mode 100644
index 0000000000000000000000000000000000000000..23d0ab82172707aaa2f817cdb552e6d1ac041f8e
--- /dev/null
+++ b/app/logic/register_external.py
@@ -0,0 +1,23 @@
+from uuid import UUID
+
+from mitm_tooling.representation.sql import SchemaName
+from mitm_tooling.transformation.sql import mitm_db_into_header
+from mitm_tooling.utilities.sql_utils import create_sa_engine
+from pydantic import BaseModel, AnyUrl
+
+from app.db.models.tracked_mitm_dataset import AddTrackedMitMDataset, \
+    TrackedExternalMitMDataset, AddExternalMitMDataset
+from app.dependencies.db import ORMSessionDependency
+
+
+def register_external_mitm_dataset(session: ORMSessionDependency,
+                                   add_model: AddExternalMitMDataset) -> TrackedExternalMitMDataset:
+    from .refresh import pull_header
+    header = pull_header(add_model)
+
+    from .register import register_mitm_dataset
+    return register_mitm_dataset(session,
+                                 AddTrackedMitMDataset.from_models(add_model, mitm_header=header),
+                                 model_cls=TrackedExternalMitMDataset,
+                                 lives_on_mitm_db=False,
+                                 schema_under_external_control=True)
diff --git a/app/logic/register_mapped.py b/app/logic/register_mapped.py
new file mode 100644
index 0000000000000000000000000000000000000000..559bb404415c159e0b80b7ad657edd47b2e24658
--- /dev/null
+++ b/app/logic/register_mapped.py
@@ -0,0 +1,42 @@
+from datetime import datetime
+from uuid import UUID
+
+from mitm_tooling.utilities.sql_utils import create_sa_engine
+from pydantic import BaseModel, AnyUrl
+
+from app.db.models.mapped_sources import MappedDBSource, MappedDBPull, MappedDB
+from app.db.models.tracked_mitm_dataset import TrackedMappedMitMDataset, AddMappedMitMDataset
+from app.dependencies.db import ORMSessionDependency
+from .append import append_exportable
+from .mapped_db import mk_exportable
+from .upload import upload_exportable
+
+
+def register_mapped_mitm_dataset(
+        session: ORMSessionDependency,
+        add_model: AddMappedMitMDataset,
+) -> tuple[TrackedMappedMitMDataset, MappedDBSource]:
+    remote_engine = create_sa_engine(add_model.sql_alchemy_uri)
+    exportable = mk_exportable(remote_engine, add_model.mapped_db)
+    # header = exportable.generate_header(remote_engine)
+
+    add_tracked_mitm_dataset = upload_exportable(add_model.sql_alchemy_uri,
+                                                 exportable,
+                                                 add_model.dataset_name,
+                                                 skip_instances=True)
+
+    mapped_db_source = MappedDBSource(sql_alchemy_uri=add_model.sql_alchemy_uri,
+                                      mitm_mapping=add_model.mapped_db,
+                                      mitm_header=add_tracked_mitm_dataset.mitm_header)
+    session.add(mapped_db_source)
+
+    external_tracked_mitm_dataset = TrackedMappedMitMDataset.from_models(add_tracked_mitm_dataset,
+                                                                         mapped_db_source=mapped_db_source)
+
+    session.add(external_tracked_mitm_dataset)
+
+    session.commit()
+    session.refresh(external_tracked_mitm_dataset)
+    session.refresh(mapped_db_source)
+
+    return external_tracked_mitm_dataset, mapped_db_source
\ No newline at end of file
diff --git a/app/routes/mitm_dataset/upload.py b/app/logic/upload.py
similarity index 61%
rename from app/routes/mitm_dataset/upload.py
rename to app/logic/upload.py
index b394d93dfe2573af48e01b200fc3ae3411cc5443..89b25ee4fd5a19d9da4b63d5c5397399c7366375 100644
--- a/app/routes/mitm_dataset/upload.py
+++ b/app/logic/upload.py
@@ -6,10 +6,13 @@ import sqlalchemy as sa
 from mitm_tooling.definition import MITM
 from mitm_tooling.extraction.sql.mapping import Exportable
 from mitm_tooling.io import read_zip
-from mitm_tooling.representation import mk_sql_rep_schema, MITMData, Header, SQLRepresentationSchema
-from mitm_tooling.representation.sql_representation import insert_db_schema, insert_mitm_data_instances
-from mitm_tooling.transformation.sql.from_exportable import insert_exportable_instances
-from mitm_tooling.utilities.identifiers import name_plus_uuid, mk_uuid, mk_short_uuid_str
+from mitm_tooling.representation.df import TypedMitMDataFrameStream
+from mitm_tooling.representation.intermediate import Header, MITMData
+from mitm_tooling.representation.sql import SQLRepresentationSchema, SQLRepInsertionResult, mk_sql_rep_schema, \
+    insert_data
+from mitm_tooling.transformation.df import exportable_to_typed_mitm_dataframes_stream
+from mitm_tooling.transformation.df import mitm_data_into_mitm_dataframes
+from mitm_tooling.utilities.identifiers import mk_uuid, mk_short_uuid_str, name_plus_uuid
 from mitm_tooling.utilities.io_utils import DataSource
 from mitm_tooling.utilities.sql_utils import sa_url_into_any_url, create_sa_engine
 from pydantic import AnyUrl
@@ -25,11 +28,8 @@ logger = logging.getLogger(__name__)
 def _skip_instances(
         conn: sa.Connection,
         sql_rep_schema: SQLRepresentationSchema,
-) -> tuple[int, int]:
-    """
-    Skip the instances insertion.
-    """
-    return 0, 0
+) -> SQLRepInsertionResult:
+    return SQLRepInsertionResult(inserted_types=[], inserted_instances=0, inserted_rows=0)
 
 
 def upload_mitm_file(mitm: MITM,
@@ -46,13 +46,12 @@ def upload_mitm_data(mitm_data: MITMData,
                      uuid: UUID | None = None,
                      engine: Engine = None,
                      skip_instances: bool = False) -> AddTrackedMitMDataset:
-    header_creator = lambda: mitm_data.header
     if skip_instances:
-        instances_inserter = _skip_instances
+        get_instances = lambda: ()
     else:
-        instances_inserter = lambda conn, sql_rep_schema: insert_mitm_data_instances(conn, sql_rep_schema, mitm_data)
+        get_instances = lambda: mitm_data_into_mitm_dataframes(mitm_data).typed_stream()
 
-    return upload_data(header_creator, instances_inserter, dataset_name, uuid, engine)
+    return upload_data(lambda: mitm_data.header, get_instances, dataset_name, uuid, engine)
 
 
 def upload_exportable(source: AnyUrl,
@@ -63,39 +62,42 @@ def upload_exportable(source: AnyUrl,
                       skip_instances: bool = False) -> AddTrackedMitMDataset:
     source_engine = create_sa_engine(source)
 
-    header_creator = lambda: exportable.generate_header(source_engine)
+    get_header = lambda: exportable.generate_header(source_engine)
 
     if skip_instances:
-        instances_inserter = _skip_instances
+        get_instances = lambda: ()
     else:
-        def instances_inserter(conn, sql_rep_schema):
-            return insert_exportable_instances(source_engine, exportable, conn, sql_rep_schema, stream_data=False)
+        def get_instances():
+            return exportable_to_typed_mitm_dataframes_stream(source_engine, exportable, stream_data=False)
 
-    return upload_data(header_creator, instances_inserter, dataset_name, uuid, engine)
+    return upload_data(get_header, get_instances, dataset_name, uuid, engine)
 
 
-def upload_data(header_creator: Callable[[], Header],
-                instances_inserter: Callable[[sa.Connection, SQLRepresentationSchema], tuple[int, int]],
+def upload_data(get_header: Callable[[], Header],
+                get_instances: Callable[[], TypedMitMDataFrameStream],
                 dataset_name: str,
                 uuid: UUID | None = None,
                 engine: Engine = None) -> AddTrackedMitMDataset:
     engine = engine if engine is not None else get_engine()
     sql_alchemy_uri = sa_url_into_any_url(engine.url)
     uuid = uuid or mk_uuid()
-    unique_schema_name = mk_short_uuid_str(uuid) # name_plus_uuid(dataset_name, uuid, sep='_')
+    unique_schema_name = name_plus_uuid('mitm_dataset', uuid, sep='_')
 
     logger.info(f'Uploading MitM Data with uuid {uuid} into target schema {unique_schema_name} on connected DB {engine.url}.')
 
-    header = header_creator()
+    header = get_header()
     sql_rep_schema = mk_sql_rep_schema(header, override_schema=unique_schema_name, skip_fk_constraints=True)
 
     with engine.begin() as connection:
         create_schema(connection, unique_schema_name)
         logger.info(f'Created schema: {unique_schema_name}')
-        insert_db_schema(connection, sql_rep_schema)
-        logger.info(f'Populated schema: {unique_schema_name}')
-        instances_inserter(connection, sql_rep_schema)
-        logger.info(f'Inserted MitM Data into schema: {unique_schema_name}')
+
+        insertion_result = insert_data(connection,
+                                       lambda: sql_rep_schema,
+                                       get_instances,
+                                       gen_override_header=lambda: header)
+
+        logger.info(f'Inserted MitM Data into schema {unique_schema_name}: {insertion_result}')
         connection.commit()
 
     return AddTrackedMitMDataset(uuid=uuid,
diff --git a/app/routes/admin/router.py b/app/routes/admin/router.py
index 469da615dbc5c535ffdfa3834f81b79bf78c4052..866aa3f92663d4b8ff9255d807d0fe0805e9aca9 100644
--- a/app/routes/admin/router.py
+++ b/app/routes/admin/router.py
@@ -1,43 +1,37 @@
-import io
 import logging
-from typing import Literal
 
 from fastapi import APIRouter
-from mitm_tooling.transformation.superset import write_superset_import_as_zip
-from mitm_tooling.transformation.superset.definitions import MetadataType
-from sqlmodel import SQLModel
-from starlette.responses import StreamingResponse
-
-from app.dependencies.orm import TrackedMitMDatasetDependency
-from app.routes.definitions.requests import GenerateIndependentMitMDatasetDefinitionRequest, \
-    GenerateVisualizationsRequest
-from app.routes.definitions.responses import MitMDatasetBundleResponse, MitMDatasetImportResponse, \
-    VisualizationImportResponse
-from ...db.models import TrackedMitMDataset, ListTrackedMitMDataset
-from ...db.utils import delete_schema, mk_session, mk_orm_session
-from ...dependencies.db import ORMSessionDependency, DBEngineDependency, ORMSession
+from pydantic import BaseModel
+from sqlalchemy import inspect
+from sqlalchemy.sql.ddl import DropSchema
 from sqlmodel.sql.expression import select
 
-
+from ...db.models import TrackedMitMDataset, ListTrackedMitMDataset, TrackedMappedMitMDataset
+from ...db.models.tracked_mitm_dataset import LocalMitMDataset, TrackedExternalMitMDataset
+from ...db.utils import delete_schema, mk_session, mk_orm_session
+from ...dependencies.db import DBEngineDependency, ORMSession
 
 router = APIRouter(prefix='/admin', tags=['admin'])
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.INFO)  # Ensure the logger level is set to INFO
 
 
-class ClearDBResponse(SQLModel):
-    """Response model for clearing the database."""
+class DropSchemasResponse(BaseModel):
+    dropped_schemas: list[str] | None = None
+
+
+class DropMitMDatasetsResponse(DropSchemasResponse):
     # status: Literal["success", "error"] = "error"
     dropped_mitm_datasets: list[ListTrackedMitMDataset] | None = None
-    dropped_schemas: list[str] | None = None
 
-@router.post('/clear-db')
-def clear_db(engine: DBEngineDependency) -> ClearDBResponse:
-    """Clear the database."""
+@router.post('/drop-mitm-datasets')
+def drop_mitm_datasets(engine: DBEngineDependency) -> DropMitMDatasetsResponse:
     schemas_to_drop = []
     dropped_datasets = []
     with mk_orm_session(engine) as orm_session:
-        tracked_datasets = list(orm_session.exec(select(TrackedMitMDataset)).all())
+        tracked_datasets = list(orm_session.exec(select(LocalMitMDataset)).all())
+        tracked_datasets += list(orm_session.exec(select(TrackedExternalMitMDataset)).all())
+        tracked_datasets += list(orm_session.exec(select(TrackedMappedMitMDataset)).all())
 
         orm_session: ORMSession
         for tds in tracked_datasets:
@@ -57,5 +51,17 @@ def clear_db(engine: DBEngineDependency) -> ClearDBResponse:
                 logger.info('Dropped schema: %s', schema_name)
         session.commit()
 
-    return ClearDBResponse(dropped_mitm_datasets=dropped_datasets, dropped_schemas=dropped_schemas)
+    return DropMitMDatasetsResponse(dropped_mitm_datasets=dropped_datasets, dropped_schemas=dropped_schemas)
+
+
+@router.post('/drop-db')
+def drop_db(engine: DBEngineDependency) -> DropSchemasResponse:
+    insp = inspect(engine)
+    schemas_to_drop = insp.get_schema_names()
+    with mk_session(engine) as session:
+        for schema_name in schemas_to_drop:
+            session.execute(DropSchema(schema_name, cascade=True))
+            logger.info('Dropped schema: %s', schema_name)
+        session.commit()
 
+    return DropSchemasResponse(dropped_schemas=schemas_to_drop)
diff --git a/app/routes/data/router.py b/app/routes/data/router.py
index d3d24774edaf777586d8850110570390382a0ed4..41f44f912f1e1621218022aeebe86273018ade9e 100644
--- a/app/routes/data/router.py
+++ b/app/routes/data/router.py
@@ -5,6 +5,7 @@ from fastapi import APIRouter, Path
 
 from app.db.utils import infer_tracked_mitm_dataset_schema, probe_tracked_mitm_dataset_schema
 from app.dependencies.db import DBEngineDependency
+from app.dependencies.orm import TrackedMitMDatasetDependency
 from app.routes.data.responses import DBMetaResponse, DBProbeResponse
 
 router = APIRouter(prefix='/data', tags=['data'])
@@ -13,10 +14,10 @@ logger.setLevel(logging.INFO)  # Ensure the logger level is set to INFO
 
 
 @router.get('/db-meta/{uuid}')
-def get_db_meta(engine: DBEngineDependency, uuid: UUID = Path()) -> DBMetaResponse:
-    return DBMetaResponse(db_meta=infer_tracked_mitm_dataset_schema(engine, uuid))
+def get_db_meta(engine: DBEngineDependency, tracked_mitm_dataset: TrackedMitMDatasetDependency) -> DBMetaResponse:
+    return DBMetaResponse(db_meta=infer_tracked_mitm_dataset_schema(engine, tracked_mitm_dataset))
 
 
 @router.get('/db-probe/{uuid}')
-def get_db_probe(engine: DBEngineDependency, uuid: UUID = Path()) -> DBProbeResponse:
-    return DBProbeResponse(db_probe=probe_tracked_mitm_dataset_schema(engine, uuid))
+def get_db_probe(engine: DBEngineDependency, tracked_mitm_dataset: TrackedMitMDatasetDependency) -> DBProbeResponse:
+    return DBProbeResponse(db_probe=probe_tracked_mitm_dataset_schema(engine, tracked_mitm_dataset))
diff --git a/app/routes/definitions/generate.py b/app/routes/definitions/generate.py
index 1f2992eabb6fe8a2753d55b6bbc877dc9d187b95..2ffbbb0638b4e40d4946cb748b86ab1eadc92cea 100644
--- a/app/routes/definitions/generate.py
+++ b/app/routes/definitions/generate.py
@@ -1,53 +1,11 @@
-from collections.abc import Sequence
-from datetime import datetime
-
-from mitm_tooling.definition import MITM
-from mitm_tooling.representation import Header
-from mitm_tooling.transformation.superset import mk_superset_mitm_dataset_bundle, MAEDVisualizationType, \
-    mk_superset_datasource_bundle, VisualizationType
-from mitm_tooling.transformation.superset.asset_bundles import MitMDatasetIdentifierBundle, SupersetMitMDatasetBundle, \
-    SupersetDatasourceBundle, SupersetVisualizationBundle
-from mitm_tooling.transformation.superset.common import DBConnectionInfo
+from mitm_tooling.transformation.superset.asset_bundles import SupersetMitMDatasetBundle
 from mitm_tooling.transformation.superset.definitions import SupersetMitMDatasetImport, \
     MetadataType
 
 from app.db.models import TrackedMitMDataset
 from app.dependencies.db import ORMSession
 from .requests import GenerateIndependentMitMDatasetDefinitionRequest, GenerateVisualizationsRequest
-
-
-def get_default_visualization_types(mitm: MITM) -> list[VisualizationType]:
-    if mitm == MITM.MAED:
-        return [MAEDVisualizationType.Baseline]
-    else:
-        return []
-
-
-def mk_datasource_bundle(mitm_header: Header,
-                         db_conn_info: DBConnectionInfo,
-                         identifiers: MitMDatasetIdentifierBundle | None = None
-                         ) -> SupersetDatasourceBundle:
-    datasource_bundle = mk_superset_datasource_bundle(mitm_header,
-                                                      db_conn_info,
-                                                      identifiers)
-    return datasource_bundle
-
-
-def mk_mitm_dataset_bundle(mitm_header: Header,
-                           db_conn_info: DBConnectionInfo,
-                           dataset_name: str,
-                           identifiers: MitMDatasetIdentifierBundle | None = None,
-                           include_default_visualizations: bool = False,
-                           visualization_types: Sequence[VisualizationType] | None = None) -> SupersetMitMDatasetBundle:
-    mitm_dataset_bundle = mk_superset_mitm_dataset_bundle(mitm_header,
-                                                          db_conn_info,
-                                                          dataset_name,
-                                                          identifiers,
-                                                          visualization_types=(get_default_visualization_types(
-                                                              mitm_header.mitm) if include_default_visualizations else []) + (
-                                                                                      visualization_types or [])
-                                                          )
-    return mitm_dataset_bundle
+from app.logic.definitions import mk_mitm_dataset_bundle, track_visualizations
 
 
 def exec_def_request(request: GenerateIndependentMitMDatasetDefinitionRequest,
@@ -107,17 +65,6 @@ def exec_viz_request(orm_session: ORMSession,
     return mitm_dataset_bundle
 
 
-def track_visualizations(orm_session: ORMSession,
-                         tracked_dataset: TrackedMitMDataset,
-                         visualization_bundle: SupersetVisualizationBundle) -> TrackedMitMDataset:
-    viz_id_map = visualization_bundle.viz_identifier_map
-    tracked_dataset.identifier_bundle = tracked_dataset.identifier_bundle.with_visualizations(viz_id_map)
-    tracked_dataset.last_edited = datetime.now()
-    orm_session.commit()
-    orm_session.refresh(tracked_dataset)
-    return tracked_dataset
-
-
 def exec_viz_import_request(orm_session: ORMSession,
                             tracked_dataset: TrackedMitMDataset,
                             request: GenerateVisualizationsRequest) -> SupersetMitMDatasetImport:
diff --git a/app/routes/definitions/requests.py b/app/routes/definitions/requests.py
index f673d5d0e7b2f2fd7fef47d37f909fdacdcbc41b..688850765b66484fc9fd31502d070a79fd1cc147 100644
--- a/app/routes/definitions/requests.py
+++ b/app/routes/definitions/requests.py
@@ -1,5 +1,5 @@
 import pydantic
-from mitm_tooling.representation import Header
+from mitm_tooling.representation.intermediate import Header
 from mitm_tooling.transformation.superset import VisualizationType, MAEDVisualizationType
 from mitm_tooling.transformation.superset.asset_bundles import MitMDatasetIdentifierBundle
 from mitm_tooling.transformation.superset.common import DBConnectionInfo, MitMDatasetInfo
diff --git a/app/routes/mitm_dataset/append.py b/app/routes/mitm_dataset/append.py
deleted file mode 100644
index 9869f347d6e15e6b4a3543761b8a6dab98fdc8d1..0000000000000000000000000000000000000000
--- a/app/routes/mitm_dataset/append.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import logging
-from typing import Callable
-
-import sqlalchemy as sa
-from mitm_tooling.extraction.sql.mapping import Exportable
-from mitm_tooling.representation import SQLRepresentationSchema
-from mitm_tooling.transformation.sql.from_exportable import insert_exportable_instances
-from mitm_tooling.utilities.sql_utils import create_sa_engine
-from pydantic import AnyUrl
-
-from app.db.models import TrackedMitMDataset
-
-logger = logging.getLogger(__name__)
-
-
-def append_exportable(source: AnyUrl, exportable: Exportable, tracked_mitm_dataset: TrackedMitMDataset) -> tuple[
-    int, int]:
-    source_engine = create_sa_engine(source)
-
-    def instances_inserter(conn, sql_rep_schema):
-        return insert_exportable_instances(source_engine, exportable, conn, sql_rep_schema, stream_data=False)
-
-    return append_instances(instances_inserter, tracked_mitm_dataset)
-
-
-def append_instances(
-        instances_inserter: Callable[[sa.Connection, SQLRepresentationSchema], tuple[int, int]],
-        tracked_mitm_dataset: TrackedMitMDataset,
-) -> tuple[int, int]:
-    sql_rep_schema = tracked_mitm_dataset.sql_rep_schema
-    target_engine = create_sa_engine(tracked_mitm_dataset.sql_alchemy_uri)
-
-    with target_engine.begin() as conn:
-        ii, ir = instances_inserter(conn, sql_rep_schema)
-        logger.info(f'Appended MitM Data into schema: {tracked_mitm_dataset.schema_name}')
-        conn.commit()
-
-    return ii, ir
diff --git a/app/routes/mitm_dataset/external/__init__.py b/app/routes/mitm_dataset/external/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/app/routes/mitm_dataset/external/requests.py b/app/routes/mitm_dataset/external/requests.py
new file mode 100644
index 0000000000000000000000000000000000000000..d58dfcf4da8a6150e908225ee5026d4eb5457984
--- /dev/null
+++ b/app/routes/mitm_dataset/external/requests.py
@@ -0,0 +1,13 @@
+from uuid import UUID
+
+import pydantic
+from mitm_tooling.representation.sql import SchemaName
+from mitm_tooling.transformation.superset.common import DBConnectionInfo
+from pydantic import AnyUrl
+
+from app.db.models import MappedDB
+from app.db.models.tracked_mitm_dataset import AddExternalMitMDataset
+
+
+class RegisterExternalMitMDatasetRequest(AddExternalMitMDataset):
+    pass
diff --git a/app/routes/mitm_dataset/external/responses.py b/app/routes/mitm_dataset/external/responses.py
new file mode 100644
index 0000000000000000000000000000000000000000..d7ae8826b4947d4af13af6861883f8ae4ca4191e
--- /dev/null
+++ b/app/routes/mitm_dataset/external/responses.py
@@ -0,0 +1,21 @@
+from datetime import datetime
+
+import pydantic
+from pydantic import BaseModel, ConfigDict
+
+from app.db.models import MappedDBSource, MappedDBPull, TrackedMappedMitMDataset
+from app.db.models.tracked_mitm_dataset import TrackedExternalMitMDataset
+from app.routes.mitm_dataset.responses import TrackMitMResponse
+
+
+class RegisterExternalMitMResponse(TrackMitMResponse):
+    pass
+
+
+class RefreshExternalMitMResponse(BaseModel):
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+    tracked_mitm_dataset: TrackedExternalMitMDataset
+    time: datetime = pydantic.Field(default_factory=datetime.now)
+    instances_imported: int = pydantic.Field(default=0)
+    rows_created: int = pydantic.Field(default=0)
\ No newline at end of file
diff --git a/app/routes/mitm_dataset/external/router.py b/app/routes/mitm_dataset/external/router.py
new file mode 100644
index 0000000000000000000000000000000000000000..24d9d234a4a070870e200a1081532eed7b5cb2e9
--- /dev/null
+++ b/app/routes/mitm_dataset/external/router.py
@@ -0,0 +1,43 @@
+import logging
+
+from fastapi.routing import APIRouter
+
+from app.db.models import ListTrackedMitMDataset
+from app.db.models.tracked_mitm_dataset import TrackedExternalMitMDataset, GetExternalMitMDataset
+from app.dependencies.db import ORMSessionDependency
+from app.dependencies.orm import TrackedExternalMitMDatasetDependency, get_tracked_datasets
+from app.logic.refresh import pull_header, update_header
+from .requests import RegisterExternalMitMDatasetRequest
+from .responses import RegisterExternalMitMResponse
+
+router = APIRouter(prefix='/external', tags=['external'])
+logger = logging.getLogger(__name__)
+
+
+@router.post('/register')
+def register_external_mitm_dataset(session: ORMSessionDependency,
+                                   request: RegisterExternalMitMDatasetRequest) -> RegisterExternalMitMResponse:
+    from app.logic.register_external import register_external_mitm_dataset
+    external_tracked_mitm_dataset = register_external_mitm_dataset(session, request)
+    return RegisterExternalMitMResponse(status='success', tracked_mitm_dataset=external_tracked_mitm_dataset)
+
+
+@router.post('/refresh/{uuid}')
+def refresh_external_mitm_dataset(session: ORMSessionDependency,
+                                  external_tracked_dataset: TrackedExternalMitMDatasetDependency) -> None:
+    inferred_header = pull_header(external_tracked_dataset)
+    update_header(session, external_tracked_dataset, inferred_header)
+    logger.info(
+        f'Refreshed external dataset {external_tracked_dataset.uuid} {external_tracked_dataset.dataset_name} @ {external_tracked_dataset.sql_alchemy_uri}'
+    )
+
+
+@router.get('/{uuid}', response_model=GetExternalMitMDataset)
+def get_external_mitm_dataset(tracked_dataset: TrackedExternalMitMDatasetDependency) -> TrackedExternalMitMDataset:
+    return tracked_dataset
+
+
+@router.get('/', response_model=list[ListTrackedMitMDataset])
+def get_external_mitm_datasets(session: ORMSessionDependency) -> list[TrackedExternalMitMDataset]:
+    sequence = get_tracked_datasets(session, types={'external'})
+    return sequence
diff --git a/app/routes/mitm_dataset/mapped/__init__.py b/app/routes/mitm_dataset/mapped/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/app/routes/mitm_dataset/mapped/requests.py b/app/routes/mitm_dataset/mapped/requests.py
new file mode 100644
index 0000000000000000000000000000000000000000..4ab571e29f615c50ff908d88a05491394c545052
--- /dev/null
+++ b/app/routes/mitm_dataset/mapped/requests.py
@@ -0,0 +1,11 @@
+from uuid import UUID
+
+import pydantic
+from pydantic import AnyUrl
+
+from app.db.models import MappedDB
+from app.db.models.tracked_mitm_dataset import AddMappedMitMDataset
+
+
+class RegisterMappedMitMDatasetRequest(AddMappedMitMDataset):
+    pass
diff --git a/app/routes/mitm_dataset/mapped/responses.py b/app/routes/mitm_dataset/mapped/responses.py
new file mode 100644
index 0000000000000000000000000000000000000000..4f807dd27c23a87e1fb9d452ee1fbc41d9ca9045
--- /dev/null
+++ b/app/routes/mitm_dataset/mapped/responses.py
@@ -0,0 +1,19 @@
+from datetime import datetime
+
+import pydantic
+from pydantic import BaseModel
+
+from app.db.models import MappedDBSource, MappedDBPull, TrackedMappedMitMDataset
+from app.routes.mitm_dataset.responses import TrackMitMResponse
+
+
+class RegisterExternalMitMResponse(TrackMitMResponse):
+    mapped_db_source: MappedDBSource | None = None
+
+
+class MappedDBPullResponse(BaseModel):
+    tracked_mitm_dataset: TrackedMappedMitMDataset
+    mapped_db_source: MappedDBSource
+    time: datetime = pydantic.Field(default_factory=datetime.now)
+    instances_imported: int = pydantic.Field(default=0)
+    rows_created: int = pydantic.Field(default=0)
\ No newline at end of file
diff --git a/app/routes/mitm_dataset/mapped/router.py b/app/routes/mitm_dataset/mapped/router.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b2586edb867e8bc1186d4afdcce535c9e1c6a6a
--- /dev/null
+++ b/app/routes/mitm_dataset/mapped/router.py
@@ -0,0 +1,46 @@
+import logging
+
+from fastapi.routing import APIRouter
+
+from app.db.models import ListTrackedMitMDataset
+from app.db.models.tracked_mitm_dataset import TrackedExternalMitMDataset, GetMappedMitMDataset, \
+    TrackedMappedMitMDataset
+from app.dependencies.db import ORMSessionDependency
+from app.dependencies.orm import TrackedExternalMitMDatasetDependency, get_tracked_datasets
+from .requests import RegisterMappedMitMDatasetRequest
+from .responses import RegisterExternalMitMResponse, MappedDBPullResponse
+
+router = APIRouter(prefix='/mapped', tags=['mapped'])
+logger = logging.getLogger(__name__)
+
+
+@router.post('/register')
+def register_mapped_mitm_dataset(session: ORMSessionDependency,
+                                 request: RegisterMappedMitMDatasetRequest) -> RegisterExternalMitMResponse:
+    from app.logic.register_mapped import register_mapped_mitm_dataset
+    external_tracked_mitm_dataset, mapped_db_source = register_mapped_mitm_dataset(session, request)
+    return RegisterExternalMitMResponse(status='success', tracked_mitm_dataset=external_tracked_mitm_dataset,
+                                        mapped_db_source=mapped_db_source)
+
+
+@router.post('/pull/{uuid}')
+def pull_mapped_mitm_dataset(session: ORMSessionDependency,
+                             external_tracked_dataset: TrackedExternalMitMDatasetDependency) -> MappedDBPullResponse:
+    from app.logic.pull_mapped import pull_mapped_mitm_dataset
+    mapped_db_pull = pull_mapped_mitm_dataset(session, external_tracked_dataset)
+    return MappedDBPullResponse(mapped_db_source=mapped_db_pull.mapped_db_source,
+                                tracked_mitm_dataset=mapped_db_pull.mapped_mitm_dataset,
+                                time=mapped_db_pull.time_complete,
+                                instances_imported=mapped_db_pull.instances_imported,
+                                rows_created=mapped_db_pull.rows_created)
+
+
+@router.get('/{uuid}', response_model=GetMappedMitMDataset)
+def get_mapped_mitm_dataset(tracked_dataset: TrackedExternalMitMDatasetDependency) -> TrackedMappedMitMDataset:
+    return tracked_dataset
+
+
+@router.get('/', response_model=list[ListTrackedMitMDataset])
+def get_mapped_mitm_datasets(session: ORMSessionDependency) -> list[TrackedMappedMitMDataset]:
+    sequence = get_tracked_datasets(session, types={'mapped'})
+    return sequence
diff --git a/app/routes/mitm_dataset/refresh.py b/app/routes/mitm_dataset/refresh.py
deleted file mode 100644
index 84e5da3451d4fb68386493402bf703ef22fc3d12..0000000000000000000000000000000000000000
--- a/app/routes/mitm_dataset/refresh.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from abc import ABC
-from typing import Literal, Any
-
-import pydantic
-from mitm_tooling.data_types import MITMDataType
-from mitm_tooling.definition import ConceptName, TypeName
-from mitm_tooling.representation import Header, SQLRepresentationSchema, HeaderEntry
-from mitm_tooling.representation.intermediate.deltas import diff_header
-from pydantic import ConfigDict
-
-from app.db.models import TrackedMitMDataset
-
-
-
-def refresh(tracked_dataset: TrackedMitMDataset, new_header: Header):
-    old_header = tracked_dataset.mitm_header
-    header_delta = diff_header(old_header, new_header)
-
-    row_delta = ...
\ No newline at end of file
diff --git a/app/routes/mitm_dataset/register.py b/app/routes/mitm_dataset/register.py
deleted file mode 100644
index 811cbd6ed1d759f79f749271a70d26fe60128619..0000000000000000000000000000000000000000
--- a/app/routes/mitm_dataset/register.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from mitm_tooling.transformation.superset.asset_bundles import MitMDatasetIdentifierBundle
-from mitm_tooling.transformation.superset.common import DBConnectionInfo
-from mitm_tooling.transformation.superset.definitions import MitMDatasetIdentifier
-
-from app.db.models import TrackedMitMDataset, AddTrackedMitMDataset
-from app.dependencies.db import ORMSessionDependency
-from app.routes.definitions.generate import mk_mitm_dataset_bundle
-
-
-def register_mitm_dataset(session: ORMSessionDependency, add_model: AddTrackedMitMDataset) -> TrackedMitMDataset:
-    db_conn_info= DBConnectionInfo(sql_alchemy_uri=add_model.sql_alchemy_uri, schema_name=add_model.schema_name)
-    identifiers = MitMDatasetIdentifierBundle(mitm_dataset=MitMDatasetIdentifier(dataset_name=add_model.dataset_name, uuid=add_model.uuid))
-    definition = mk_mitm_dataset_bundle(add_model.mitm_header, db_conn_info, add_model.dataset_name, identifiers=identifiers)
-    identifier_bundle = definition.identifiers
-
-    model = TrackedMitMDataset.from_models(add_model, identifier_bundle=identifier_bundle)
-
-    session.add(model)
-    session.commit()
-    session.refresh(model)
-    return model
diff --git a/app/routes/mitm_dataset/register_external.py b/app/routes/mitm_dataset/register_external.py
deleted file mode 100644
index 8ea786f965631addbe054ab5b7e1b06da4c89817..0000000000000000000000000000000000000000
--- a/app/routes/mitm_dataset/register_external.py
+++ /dev/null
@@ -1,64 +0,0 @@
-from datetime import datetime
-
-import sqlalchemy as sa
-from mitm_tooling.definition import MITM
-from mitm_tooling.extraction.sql.data_models import VirtualDB, DBMetaInfo, SourceDBType, VirtualView, \
-    CompiledVirtualView
-from mitm_tooling.extraction.sql.db import connect_and_reflect
-from mitm_tooling.extraction.sql.mapping import ConceptMapping, MappingExport, Exportable
-from mitm_tooling.representation import Header, mk_sql_rep_schema
-from mitm_tooling.representation.sql_representation import mk_concept_table_name
-from mitm_tooling.transformation.sql.from_sql import db_engine_into_db_meta
-from mitm_tooling.utilities.python_utils import take_first
-from mitm_tooling.utilities.sql_utils import create_sa_engine
-from pydantic import AnyUrl, BaseModel
-from sqlalchemy.orm import Session
-
-from app.db.models.mapped_sources import MappedDBSource, MappedDBPull
-from app.db.models.tracked_mitm_dataset import ExternalTrackedMitMDataset
-from app.dependencies.db import ORMSessionDependency
-from app.routes.mitm_dataset.append import append_exportable
-from app.routes.mitm_dataset.mapped_db import mk_exportable
-from app.routes.mitm_dataset.requests import RegisterExternalMitMDatasetRequest
-from app.routes.mitm_dataset.responses import RegisterExternalMitMResponse
-from app.routes.mitm_dataset.upload import upload_exportable
-
-
-def register_external_mitm_dataset(
-        session: ORMSessionDependency,
-        request: RegisterExternalMitMDatasetRequest,
-) -> RegisterExternalMitMResponse:
-
-
-    remote_engine = create_sa_engine(request.sql_alchemy_uri)
-    exportable = mk_exportable(remote_engine, request.mapped_db)
-    # header = exportable.generate_header(remote_engine)
-
-    add_tracked_mitm_dataset = upload_exportable(request.sql_alchemy_uri, exportable, request.dataset_name, skip_instances=True)
-
-    mapped_db_source = MappedDBSource(sql_alchemy_uri=request.sql_alchemy_uri, mitm_mapping=request.mapped_db, mitm_header=add_tracked_mitm_dataset.mitm_header)
-    session.add(mapped_db_source)
-
-    external_tracked_mitm_dataset = ExternalTrackedMitMDataset.from_models(add_tracked_mitm_dataset, mapped_db_source=mapped_db_source)
-
-    session.add(external_tracked_mitm_dataset)
-
-    session.commit()
-    session.refresh(external_tracked_mitm_dataset)
-    session.refresh(mapped_db_source)
-    return RegisterExternalMitMResponse(tracked_mitm_dataset=external_tracked_mitm_dataset, mapped_db_source=mapped_db_source)
-
-def pull(session: ORMSessionDependency, external_tracked_mitm_dataset: ExternalTrackedMitMDataset):
-    db_source = external_tracked_mitm_dataset.mapped_db_source
-
-    remote_engine = create_sa_engine(db_source.sql_alchemy_uri)
-
-    exportable = mk_exportable(remote_engine, db_source.mitm_mapping)
-
-    ii, ir = append_exportable(db_source.sql_alchemy_uri, exportable, external_tracked_mitm_dataset)
-    external_tracked_mitm_dataset.last_edited = datetime.now()
-
-    pull = MappedDBPull(tracked_mitm_dataset_id=external_tracked_mitm_dataset.id, mapped_db_source_id=db_source.id, instances_imported=ii, rows_created=ir)
-
-    session.add(pull)
-    session.flush()
diff --git a/app/routes/mitm_dataset/requests.py b/app/routes/mitm_dataset/requests.py
index 5ad67111e48f5f7484aba4f799ade1c8d9f5a6df..f2faef27f48b0ea2e2f8f21d6cecb4fea8e05f4b 100644
--- a/app/routes/mitm_dataset/requests.py
+++ b/app/routes/mitm_dataset/requests.py
@@ -1,11 +1,8 @@
-from uuid import UUID
-
 import pydantic
-from mitm_tooling.representation import Header
+from mitm_tooling.representation.intermediate import Header
 from pydantic import AnyUrl
 
 from app.db.models import AddTrackedMitMDataset
-from app.db.models.mapped_sources import MappedDB
 
 
 class AddTrackedMitMDatasetRequest(AddTrackedMitMDataset):
@@ -18,10 +15,3 @@ class EditTrackedMitMDatasetRequest(pydantic.BaseModel):
     sql_alchemy_uri: AnyUrl
     mitm_header: Header
     is_managed_locally: bool
-
-
-class RegisterExternalMitMDatasetRequest(pydantic.BaseModel):
-    uuid: UUID | None = None
-    dataset_name: str
-    sql_alchemy_uri: AnyUrl
-    mapped_db: MappedDB
diff --git a/app/routes/mitm_dataset/responses.py b/app/routes/mitm_dataset/responses.py
index 3813a1c62dc2239dc22dc71944002e4d13cb0cb2..2b13fc4ce68b88454916a8967edaa12abd76dc91 100644
--- a/app/routes/mitm_dataset/responses.py
+++ b/app/routes/mitm_dataset/responses.py
@@ -1,17 +1,12 @@
-from abc import ABC
 from typing import Literal
-from uuid import UUID
 
 import pydantic
-from mitm_tooling.definition import MITM
-from pydantic import Field
 
-from app.db.models import TrackedMitMDataset, BaseTrackedMitMDataset, ListTrackedMitMDataset
-from app.db.models.mapped_sources import MappedDBSource
+from app.db.models import TrackedMitMDataset, ListTrackedMitMDataset
 
 
 class TrackMitMResponse(pydantic.BaseModel):
-    status: Literal['success', 'failure']
+    status: Literal['success', 'failure'] = 'failure'
     tracked_mitm_dataset: TrackedMitMDataset | None = None
     msg: str | None = None
 
@@ -20,8 +15,4 @@ class UploadMitMResponse(TrackMitMResponse):
     pass
 
 
-class RegisterExternalMitMResponse(TrackMitMResponse):
-    mapped_db_source: MappedDBSource | None = None
-
-
 MitMsListResponse = pydantic.TypeAdapter(list[ListTrackedMitMDataset])
diff --git a/app/routes/mitm_dataset/router.py b/app/routes/mitm_dataset/router.py
index fdbb2794f0fee30ebf2f2c2643ae2ee1c12f1374..ae7adf384404d07c1d98a2fc6371e33252f6fe51 100644
--- a/app/routes/mitm_dataset/router.py
+++ b/app/routes/mitm_dataset/router.py
@@ -12,16 +12,22 @@ from starlette.responses import StreamingResponse
 
 from app.db.models import TrackedMitMDataset, ListTrackedMitMDataset
 from app.dependencies.db import DBEngineDependency, ORMSessionDependency
-from app.dependencies.orm import TrackedMitMDatasetDependency
-from .export import export_via_mapping
-from .requests import AddTrackedMitMDatasetRequest, RegisterExternalMitMDatasetRequest, EditTrackedMitMDatasetRequest
-from .responses import UploadMitMResponse, RegisterExternalMitMResponse
-from .upload import upload_mitm_file
-from ...db.utils import mk_session
+from app.dependencies.orm import TrackedMitMDatasetDependency, get_tracked_datasets
+from app.logic.export import export_via_mapping
+from app.logic.register import register_mitm_dataset
+from app.logic.upload import upload_mitm_file
+from .requests import AddTrackedMitMDatasetRequest, EditTrackedMitMDatasetRequest
+from .responses import UploadMitMResponse
 
 router = APIRouter(prefix='/mitm_dataset', tags=['mitm_dataset'])
-logger = logging.getLogger(__name__)
 
+from .external.router import router as external_router
+from .mapped.router import router as mapped_router
+
+router.include_router(external_router)
+router.include_router(mapped_router)
+
+logger = logging.getLogger(__name__)
 
 @router.post('/upload')
 def upload_mitm_dataset(
@@ -32,7 +38,6 @@ def upload_mitm_dataset(
         mitm_zip: UploadFile = File(media_type='application/zip')) -> UploadMitMResponse:
     try:
         add_model = upload_mitm_file(mitm, mitm_zip.file, dataset_name=dataset_name, uuid=mk_uuid(), engine=engine)
-        from .register import register_mitm_dataset
         model = register_mitm_dataset(session, add_model)
 
         return UploadMitMResponse(status='success', tracked_mitm_dataset=model)
@@ -42,13 +47,6 @@ def upload_mitm_dataset(
         # return UploadMitMResponse(status='failure', msg=str(e))
 
 
-@router.post('/register')
-def register_external_mitm_dataset(session: ORMSessionDependency,
-                                   request: RegisterExternalMitMDatasetRequest) -> RegisterExternalMitMResponse:
-    from .register_external import register_external_mitm_dataset
-    return register_external_mitm_dataset(session, request)
-
-
 @router.post('/')
 def post_mitm_dataset(session: ORMSessionDependency,
                       new_mitm_dataset: AddTrackedMitMDatasetRequest) -> TrackedMitMDataset:
@@ -80,7 +78,7 @@ def get_mitm_dataset(tracked_dataset: TrackedMitMDatasetDependency) -> TrackedMi
 
 @router.get('/', response_model=list[ListTrackedMitMDataset])
 def get_mitm_datasets(session: ORMSessionDependency) -> Sequence[TrackedMitMDataset]:
-    sequence = session.exec(sqlmodel.select(TrackedMitMDataset)).all()
+    sequence = get_tracked_datasets(session)
     return sequence
 
 
@@ -103,11 +101,13 @@ def export_mitm_dataset(engine: DBEngineDependency,
                         tracked_dataset: TrackedMitMDatasetDependency,
                         use_streaming: bool = False) -> StreamingResponse:
     remote_engine, exportable = export_via_mapping(tracked_dataset)
-    with remote_engine.begin() as conn:
+    with remote_engine.connect() as conn:
         if use_streaming:
             ze = exportable.export_as_stream(conn)
             data = ze.iter_bytes()
         else:
             ze = exportable.export_to_memory(conn)
             data = ze.to_buffer()
-        return StreamingResponse(data, media_type='application/zip', headers={'Content-Disposition': 'attachment; filename=export.zip'})
+        return StreamingResponse(data,
+                                 media_type='application/zip',
+                                 headers={'Content-Disposition': 'attachment; filename=export.zip'})
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 4a9570449ab47bdbe108980c56f7b95071255299..b5e76a3f41a0aced774361dd185818d42d677d63 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -14,7 +14,7 @@ services:
     env_file: docker/.env
     depends_on:
       mitm-db:
-        condition: service_started
+        condition: service_healthy
     command: ["fastapi", "dev", "app/main.py", "--reload", "--host", "0.0.0.0", "--port", "8180"]
 
   mitm-db:
diff --git a/justfile b/justfile
index 82edd1518088e638e6e099c8912bc5886105279b..4e6f8cfd775e7d2872f0cc88c3ee8d5e3af6d506 100644
--- a/justfile
+++ b/justfile
@@ -12,6 +12,9 @@ sync:
 sync-mt:
     uv sync --upgrade-package mitm-tooling
 
+sy:
+    uv sync --no-install-package mitm-tooling
+
 requirements:
     uv export --no-hashes > requirements.txt
 
diff --git a/pyproject.toml b/pyproject.toml
index b385d94ee39375d845053638b54a56adbdde0583..7557cd86333fc49e5c54c549a40993e1fe6e26e4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -32,7 +32,7 @@ explicit = true
 
 [tool.uv.sources]
 # mitm-tooling = { path = "C:/Users/leah/PycharmProjects/mitm-tooling", editable = true }
-mitm-tooling = { git = "https://git-ce.rwth-aachen.de/machine-data/mitm-tooling.git", branch = "master" }
+mitm-tooling = { git = "https://git-ce.rwth-aachen.de/machine-data/mitm-tooling.git", branch = "develop" }
 
 [build-system]
 requires = ["hatchling"]
diff --git a/test/admin.http b/test/admin.http
index 2edaec2e36882e00e371f02eb4a372f1544e6f8e..b3d0e7de316201b44a5884c4bbc5c8ff0c17fd67 100644
--- a/test/admin.http
+++ b/test/admin.http
@@ -1,11 +1,11 @@
 ###
 
 # @name Drop MitM datasets
-POST http://localhost:{{port}}/admin/clear-db
+POST http://localhost:{{port}}/admin/drop-mitm-datasets
 
 ###
 
-# @name Drop MitM datasets
-POST http://localhost:{{port}}/admin/clear-db
+# @name Drop DB
+POST http://localhost:{{port}}/admin/drop-db
 
 ###
\ No newline at end of file
diff --git a/test/definitions.http b/test/definitions.http
index 8488787bf9da7fda51f6977401bb386026b21826..412e1dbbcc66460ea6f8876d0f488777bc6d5ee9 100644
--- a/test/definitions.http
+++ b/test/definitions.http
@@ -3,7 +3,7 @@ Accept: application/json
 
 ###
 
-GET http://localhost:{{port}}/definitions/mitm_dataset/{{uuid}}/import?include_visualizations=False
+GET http://localhost:{{port}}/definitions/mitm_dataset/{{uuid}}/import?include_visualizations=True
 Accept: application/json
 
 ###
diff --git a/test/http-client.env.json b/test/http-client.env.json
index a70cd6df712c2411f974707758f9591cf194f8d4..3f634b80b0c140a2b7ce768651429744ed9f9f58 100644
--- a/test/http-client.env.json
+++ b/test/http-client.env.json
@@ -1,15 +1,15 @@
 {
   "dev": {
     "port": "8181",
-    "uuid": "f41c6400-abe3-4367-9375-67a8f713d299"
+    "uuid": "d557ed4f-2d77-4d6c-abb6-2e86249b302b"
   },
   "docker": {
     "port": "8180",
-    "uuid": "8ff3bd53-a5d6-40fa-848b-d8b271c7043c"
+    "uuid": "b99e79e2-d2ed-412b-ba99-ed02e9fabe3b"
   },
   "superset": {
     "port": "8180",
-    "uuid":  "6ea60ebf-b3ec-4b64-988d-8dc4e2f48eca"
+    "uuid":  "dadbd5df-662e-4874-89be-d8d4ae7e3b0a"
   },
   "kubernetes": {
     "port": "8080",
diff --git a/test/test.py b/test/test.py
index 46ed140aa1f3b0ffe72e1200c78861584cd5a4e1..39946aa88b79c2d338ea113038d32b6e43b87aa9 100644
--- a/test/test.py
+++ b/test/test.py
@@ -1,4 +1,4 @@
-from mitm_tooling.transformation.superset.definition_bundles import SupersetMitMDatasetBundle
+from mitm_tooling.transformation.superset.asset_bundles import SupersetMitMDatasetBundle
 
 if __name__ == '__main__':
     with open('mitm_dataset_bundle.json') as f: