Commit eecb58b3 authored by Leah Tacke genannt Unterberg

hopefully fixed sql representation (views) and refactored superset asset definitions

parent 18683bf6
Showing 343 additions and 84 deletions
from collections.abc import Collection
from typing import TypedDict, Any
import pydantic
from sqlalchemy import MetaData, Engine, inspect
from ..data_models import TableMetaInfo
@@ -12,7 +13,7 @@ class TableDoesNotExist(Exception):
pass
class AdditionalMeta(TypedDict):
class AdditionalMeta(pydantic.BaseModel):
default_schema: SchemaName
......
@@ -8,3 +8,4 @@ from .df_representation import MITMDataFrames
from .file_representation import write_header_file, write_data_file, read_data_file, read_header_file
from .intermediate_representation import HeaderEntry, Header, MITMData, StreamingMITMData, StreamingConceptData
from .sql_representation import mk_sql_rep_schema, insert_mitm_data, mk_sqlite, SQLRepresentationSchema
from .sql_representation import TableName, SchemaName, QualifiedTableName, Queryable
@@ -188,21 +188,38 @@ def _gen_denormalized_views(mitm: MITM, concept_tables: ConceptTablesDict, type_
TableName, Queryable], None, None]:
mitm_def = get_mitm_def(mitm)
for concept in mitm_def.main_concepts:
view_name = mk_concept_table_name(mitm, concept) + '_view'
for main_concept in mitm_def.main_concepts:
for concept in mitm_def.get_leafs(main_concept):
view_name = mk_concept_table_name(mitm, concept) + '_denormalized_view'
q = None
if (concept_t := concept_tables.get(concept)) is not None:
if has_type_tables(mitm, concept):
shared_cols = {c.name for c in concept_t.columns}
if concept_type_tables := type_tables.get(concept):
selections = []
for leaf_concept in mitm_def.get_leafs(concept):
if concept_type_tables := type_tables.get(leaf_concept):
col_sets = [{(c.name, str(c.type)) for c in t.columns} for t in concept_type_tables.values()]
shared_cols = set.intersection(*col_sets)
all_cols = set.union(*col_sets)
for type_name, type_t in concept_type_tables.items():
selection = (c if c.name in shared_cols else sa.label(_prefix_col_name(type_name, c.name), c)
for c in type_t.columns)
selection = []
for (col_name, col_type) in all_cols:
if (c := type_t.columns.get(col_name)) is not None and str(c.type) == col_type:
selection.append(c)
else:
selection.append(sa.null().label(col_name))
# selection = (c if (c.name, str(c.type)) in shared_cols else sa.label(_prefix_col_name(type_name, c.name), c)
# for c in type_t.columns)
selections.append(sa.select(*selection))
q = sa.union(*selections).subquery()
if selections:
q = sa.union_all(*selections).subquery()
else:
if (concept_t := concept_tables.get(concept)) is not None:
# base_cols = {(c.name, str(c.type)) for c in concept_t.columns}
q = sa.select(concept_t)
if q is not None:
yield view_name, q
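For context, a minimal self-contained sketch of the NULL-padding UNION ALL pattern the reworked view generator relies on; the table and column names here are hypothetical, not taken from the repository:

import sqlalchemy as sa

meta = sa.MetaData()
# two hypothetical per-type tables that share an `id` column but differ otherwise
t_a = sa.Table('obs_type_a', meta, sa.Column('id', sa.Integer), sa.Column('x', sa.Float))
t_b = sa.Table('obs_type_b', meta, sa.Column('id', sa.Integer), sa.Column('y', sa.String))

# union of all (name, rendered type) pairs across the type tables
all_cols = [('id', 'INTEGER'), ('x', 'FLOAT'), ('y', 'VARCHAR')]

selections = []
for t in (t_a, t_b):
    selection = []
    for col_name, col_type in all_cols:
        c = t.columns.get(col_name)
        # keep the real column when name and type match, otherwise pad with NULL
        if c is not None and str(c.type) == col_type:
            selection.append(c)
        else:
            selection.append(sa.null().label(col_name))
    selections.append(sa.select(*selection))

# every SELECT now has the same column list, so UNION ALL is well-formed
denormalized = sa.union_all(*selections).subquery()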
@@ -221,8 +238,9 @@ _view_generators: tuple[MitMDBViewsGenerator, ...] = (_gen_denormalized_views,)
def mk_sql_rep_schema(header: Header,
view_generators: Iterable[MitMDBViewsGenerator] | None = (_gen_denormalized_views,),
override_schema: SchemaName | None = None) -> SQLRepresentationSchema:
schema_name = override_schema if override_schema else SQL_REPRESENTATION_DEFAULT_SCHEMA
mitm_def = get_mitm_def(header.mitm)
meta = sa.MetaData(schema=override_schema if override_schema else SQL_REPRESENTATION_DEFAULT_SCHEMA)
meta = sa.MetaData(schema=schema_name)
concept_tables: ConceptTablesDict = {}
type_tables: ConceptTypeTablesDict = {}
@@ -247,7 +265,7 @@ def mk_sql_rep_schema(header: Header,
mitm_def.resolve_foreign_types(concept).items() for name, dt in
resolved_fk.items()]
}, additional_column_generators=(_gen_within_concept_id_col,), schema_item_generators=(
_gen_unique_constraint, _gen_pk_constraint, _gen_index,), override_schema=override_schema)
_gen_unique_constraint, _gen_pk_constraint, _gen_index,), override_schema=schema_name)
concept_tables[concept] = t
for he in header.header_entries:
@@ -276,7 +294,7 @@ def mk_sql_rep_schema(header: Header,
schema_item_generators=(
_gen_unique_constraint, _gen_pk_constraint, _gen_index,
_gen_foreign_key_constraints),
override_schema=override_schema)
override_schema=schema_name)
if he_concept not in type_tables:
type_tables[he_concept] = {}
@@ -285,7 +303,7 @@ def mk_sql_rep_schema(header: Header,
if view_generators is not None:
for generator in view_generators:
for name, queryable in generator(header.mitm, concept_tables, type_tables):
views[name] = create_view(name, queryable, meta)
views[name] = create_view(name, queryable, meta) # TODO make `create_view` schema-aware and add `schema=schema_name`
return SQLRepresentationSchema(meta=meta, concept_tables=concept_tables, type_tables=type_tables, views=views)
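A hedged usage sketch of the refactored entry point; the `header` value and the view-creation behaviour of `create_view` are assumptions, not repository code:

import sqlalchemy as sa

# header: Header — assumed to be obtained elsewhere, e.g. via read_header_file(...)
rep_schema = mk_sql_rep_schema(header)   # default schema, denormalized views enabled
engine = sa.create_engine('sqlite://')
rep_schema.meta.create_all(engine)       # creates the concept/type tables; views too,
                                         # provided create_view registers DDL on the MetaData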
......
from . import definitions, factories, mitm_specific
from . import exporting, from_sql, from_intermediate
from . import interface
from .exporting import write_superset_assets_def
from .interface import mk_superset_datasource_import, mk_superset_visualization_import, mk_superset_mitm_dataset_import
from .exporting import write_superset_import_as_zip
from .interface import mk_superset_datasource_bundle, mk_superset_visualization_bundle, mk_superset_mitm_dataset_bundle
@@ -2,7 +2,7 @@ from uuid import UUID
import pydantic
import sqlalchemy as sa
from mitm_tooling.representation.sql.common import SchemaName
from mitm_tooling.representation import SchemaName
from mitm_tooling.representation.sql_representation import SQL_REPRESENTATION_DEFAULT_SCHEMA
from .definitions import StrUrl
from .factories.utils import mk_short_uuid_str
@@ -22,9 +22,12 @@ class SupersetDBConnectionInfo(pydantic.BaseModel):
schema_name: SchemaName = SQL_REPRESENTATION_DEFAULT_SCHEMA
@property
def db_name_in_uri(self) -> AnyUrl | None:
if self.sql_alchemy_uri is not None:
return any_url_into_sa_url(self.sql_alchemy_uri).database
def sa_url(self) -> sa.URL:
return any_url_into_sa_url(self.sql_alchemy_uri)
@property
def db_name_in_uri(self) -> str:
return self.sa_url.database
@property
def db_name(self) -> str:
@@ -35,8 +38,8 @@ class SupersetDBConnectionInfo(pydantic.BaseModel):
return dialect_cls_from_url(self.sql_alchemy_uri)
def name_plus_uuid(name: str, uuid: UUID | None = None) -> str:
return f'{name}-{mk_short_uuid_str(uuid)}'
def name_plus_uuid(name: str, uuid: UUID | None = None, sep: str = '-') -> str:
return f'{name}{sep}{mk_short_uuid_str(uuid)}'
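A small illustrative call of the new separator parameter; the suffix shown is made up, the real value comes from mk_short_uuid_str:

name_plus_uuid('orders_dataset', sep='_')   # e.g. 'orders_dataset_1a2b3c4d'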
def _mk_engine(arg: SQLiteFileOrEngine) -> sa.Engine:
......
@@ -4,20 +4,20 @@ from typing import Any
import pydantic
from .definitions import SupersetDatabaseDef, SupersetMitMDatasetDef, \
SupersetChartDef, SupersetDashboardDef, BaseSupersetDefinition, SupersetAssetsDef, SupersetDatasetDef, \
ExtendedSupersetAssetsDef, SupersetDefFolder, DatasourceIdentifier
from .factories.assets import mk_assets, mk_extended_assets
from ...representation.sql.common import TableName
SupersetChartDef, SupersetDashboardDef, SupersetAssetsImport, SupersetDatasetDef, \
SupersetMitMDatasetImport, SupersetDefFolder, DatasourceIdentifier
from .factories.importable import mk_assets_import, mk_mitm_dataset_import
from ...representation import TableName
class SupersetAssetBundle(SupersetDefFolder, ABC):
@abstractmethod
def to_assets(self) -> SupersetAssetsDef | ExtendedSupersetAssetsDef:
def to_import(self) -> SupersetAssetsImport | SupersetMitMDatasetImport:
pass
@property
def folder_dict(self) -> dict[str, Any]:
return self.to_assets().folder_dict
return self.to_import().folder_dict
class SupersetDatasourceBundle(SupersetAssetBundle):
@@ -28,16 +28,16 @@ class SupersetDatasourceBundle(SupersetAssetBundle):
def placeholder_dataset_identifiers(self) -> dict[TableName, DatasourceIdentifier]:
return {ds.table_name: DatasourceIdentifier(dataset_uuid=ds.uuid) for ds in self.datasets}
def to_assets(self) -> SupersetAssetsDef:
return mk_assets(databases=[self.database], datasets=self.datasets)
def to_import(self) -> SupersetAssetsImport:
return mk_assets_import(databases=[self.database], datasets=self.datasets)
class SupersetVisualizationBundle(SupersetAssetBundle):
charts: list[SupersetChartDef] = pydantic.Field(default_factory=list)
dashboards: list[SupersetDashboardDef] = pydantic.Field(default_factory=list)
def to_assets(self) -> SupersetAssetsDef:
return mk_assets(charts=self.charts, dashboards=self.dashboards)
def to_import(self) -> SupersetAssetsImport:
return mk_assets_import(charts=self.charts, dashboards=self.dashboards)
class SupersetMitMDatasetBundle(SupersetAssetBundle):
@@ -45,9 +45,9 @@ class SupersetMitMDatasetBundle(SupersetAssetBundle):
datasource_bundle: SupersetDatasourceBundle
visualization_bundle: SupersetVisualizationBundle = pydantic.Field(default_factory=SupersetVisualizationBundle)
def to_assets(self) -> ExtendedSupersetAssetsDef:
base_assets = mk_assets(databases=[self.datasource_bundle.database],
def to_import(self) -> SupersetMitMDatasetImport:
base_assets = mk_assets_import(databases=[self.datasource_bundle.database],
datasets=self.datasource_bundle.datasets,
charts=self.visualization_bundle.charts,
dashboards=self.visualization_bundle.dashboards)
return mk_extended_assets(mitm_datasets=[self.mitm_dataset], base_assets=base_assets)
return mk_mitm_dataset_import(mitm_datasets=[self.mitm_dataset], base_assets=base_assets)
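A hedged end-to-end sketch of how the renamed bundle API is meant to be used; the bundle construction is elided, and `bundle` is assumed to come from mk_superset_mitm_dataset_bundle or manual assembly:

# bundle: SupersetMitMDatasetBundle — assumed to be built elsewhere
importable = bundle.to_import()                            # SupersetMitMDatasetImport
write_superset_import_as_zip('mitm_dataset.zip', bundle)   # serializes bundle.folder_dict into a zip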
from .assets import *
from .charts import *
from .constants import *
from .core import *
from .database import *
from .dataset import *
from .chart import *
from .dashboard import *
from .importable import *
from .post_processing import *
from typing import Annotated
import pydantic
from . import SupersetDefFile, StrUUID, SupersetVizType, QueryContext, StrUrl
from .core import *
class ChartParams(FormData):
datasource: str | DatasourceIdentifier
viz_type: SupersetVizType
@@ -72,3 +79,29 @@ class TimeSeriesLineParams(TimeSeriesChartParams):
opacity: float = 0.2
markerSize: int = 6
seriesType: str = 'line'
class SupersetChartDef(SupersetDefFile):
uuid: StrUUID
slice_name: str
viz_type: SupersetVizType
dataset_uuid: StrUUID
description: str | None = None
certified_by: str | None = None
certification_details: str | None = None
params: ChartParams | None = None
query_context: QueryContext | None = None
# query_context: Annotated[pydantic.Json | QueryContext | None, pydantic.PlainSerializer(
# lambda x: x.model_dump_json(by_alias=True, serialize_as_any=True, exclude_none=True) if isinstance(x,
# pydantic.BaseModel) else x,
# return_type=pydantic.Json), pydantic.Field(default=None)]
cache_timeout: int | None = None
version: str = '1.0.0'
is_managed_externally: bool = False
external_url: StrUrl | None = None
@property
def filename(self) -> str:
return f'{self.slice_name}_{self.dataset_uuid}'
from abc import ABC, abstractmethod
from datetime import datetime
from enum import StrEnum, IntEnum
@@ -190,3 +191,11 @@ class BaseSupersetDefinition(pydantic.BaseModel):
class FrozenSupersetDefinition(BaseSupersetDefinition):
model_config = ConfigDict(arbitrary_types_allowed=True, use_enum_values=True, populate_by_name=True, frozen=True)
class SupersetDefFile(BaseSupersetDefinition, ABC):
@property
@abstractmethod
def filename(self) -> str:
pass
from abc import ABC
from typing import Any
from mitm_tooling.representation import ColumnName
@@ -13,7 +12,7 @@ class SupersetPostProcessing(pydantic.BaseModel, ABC):
class DatasourceIdentifier(FrozenSupersetDefinition):
id: SupersetId = '-1' # -1 as a placeholder
id: SupersetId = -1 # -1 as a placeholder
type: Literal['table', 'annotation'] = 'table'
dataset_uuid: StrUUID = pydantic.Field(exclude=True)
@@ -175,12 +174,12 @@ class FormData(BaseSupersetDefinition):
class QueryContext(BaseSupersetDefinition):
model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)
datasource: DatasourceIdentifier
queries: list[QueryObject] = pydantic.Field(default_factory=list)
form_data: FormData | None = pydantic.Field(default=None)
form_data: FormData | None = pydantic.Field(default=None, strict=False)
result_type: ChartDataResultType = ChartDataResultType.FULL
result_format: ChartDataResultFormat = ChartDataResultFormat.JSON
force: bool = False
custom_cache_timeout: int | None = None
@@ -3,6 +3,7 @@ from typing import Literal
import pydantic
from . import SupersetDefFile, StrUUID, StrUrl
from .constants import StrUUID
DashboardInternalID = str
@@ -111,3 +112,23 @@ class DashboardMetadata(pydantic.BaseModel):
color_scheme: str = 'blueToGreen'
cross_filters_enabled: bool = True
native_filter_configuration: list[NativeFilterConfig] = pydantic.Field(default_factory=list)
class SupersetDashboardDef(SupersetDefFile):
uuid: StrUUID
dashboard_title: str
position: DashboardPositionData
metadata: DashboardMetadata
description: str | None = None
css: str | None = None
slug: str | None = None
is_managed_externally: bool | None = False
external_url: StrUrl | None = None
certified_by: str | None = None
certification_details: str | None = None
published: bool | None = False
version: str = '1.0.0'
@property
def filename(self) -> str:
return f'{self.dashboard_title}_{self.uuid}'
from typing import Any
import pydantic
from mitm_tooling.transformation.superset.definitions import SupersetDefFile, StrUrl, StrUUID
class SupersetDatabaseDef(SupersetDefFile):
database_name: str
sqlalchemy_uri: StrUrl
uuid: StrUUID
# verbose_name : str | None = None
cache_timeout: str | None = None
expose_in_sqllab: bool = True
allow_run_async: bool = False
allow_ctas: bool = False
allow_cvas: bool = False
allow_dml: bool = False
allow_file_upload: bool = False
extra: dict[str, Any] = pydantic.Field(default_factory=lambda: {
'allows_virtual_table_explore': True
})
impersonate_user: bool = False
version: str = '1.0.0'
ssh_tunnel: None = None
@property
def filename(self):
return self.database_name
from typing import Any
import pydantic
from mitm_tooling.transformation.superset.definitions import SupersetDefFile, StrUUID, SupersetMetric, SupersetColumn
class SupersetDatasetDef(SupersetDefFile):
table_name: str
schema_name: str = pydantic.Field(alias='schema')
uuid: StrUUID
database_uuid: StrUUID
main_dttm_col: str | None = None
description: str | None = None
default_endpoint: str | None = None
offset: int = 0
cache_timeout: str | None = None
catalog: str | None = None
sql: str | None = None
params: Any = None
template_params: Any = None
filter_select_enabled: bool = True
fetch_values_predicate: str | None = None
extra: dict[str, Any] = pydantic.Field(default_factory=dict)
normalize_columns: bool = False
always_filter_main_dttm: bool = False
metrics: list[SupersetMetric] = pydantic.Field(default_factory=list)
columns: list[SupersetColumn] = pydantic.Field(default_factory=list)
version: str = '1.0.0'
@property
def filename(self):
return self.table_name
from abc import abstractmethod
from collections import defaultdict
from datetime import UTC
from mitm_tooling.definition import MITM
from .charts import *
from .dashboard import DashboardPositionData, DashboardMetadata
from .chart import *
from .dashboard import SupersetDashboardDef
from .database import SupersetDatabaseDef
from .dataset import SupersetDatasetDef
from .mitm_dataset import SupersetMitMDatasetDef
from .post_processing import *
@@ -18,14 +19,6 @@ class MetadataType(StrEnum):
MitMDataset = 'MitMDataset'
class SupersetDefFile(BaseSupersetDefinition, ABC):
@property
@abstractmethod
def filename(self) -> str:
pass
class SupersetDefFolder(BaseSupersetDefinition, ABC):
@property
@@ -35,8 +28,8 @@ class SupersetDefFolder(BaseSupersetDefinition, ABC):
class SupersetMetadataDef(SupersetDefFile):
type: MetadataType
version: str = '1.0.0'
type: MetadataType = MetadataType.SqlaTable
timestamp: StrDatetime = pydantic.Field(default_factory=lambda: datetime.now(UTC))
@property
@@ -44,114 +37,7 @@ class SupersetMetadataDef(SupersetDefFile):
return 'metadata'
class SupersetDatabaseDef(SupersetDefFile):
database_name: str
sqlalchemy_uri: StrUrl
uuid: StrUUID
# verbose_name : str | None = None
cache_timeout: str | None = None
expose_in_sqllab: bool = True
allow_run_async: bool = False
allow_ctas: bool = False
allow_cvas: bool = False
allow_dml: bool = False
allow_file_upload: bool = False
extra: dict[str, Any] = pydantic.Field(default_factory=lambda: {
'allows_virtual_table_explore': True
})
impersonate_user: bool = False
version: str = '1.0.0'
ssh_tunnel: None = None
@property
def filename(self):
return self.database_name
class SupersetDatasetDef(SupersetDefFile):
table_name: str
schema_name: str = pydantic.Field(alias='schema')
uuid: StrUUID
database_uuid: StrUUID
main_dttm_col: str | None = None
description: str | None = None
default_endpoint: str | None = None
offset: int = 0
cache_timeout: str | None = None
catalog: str | None = None
sql: str | None = None
params: Any = None
template_params: Any = None
filter_select_enabled: bool = True
fetch_values_predicate: str | None = None
extra: dict[str, Any] = pydantic.Field(default_factory=dict)
normalize_columns: bool = False
always_filter_main_dttm: bool = False
metrics: list[SupersetMetric] = pydantic.Field(default_factory=list)
columns: list[SupersetColumn] = pydantic.Field(default_factory=list)
version: str = '1.0.0'
@property
def filename(self):
return self.table_name
class SupersetChartDef(SupersetDefFile):
uuid: StrUUID
slice_name: str
viz_type: SupersetVizType
dataset_uuid: StrUUID
description: str | None = None
certified_by: str | None = None
certification_details: str | None = None
params: ChartParams | None = None
query_context: Annotated[pydantic.Json | QueryContext | None, pydantic.PlainSerializer(
lambda x: x.model_dump_json(by_alias=True, exclude_none=True, serialize_as_any=True) if isinstance(x,
pydantic.BaseModel) else x,
return_type=pydantic.Json), pydantic.Field(default=None)]
cache_timeout: int | None = None
version: str = '1.0.0'
is_managed_externally: bool = False
external_url: StrUrl | None = None
@property
def filename(self) -> str:
return f'{self.slice_name}_{self.dataset_uuid}'
class SupersetDashboardDef(SupersetDefFile):
uuid: StrUUID
dashboard_title: str
position: DashboardPositionData
metadata: DashboardMetadata
description: str | None = None
css: str | None = None
slug: str | None = None
is_managed_externally: bool | None = False
external_url: StrUrl | None = None
certified_by: str | None = None
certification_details: str | None = None
published: bool | None = False
version: str = '1.0.0'
@property
def filename(self) -> str:
return f'{self.dashboard_title}_{self.uuid}'
class SupersetMitMDatasetDef(SupersetDefFile):
uuid: StrUUID
dataset_name: str
mitm: MITM
database_uuid: StrUUID
version: str = '1.0.0'
@property
def filename(self) -> str:
return self.dataset_name
class SupersetAssetsDef(SupersetDefFolder):
class SupersetAssetsImport(SupersetDefFolder):
databases: list[SupersetDatabaseDef] | None = None
datasets: list[SupersetDatasetDef] | None = None
charts: list[SupersetChartDef] | None = None
@@ -177,13 +63,15 @@ class SupersetAssetsDef(SupersetDefFolder):
return {'my_import': folder_dict}
class ExtendedSupersetAssetsDef(SupersetDefFolder):
class SupersetMitMDatasetImport(SupersetDefFolder):
mitm_datasets: list[SupersetMitMDatasetDef] | None
base_assets: SupersetAssetsDef | None
base_assets: SupersetAssetsImport | None
metadata: SupersetMetadataDef = pydantic.Field(default_factory=lambda : SupersetMetadataDef(type=MetadataType.MitMDataset))
@property
def folder_dict(self) -> dict[str, Any]:
asset_folder_dict = self.base_assets.folder_dict if self.base_assets else {'my_import': {}}
asset_folder_dict['my_import']['.'] = self.metadata
dbs = {}
if self.base_assets.databases:
dbs = {db.uuid: db.database_name for db in self.base_assets.databases}
......
from mitm_tooling.definition import MITM
from mitm_tooling.representation import Header
from mitm_tooling.transformation.superset.definitions import SupersetDefFile, StrUUID, BaseSupersetDefinition, \
SupersetId
class RelatedTable(BaseSupersetDefinition):
table_id : SupersetId | None = None
table_uuid : StrUUID
class RelatedSlice(BaseSupersetDefinition):
slice_id : SupersetId | None = None
slice_uuid : StrUUID
class RelatedDashboard(BaseSupersetDefinition):
dashboard_id : SupersetId | None = None
dashboard_uuid : StrUUID
class SupersetMitMDatasetDef(SupersetDefFile):
uuid: StrUUID
dataset_name: str
mitm: MITM
mitm_header: Header | None = None
database_uuid: StrUUID
tables: list[RelatedTable] | None = None
slices: list[RelatedSlice] | None = None
dashboards: list[RelatedDashboard] | None = None
version: str = '1.0.0'
@property
def filename(self) -> str:
return self.dataset_name
@@ -7,7 +7,7 @@ from mitm_tooling.utilities.io_utils import FilePath, ByteSink, use_bytes_io
from .definitions import SupersetDefFile, SupersetDefFolder
def write_superset_def_as_zip(target: ByteSink, superset_def: SupersetDefFolder):
def write_superset_import_as_zip(target: ByteSink, superset_def: SupersetDefFolder):
folder_structure = superset_def.folder_dict
with use_bytes_io(target, expected_file_ext='.zip', mode='wb', create_file_if_necessary=True) as f:
with zipfile.ZipFile(f, 'w', zipfile.ZIP_DEFLATED) as zf:
@@ -16,8 +16,8 @@ def write_superset_def_as_zip(target: ByteSink, superset_def: SupersetDefFolder)
fn = f'{arg.filename}.yaml'
if prefix:
fn = os.path.join(prefix, fn)
dump = arg.model_dump(by_alias=True, mode='python', exclude_none=True, serialize_as_any=True)
s = yaml.dump(dump, default_flow_style=False)
dump = arg.model_dump(by_alias=True, mode='json', exclude_none=True, serialize_as_any=True)
s = yaml.safe_dump(dump, default_flow_style=False)
zf.writestr(fn, s)
# with zf.open(fn, 'w') as df:
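For context, a hedged illustration of why the dump switches to mode='json' together with yaml.safe_dump: non-primitive field values such as UUIDs are not representable by the safe dumper, while JSON mode coerces them to plain strings. The Demo model below is purely illustrative, not repository code:

import uuid
import pydantic
import yaml

class Demo(pydantic.BaseModel):
    id: uuid.UUID

demo = Demo(id=uuid.uuid4())
# ok: mode='json' renders the UUID as a plain string, which safe_dump accepts
yaml.safe_dump(demo.model_dump(mode='json'), default_flow_style=False)
# yaml.safe_dump(demo.model_dump(mode='python'))  # would raise RepresenterError on the UUID object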
@@ -39,7 +39,3 @@ def write_superset_def_as_zip(target: ByteSink, superset_def: SupersetDefFolder)
mk_node(folder_content, prefix=path)
mk_node(folder_structure)
\ No newline at end of file
def write_superset_assets_def(output_path: FilePath, superset_def: SupersetDefFolder):
write_superset_def_as_zip(output_path, superset_def)
from ..definitions import SupersetMetadataDef, SupersetDatabaseDef, SupersetDashboardDef
from ..definitions.assets import MetadataType, SupersetAssetsDef, SupersetDatasetDef, SupersetChartDef, \
ExtendedSupersetAssetsDef, SupersetMitMDatasetDef
def mk_metadata(metadata_type: MetadataType) -> SupersetMetadataDef:
return SupersetMetadataDef(type=metadata_type)
def mk_assets(databases: list[SupersetDatabaseDef] = None,
datasets: list[SupersetDatasetDef] = None,
charts: list[SupersetChartDef] = None,
dashboards: list[SupersetDashboardDef] = None,
metadata_type: MetadataType | None = None) -> SupersetAssetsDef:
return SupersetAssetsDef(databases=databases, datasets=datasets, charts=charts, dashboards=dashboards,
metadata=SupersetMetadataDef(type=metadata_type or MetadataType.Asset))
def mk_extended_assets(mitm_datasets: list[SupersetMitMDatasetDef],
base_assets: SupersetAssetsDef) -> ExtendedSupersetAssetsDef:
return ExtendedSupersetAssetsDef(mitm_datasets=mitm_datasets, base_assets=base_assets)
from uuid import UUID
from .utils import mk_uuid
from ..definitions import SupersetChartDef, SupersetVizType, ChartParams, QueryContext
def mk_chart_def(name: str, dataset_uuid: UUID, viz_type: SupersetVizType, params: ChartParams,
query_context: QueryContext, uuid: UUID | None = None, **kwargs) -> SupersetChartDef:
return SupersetChartDef(
slice_name=name,
viz_type=viz_type,
dataset_uuid=dataset_uuid,
params=params,
query_context=query_context,
uuid=uuid or mk_uuid(),
**kwargs
)
from typing import Any
from uuid import UUID
from mitm_tooling.representation import ColumnName
from mitm_tooling.transformation.superset.definitions import SupersetDashboardDef, SupersetChartDef
from mitm_tooling.transformation.superset.definitions import SupersetChartDef
from mitm_tooling.transformation.superset.factories.utils import mk_uuid, mk_short_uuid_str
from ..definitions.dashboard import *
@@ -38,9 +37,11 @@ def mk_dashboard_chart(chart_uuid: UUID, width: int, height: int, slice_name: st
return DashboardChart(id=id or f'CHART-{mk_short_uuid_str()}',
meta=ChartMeta(uuid=chart_uuid, width=width, height=height, sliceName=slice_name))
def chart_def_into_dashboard_chart(chart_def: SupersetChartDef, width: int, height: int) -> DashboardChart:
return mk_dashboard_chart(chart_uuid=chart_def.uuid, width=width, height=height)
def mk_dashboard_position_data(header_text: str, chart_grid: list[list[DashboardChart]]) -> DashboardPositionData:
row_ids = []
elements = {}
......
@@ -5,10 +5,9 @@ import sqlalchemy as sa
from mitm_tooling.data_types import MITMDataType
from mitm_tooling.extraction.sql.data_models import TableMetaInfo
from mitm_tooling.transformation.superset.definitions import SupersetDatasetDef
from .core import mk_column, mk_metric
from .utils import mk_uuid
from ..definitions import SupersetAggregate
from ..definitions import SupersetAggregate, SupersetDatasetDef
def mk_dataset(tm: TableMetaInfo, database_uuid: UUID, dialect: sa.Dialect | None = None,
......