diff --git a/app/logic/upload.py b/app/logic/upload.py
index 89b25ee4fd5a19d9da4b63d5c5397399c7366375..67f04cc0add587f0325777fe0dbb1f842ff966b1 100644
--- a/app/logic/upload.py
+++ b/app/logic/upload.py
@@ -32,16 +32,16 @@ def _skip_instances(
     return SQLRepInsertionResult(inserted_types=[], inserted_instances=0, inserted_rows=0)
 
 
-def upload_mitm_file(mitm: MITM,
+async def upload_mitm_file(mitm: MITM,
                      mitm_zip: DataSource,
                      dataset_name: str,
                      uuid: UUID | None = None,
                      engine: Engine = None) -> AddTrackedMitMDataset:
     mitm_data = read_zip(mitm_zip, mitm)
-    return upload_mitm_data(mitm_data, dataset_name, uuid=uuid, engine=engine)
+    return await upload_mitm_data(mitm_data, dataset_name, uuid=uuid, engine=engine)
 
 
-def upload_mitm_data(mitm_data: MITMData,
+async def upload_mitm_data(mitm_data: MITMData,
                      dataset_name: str,
                      uuid: UUID | None = None,
                      engine: Engine = None,
@@ -51,10 +51,10 @@ def upload_mitm_data(mitm_data: MITMData,
     else:
         get_instances = lambda: mitm_data_into_mitm_dataframes(mitm_data).typed_stream()
 
-    return upload_data(lambda: mitm_data.header, get_instances, dataset_name, uuid, engine)
+    return await upload_data(lambda: mitm_data.header, get_instances, dataset_name, uuid, engine)
 
 
-def upload_exportable(source: AnyUrl,
+async def upload_exportable(source: AnyUrl,
                       exportable: Exportable,
                       dataset_name: str,
                       uuid: UUID | None = None,
@@ -70,10 +70,10 @@ def upload_exportable(source: AnyUrl,
     def get_instances():
         return exportable_to_typed_mitm_dataframes_stream(source_engine, exportable, stream_data=False)
 
-    return upload_data(get_header, get_instances, dataset_name, uuid, engine)
+    return await upload_data(get_header, get_instances, dataset_name, uuid, engine)
 
 
-def upload_data(get_header: Callable[[], Header],
+async def upload_data(get_header: Callable[[], Header],
                 get_instances: Callable[[], TypedMitMDataFrameStream],
                 dataset_name: str,
                 uuid: UUID | None = None,
@@ -88,7 +88,7 @@ def upload_data(get_header: Callable[[], Header],
     header = get_header()
     sql_rep_schema = mk_sql_rep_schema(header, override_schema=unique_schema_name, skip_fk_constraints=True)
 
-    with engine.begin() as connection:
+    with engine.connect() as connection:
         create_schema(connection, unique_schema_name)
         logger.info(f'Created schema: {unique_schema_name}')
 
@@ -104,4 +104,4 @@ def upload_data(get_header: Callable[[], Header],
                                   dataset_name=dataset_name,
                                   schema_name=unique_schema_name,
                                   sql_alchemy_uri=sql_alchemy_uri,
-                                  mitm_header=header)
+                                  mitm_header=header)
\ No newline at end of file
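Note on the engine.begin() -> engine.connect() change in upload_data above: in SQLAlchemy 2.x, begin() commits the transaction automatically when the block exits cleanly, while connect() rolls the implicit transaction back on close unless connection.commit() is called. A minimal standalone sketch of that difference; the in-memory SQLite engine and table are illustrative only, not part of this patch:

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite://")  # throwaway in-memory engine for illustration

    # engine.begin(): the transaction is committed automatically on a clean exit.
    with engine.begin() as connection:
        connection.execute(text("CREATE TABLE t (x INTEGER)"))

    # engine.connect(): work is rolled back when the connection closes
    # unless it is committed explicitly.
    with engine.connect() as connection:
        connection.execute(text("INSERT INTO t VALUES (1)"))
        connection.commit()  # without this line the INSERT would be discarded

With the hunk above, whatever upload_data writes through this connection now relies on an explicit commit that is not visible in this hunk.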
diff --git a/app/routes/mitm_dataset/router.py b/app/routes/mitm_dataset/router.py
index ae7adf384404d07c1d98a2fc6371e33252f6fe51..1c54a5ba3ae1c63a89c617f36e193ca763e1d48b 100644
--- a/app/routes/mitm_dataset/router.py
+++ b/app/routes/mitm_dataset/router.py
@@ -30,14 +30,14 @@ router.include_router(mapped_router)
 logger = logging.getLogger(__name__)
 
 @router.post('/upload')
-def upload_mitm_dataset(
+async def upload_mitm_dataset(
         session: ORMSessionDependency,
         engine: DBEngineDependency,
         dataset_name: str,
         mitm: MITM = MITM.MAED,
         mitm_zip: UploadFile = File(media_type='application/zip')) -> UploadMitMResponse:
     try:
-        add_model = upload_mitm_file(mitm, mitm_zip.file, dataset_name=dataset_name, uuid=mk_uuid(), engine=engine)
+        add_model = await upload_mitm_file(mitm, mitm_zip.file, dataset_name=dataset_name, uuid=mk_uuid(), engine=engine)
         model = register_mitm_dataset(session, add_model)
 
         return UploadMitMResponse(status='success', tracked_mitm_dataset=model)
@@ -65,7 +65,7 @@ def put_mitm_dataset(session: ORMSessionDependency,
                      tracked_dataset: TrackedMitMDatasetDependency,
                      edited_mitm_dataset: EditTrackedMitMDatasetRequest) -> TrackedMitMDataset:
     tracked_dataset.sqlmodel_update(edited_mitm_dataset)
-    tracked_dataset.last_edited = datetime.now()
+    session.commit()
     session.refresh(tracked_dataset)
     return tracked_dataset
 
@@ -97,7 +97,7 @@ def refresh_mitm_dataset(session: ORMSessionDependency,
 
 
 @router.post('/export/{uuid}', response_class=StreamingResponse, responses={200: {'content': {'application/zip': {}}}})
-def export_mitm_dataset(engine: DBEngineDependency,
+async def export_mitm_dataset(engine: DBEngineDependency,
                         tracked_dataset: TrackedMitMDatasetDependency,
                         use_streaming: bool = False) -> StreamingResponse:
     remote_engine, exportable = export_via_mapping(tracked_dataset)
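For reference, the upload route above (now an async endpoint that awaits upload_mitm_file) takes dataset_name and mitm as query parameters and the zip as a multipart field named mitm_zip. A hedged smoke-test sketch with httpx; the host, the /mitm_dataset path prefix, and the file name are assumptions, only the parameter names and the 8180 port come from this diff:

    import httpx

    # Hypothetical client call against a local instance; adjust the base URL
    # and route prefix to the actual deployment.
    with open("maed_export.zip", "rb") as f:
        response = httpx.post(
            "http://localhost:8180/mitm_dataset/upload",
            params={"dataset_name": "demo", "mitm": "MAED"},
            files={"mitm_zip": ("maed_export.zip", f, "application/zip")},
            timeout=httpx.Timeout(60.0),
        )
    response.raise_for_status()
    print(response.json())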
diff --git a/helm/superset-mitm-service/Chart.yaml b/helm/superset-mitm-service/Chart.yaml
index 815b9877181fd332a12d58e2b9f9e2febaf34d74..5189add212b5a35f43ee9ac60df55f291209fd45 100644
--- a/helm/superset-mitm-service/Chart.yaml
+++ b/helm/superset-mitm-service/Chart.yaml
@@ -1,6 +1,6 @@
 apiVersion: v2
 name: superset-mitm-service
-description: A Helm chart for the superset-mitm-service that includes a timescale db.
+description: A Helm chart for the superset-mitm-service that includes a postgres db.
 
 # A chart can be either an 'application' or a 'library' chart.
 #
@@ -15,16 +15,16 @@ type: application
 # This is the chart version. This version number should be incremented each time you make changes
 # to the chart and its templates, including the app version.
 # Versions are expected to follow Semantic Versioning (https://semver.org/)
-version: 0.1.3
+version: 0.1.4
 
 # This is the version number of the application being deployed. This version number should be
 # incremented each time you make changes to the application. Versions are not expected to
 # follow Semantic Versioning. They should reflect the version the application is using.
 # It is recommended to use it with quotes.
-appVersion: "0.1.1"
+appVersion: "0.1.2"
 
 dependencies:
   - name: postgresql
-    version: 16.5.0
+    version: 16.7.4
     repository: https://charts.bitnami.com/bitnami
     alias: mitm-postgresql
\ No newline at end of file
diff --git a/helm/superset-mitm-service/templates/deployment.yaml b/helm/superset-mitm-service/templates/deployment.yaml
index 85b8524afaf04d8fb8d49edbb6791222ddb2f654..c5baa37c2859b72c1f09ff482c7d787ddfe0fd8f 100644
--- a/helm/superset-mitm-service/templates/deployment.yaml
+++ b/helm/superset-mitm-service/templates/deployment.yaml
@@ -32,16 +32,22 @@ spec:
       securityContext:
         {{- toYaml . | nindent 8 }}
       {{- end }}
+      {{- with .Values.initContainers }}
       initContainers:
-        - name: wait-for-db
-          image: postgres:17-alpine
-          command: [ "sh", "-c" ]
+      {{- range . }}
+        - name: {{ .name }}
+          image: {{ .image }}
+          command:
+            {{- toYaml .command | nindent 12 }}
           args:
-            - |
-              until pg_isready -h {{ tpl .Values.mitmDB.mitm_database_host . }} -p {{ .Values.mitmDB.mitm_database_port }}; do
-                echo "Waiting for postgres DB...";
-                sleep 2;
-              done
+            {{- toYaml .args | nindent 12 }}
+          env:
+            - name: MITM_DATABASE_HOST
+              value: {{ tpl $.Values.mitmDB.mitm_database_host $ | quote }}
+            - name: MITM_DATABASE_PORT
+              value: {{ $.Values.mitmDB.mitm_database_port | quote }}
+      {{- end }}
+      {{- end }}
       containers:
         - name: {{ .Chart.Name }}
           {{- with .Values.securityContext }}
diff --git a/helm/superset-mitm-service/templates/tests/test-connection.yaml b/helm/superset-mitm-service/templates/tests/test-connection.yaml
index 4a8e4c07d8dedd3ea12a498e9506118c011b21cf..46aba30f72992f0591c23a0a5244bc96f140653a 100644
--- a/helm/superset-mitm-service/templates/tests/test-connection.yaml
+++ b/helm/superset-mitm-service/templates/tests/test-connection.yaml
@@ -10,6 +10,6 @@ spec:
   containers:
     - name: wget
       image: busybox
-      command: ['wget']
-      args: ['{{ include "superset-mitm-service.fullname" . }}:{{ .Values.service.port }}']
+      command: [ 'wget' ]
+      args: [ '{{ include "superset-mitm-service.fullname" . }}:{{ .Values.service.port }}' ]
   restartPolicy: Never
diff --git a/helm/superset-mitm-service/values.yaml b/helm/superset-mitm-service/values.yaml
index 5ee29b532cd3f944f950acab5674da723d1ba9b7..5510ed0c49277773dbca1fd89a2535e936c9e13b 100644
--- a/helm/superset-mitm-service/values.yaml
+++ b/helm/superset-mitm-service/values.yaml
@@ -52,9 +52,9 @@ podSecurityContext: { }
   # fsGroup: 2000
 
 securityContext: { }
-  # capabilities:
-  #   drop:
-  #     - ALL
+# capabilities:
+#   drop:
+#     - ALL
   # readOnlyRootFilesystem: true
   # runAsNonRoot: true
   # runAsUser: 1000
@@ -67,13 +67,13 @@ service:
   port: 8180
 
 resources: { }
-  # We usually recommend not to specify default resources and to leave this as a conscious
-  # choice for the user. This also increases chances charts run on environments with little
-  # resources, such as Minikube. If you do want to specify resources, uncomment the following
-  # lines, adjust them as necessary, and remove the curly braces after 'resources:'.
-  # limits:
-  #   cpu: 100m
-  #   memory: 128Mi
+# We usually recommend not to specify default resources and to leave this as a conscious
+# choice for the user. This also increases chances charts run on environments with little
+# resources, such as Minikube. If you do want to specify resources, uncomment the following
+# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
+# limits:
+#   cpu: 100m
+#   memory: 128Mi
   # requests:
   #   cpu: 100m
   #   memory: 128Mi
@@ -125,15 +125,30 @@ volumes:
 
 # Additional volumeMounts on the output Deployment definition.
 volumeMounts:
-- name: uploads
-  mountPath: /uploads
-- name: exports
-  mountPath: /exports
+  - name: uploads
+    mountPath: /uploads
+  - name: exports
+    mountPath: /exports
 # - name: foo
 #   mountPath: "/etc/foo"
 #   readOnly: true
 
 
+# Init containers configuration
+initContainers:
+  - name: wait-for-db
+    image: postgres:17-alpine
+    command:
+      - sh
+      - -c
+    args:
+      - |
+        until pg_isready -h ${MITM_DATABASE_HOST} -p ${MITM_DATABASE_PORT}; do
+          echo "Waiting for postgres DB...";
+          sleep 2;
+        done
+
+
 nodeSelector: { }
 
 tolerations: [ ]
@@ -151,7 +166,9 @@ mitmDB:
   mitm_database_db: "mitm_db"
 
 connections:
-  origin: "http://localhost:8080" # can be overridden with a template string
+  # CORS origin for API requests
+  # can be overridden with a template string
+  origin: "http://localhost:8080"
 
 apiConfig:
   export_dir: "/exports/"
@@ -175,8 +192,6 @@ mitm-postgresql:
   automountServiceAccountToken: true
 
   primary:
-    podAnnotations:
-      sidecar.istio.io/inject: "false"
     ##
     ## Persistent Volume Storage configuration.
     ## ref: https://kubernetes.io/docs/user-guide/persistent-volumes
@@ -199,4 +214,4 @@ mitm-postgresql:
 
   # initScripts:
   #   timescaledb: |
-  #     CREATE EXTENSION IF NOT EXISTS timescaledb;
\ No newline at end of file
+  #     CREATE EXTENSION IF NOT EXISTS timescaledb;
diff --git a/test/http-client.env.json b/test/http-client.env.json
index 3f634b80b0c140a2b7ce768651429744ed9f9f58..0eb9cfda8e2afd597e4119c55ffad360c76dbbfc 100644
--- a/test/http-client.env.json
+++ b/test/http-client.env.json
@@ -11,8 +11,12 @@
     "port": "8180",
     "uuid": "dadbd5df-662e-4874-89be-d8d4ae7e3b0a"
   },
-  "kubernetes": {
-    "port": "8080",
-    "uuid": "b4004d6a-bcaa-4a48-aa54-271b074109ca"
+  "kube-superset": {
+    "port": "8880",
+    "uuid": "84a6c43e-9b1d-4f85-b905-7ea72b3e6022"
+  },
+  "kube": {
+    "port": "8881",
+    "uuid": "84a6c43e-9b1d-4f85-b905-7ea72b3e6022"
   }
 }
\ No newline at end of file
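The two new HTTP-client environments ("kube-superset" on port 8880 and "kube" on 8881) each pair a port with a dataset uuid, which lines up with the POST /export/{uuid} route touched in router.py. A hedged sketch of driving that route from the "kube" environment; the localhost host, the /mitm_dataset prefix, and the output file name are assumptions, while the port, uuid, and config path come from the diff above:

    import json
    import httpx

    # Load the 'kube' environment from the HTTP-client config shown above.
    with open("test/http-client.env.json") as f:
        env = json.load(f)["kube"]

    # Stream the exported zip to disk; the route prefix is a guess.
    url = f"http://localhost:{env['port']}/mitm_dataset/export/{env['uuid']}"
    with httpx.stream("POST", url, timeout=None) as response:
        response.raise_for_status()
        with open("export.zip", "wb") as out:
            for chunk in response.iter_bytes():
                out.write(chunk)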