Commit 554355b5 authored by Leah Tacke genannt Unterberg

slight update

parent 18c1b762
.gitignore
.idea
.pytest_cache
.venv
.git/
__pycache__/
**/__pycache__/
DockerfileConda
README.md
\ No newline at end of file
[server]
maxUploadSize = 4096
+[browser]
+gatherUsageStats = false
\ No newline at end of file
-FROM continuumio/miniconda3:latest as conda
-ENV PYTHONUNBUFFERED=1
-WORKDIR /app
-# RUN git clone https://git-ce.rwth-aachen.de/leah.tgu/mdata_app.git .
-COPY .streamlit/ .streamlit/
-COPY logic/ logic/
-COPY pages/ pages/
-COPY resources/ resources/
-COPY environment.yml streamlit_app.py ./
-FROM conda as builder
-RUN conda env create -f "environment.yml"
-FROM builder as final
+ARG WORKDIR="/app"
+FROM python:3.11 as builder
+ARG WORKDIR
+ENV PYTHONFAULTHANDLER=1 \
+    PYTHONHASHSEED=random \
+    PYTHONUNBUFFERED=1 \
+    PYTHONDONTWRITEBYTECODE=1
+ENV PIP_DEFAULT_TIMEOUT=100 \
+    PIP_DISABLE_PIP_VERSION_CHECK=1 \
+    PIP_NO_CACHE_DIR=1 \
+    POETRY_VERSION=1.5
+#RUN apk add build-base libffi-dev
+RUN pip install "poetry==$POETRY_VERSION" && poetry config virtualenvs.in-project true
+WORKDIR ${WORKDIR}
+ADD pyproject.toml poetry.lock ./
+RUN poetry install --only=main --no-root
+#RUN apt-get update && apt-get install -y \
+# build-essential \
+# curl \
+# software-properties-common \
+# git \
+# python3-dev \
+# libblas-dev \
+# liblapack-dev \
+# && rm -rf /var/lib/apt/lists/*
+#RUN git clone --single-branch -b release https://git-ce.rwth-aachen.de/leah.tgu/mdata_app.git .
+#COPY .streamlit/ .streamlit/
+#COPY logic/ logic/
+#COPY pages/ pages/
+#COPY resources/ resources/
+#COPY poetry.lock pyproject.toml streamlit_app.py ./
+FROM python:3.11-slim as final
+ARG WORKDIR
+WORKDIR ${WORKDIR}
+COPY --from=builder ${WORKDIR} .
+ADD .streamlit/ .streamlit/
+ADD logic/ logic/
+ADD pages/ pages/
+ADD resources/ resources/
+ADD streamlit_app.py .streamlit ./
EXPOSE 8501
HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
-SHELL ["conda", "run", "-n", "mdata-app", "/bin/bash", "-c"]
# CMD ["./docker-entrypoint.sh"]
-ENTRYPOINT ["conda", "run", "--no-capture-output", "-n", "mdata-app", "streamlit", "run", "streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
+ENTRYPOINT ["./.venv/bin/python", "-m", "streamlit", "run", "streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
\ No newline at end of file
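The HEALTHCHECK line above probes Streamlit's built-in health endpoint with curl. The same probe can be run from Python, for example in a host-side smoke test. This is an illustrative sketch only (not part of the commit), assuming the container is running with port 8501 published; streamlit_is_healthy is a hypothetical helper name.

import urllib.request


def streamlit_is_healthy(host: str = 'localhost', port: int = 8501) -> bool:
    # GET the same endpoint the Dockerfile HEALTHCHECK uses.
    url = f'http://{host}:{port}/_stcore/health'
    try:
        with urllib.request.urlopen(url, timeout=2) as resp:
            return resp.status == 200
    except OSError:
        # Connection refused, DNS failure, or timeout: treat as unhealthy.
        return False


if __name__ == '__main__':
    print('healthy' if streamlit_is_healthy() else 'not reachable')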
FROM continuumio/miniconda3:latest as conda
-ENV PYTHONFAULTHANDLER=1 \
-    PYTHONHASHSEED=random \
-    PYTHONUNBUFFERED=1
+ENV PYTHONUNBUFFERED=1
WORKDIR /app
-FROM conda as builder
-ENV PIP_DEFAULT_TIMEOUT=100 \
-    PIP_DISABLE_PIP_VERSION_CHECK=1 \
-    PIP_NO_CACHE_DIR=1
-RUN apt-get update && apt-get install -y \
-    build-essential \
-    curl \
-    software-properties-common \
-    git \
-    python3-dev \
-    libblas-dev \
-    liblapack-dev \
-    && rm -rf /var/lib/apt/lists/*
-# RUN git clone https://git-ce.rwth-aachen.de/leah.tgu/mdata_app.git .
+#RUN apt-get update && apt-get install -y \
+# build-essential \
+# curl \
+# software-properties-common \
+# git \
+# python3-dev \
+# libblas-dev \
+# liblapack-dev \
+# && rm -rf /var/lib/apt/lists/*
+#RUN git clone --single-branch -b release https://git-ce.rwth-aachen.de/leah.tgu/mdata_app.git .
+COPY .streamlit/ .streamlit/
COPY logic/ logic/
COPY pages/ pages/
COPY resources/ resources/
-COPY poetry.lock pyproject.toml environment.yml streamlit_app.py ./
-RUN conda env create -f "environment.yml"
+COPY environment.yml streamlit_app.py ./
+FROM conda as builder
+RUN conda env create -f "environment.yml"
FROM builder as final
-COPY --from=builder /opt/venv venv
EXPOSE 8501
HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
...
FROM python:3.9-slim as base
ENV PYTHONFAULTHANDLER=1 \
PYTHONHASHSEED=random \
PYTHONUNBUFFERED=1
WORKDIR /app
FROM base as builder
ENV PIP_DEFAULT_TIMEOUT=100 \
PIP_DISABLE_PIP_VERSION_CHECK=1 \
PIP_NO_CACHE_DIR=1 \
POETRY_VERSION=1.4
RUN pip install "poetry==$POETRY_VERSION"
RUN apt-get update && apt-get install -y \
build-essential \
curl \
software-properties-common \
git \
python3-dev \
libblas-dev \
liblapack-dev \
&& rm -rf /var/lib/apt/lists/*
# RUN git clone https://git-ce.rwth-aachen.de/leah.tgu/mdata_app.git .
COPY .streamlit/ .streamlit/
COPY logic/ logic/
COPY pages/ pages/
COPY resources/ resources/
COPY poetry.lock pyproject.toml streamlit_app.py ./
RUN poetry config virtualenvs.in-project true && \
poetry install --only=main --no-root && \
poetry build
FROM base as final
COPY --from=builder /app/.venv ./.venv
COPY --from=builder /app/dist .
COPY docker-entrypoint.sh .
RUN ./.venv/bin/pip install *.whl
EXPOSE 8501
HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
# CMD ["./docker-entrypoint.sh"]
ENTRYPOINT ["streamlit", "run", "streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
\ No newline at end of file
name: mdata-app
dependencies:
-  - python=3.10
+  - python=3.11
  - numpy
  - matplotlib
  - seaborn
@@ -11,4 +11,4 @@ dependencies:
  - pip:
    - tsdownsample
    - streamlit
-    - git+https://git-ce.rwth-aachen.de/leah.tgu/mdata@develop
+    - git+https://git-ce.rwth-aachen.de/leah.tgu/mdata@master
\ No newline at end of file
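The environment now pins python=3.11 and tracks mdata@master. As a small illustration only (not part of the commit), the pin can be compared against the running interpreter with PyYAML, which the updated pyproject.toml further below also lists; the filename environment.yml is the one copied in the Dockerfiles above.

import sys

import yaml  # PyYAML

with open('environment.yml') as fh:
    env = yaml.safe_load(fh)

# The dependency list mixes plain strings ("python=3.11") and the nested pip dict.
pinned = next(d for d in env['dependencies'] if isinstance(d, str) and d.startswith('python='))
running = f'{sys.version_info.major}.{sys.version_info.minor}'
print(f'pinned: {pinned}, running: python={running}')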
+from collections.abc import Mapping
from enum import Enum
import pandas as pd
import streamlit as st
-from mdata.core import mdd
+from mdata.core import MDConcepts, MachineData, ObservationTypes
+from mdata.core.factory import as_base
+from mdata.core.protocols import TSSpec
from mdata.visualization import plotting, matplot
class Widgets(Enum):
    Specifications = 'Specifications'
    RawData = 'Raw Data'
@@ -12,13 +16,17 @@ class Widgets(Enum):
    Inspector = 'Inspector'
    Dummy = 'Dummy'
available_widgets = [w for w in Widgets]
-def generate_page(md: mdd.MachineData, widget):
+def generate_page(md: MachineData, widget):
+    md = as_base(md)
    c = st.columns(3)
-    c[0].metric('#Observations', len(md.index_frame))
-    c[1].metric('#Event Types', len(md.event_series_types))
-    c[2].metric('#Measurement Types', len(md.measurement_series_types))
+    c[0].metric('#Observations', md.observation_count)
+    c[1].metric('#Event Types', len(md.event_specs))
+    c[2].metric('#Measurement Types', len(md.measurement_specs))
    c = st.columns(2)
    cont = c[0].container()
@@ -30,11 +38,11 @@ def generate_page(md: mdd.MachineData, widget):
    if widget == Widgets.Specifications:
        st.markdown('## Events')
-        st.table(make_spec_df(md.event_series_types))
+        st.table(make_spec_df(md.event_specs))
        st.markdown('## Measurements')
-        st.table(make_spec_df(md.measurement_series_types))
-        fig = plotting.create_measurement_frequency_plot(md)
-        st.plotly_chart(fig)
+        st.table(make_spec_df(md.measurement_specs))
+        #fig = plotting.create_measurement_frequency_plot(md)
+        #st.plotly_chart(fig)
    elif widget == Widgets.RawData:
@@ -43,13 +51,15 @@ def generate_page(md: mdd.MachineData, widget):
            st.markdown('## Index')
            st.write(md.index_frame)
        with c2:
-            selection = st.selectbox('Observation Type', [tsc.timeseries_type.type + ':' + tsc.timeseries_type.label for tsc in md.iter_all_timeseries()])
+            selection = st.selectbox('Observation Spec',
+                                     [tsc.timeseries_spec.type + ':' + tsc.timeseries_spec.label for tsc in
+                                      md.series_containers])
            if selection is not None:
                label = selection[2:]
-                if selection[0] == 'E':
-                    st.write(md.get_event_series_collection(label).df)
-                if selection[0] == 'M':
-                    st.write(md.get_measurement_series_collection(label).df)
+                if selection[0] == ObservationTypes.E:
+                    st.write(md.get_events(label).df)
+                if selection[0] == ObservationTypes.M:
+                    st.write(md.get_measurements(label).df)
    elif widget == Widgets.Overview:
        f = plotting.create_overview_plot(md, downsample_to=5_000)
@@ -57,35 +67,39 @@ def generate_page(md: mdd.MachineData, widget):
    elif widget == Widgets.Inspector:
        c1, c2, c3 = st.columns(3)
-        event_type_list = list(md.event_series_types.keys())
-        measurement_type_list = list(md.measurement_series_types.keys())
-        selected_measurement_type = c1.selectbox('Measurement Type', measurement_type_list)
-        fs = list(md.measurement_series_types[selected_measurement_type].features)
-        object_list = list(md.measurement_series[selected_measurement_type].occurring_objects)
+        event_specs_list = list(md.event_series.keys())
+        measurement_spec_list = list(md.measurement_series.keys())
+        selected_measurement_spec = c1.selectbox('Measurement Spec', measurement_spec_list)
+        fs, object_list = [], []
+        if selected_measurement_spec is not None:
+            fs = list(md.measurement_specs[selected_measurement_spec].features)
+            object_list = list(md.measurement_series[selected_measurement_spec].objects)
        measurement_feature_selection = c1.multiselect('Feature Selection', fs)
-        event_selection = c2.multiselect('Event Type Selection', event_type_list)
+        event_selection = c2.multiselect('Event Spec Selection', event_specs_list)
        plot_separately = c2.checkbox('plot separately', False)
        object_selection = c3.selectbox('Object Selection', object_list)
-        f = plotting.create_timeseries_plot(md, measurement_type=selected_measurement_type, object=object_selection,
+        f = plotting.create_timeseries_plot(md, measurement_spec=selected_measurement_spec, obj=object_selection,
                                            events=event_selection, features=measurement_feature_selection,
                                            split_into_subplots=plot_separately)
        st.plotly_chart(f, use_container_width=True)
    elif widget == Widgets.Dummy:
-        measurement_type_list = list(md.measurement_series_types.keys())
-        selected_measurement_type = st.selectbox('Measurement Type', measurement_type_list)
-        f = matplot.create_basic_stacked_subplots(md, measurement_type=selected_measurement_type)
-        st.write(f)
+        measurement_spec_list = list(md.measurement_specs)
+        selected_measurement_spec = st.selectbox('Measurement Spec', measurement_spec_list)
+        if selected_measurement_spec is not None:
+            f = matplot.create_basic_stacked_subplots(md, measurement_type=selected_measurement_spec)
+            st.write(f)
    else:
        st.write('Invalid Widget')
-def make_spec_df(spec_types):
-    df = pd.DataFrame.from_dict({l: list(t.features) for l, t in spec_types.items()},
+def make_spec_df(spec_types: Mapping[str, TSSpec]):
+    df = pd.DataFrame.from_dict({label: list(spec.features) for label, spec in spec_types.items()},
                                orient='index').replace(pd.NA, '')
    df.columns = [f'f_{i}' for i in range(1, len(df.columns) + 1)]
    return df
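For readers unfamiliar with the spec tables rendered by the Specifications widget, the following standalone sketch shows what make_spec_df produces. The NamedTuple is only a stand-in for illustration (the real spec objects come from mdata.core.protocols.TSSpec), and the example labels and features are made up.

# Standalone illustration of make_spec_df with stand-in spec objects.
from collections.abc import Mapping
from typing import NamedTuple

import pandas as pd


class FakeSpec(NamedTuple):
    # Minimal stand-in for a timeseries spec: only the 'features' attribute is used.
    features: tuple[str, ...]


def make_spec_df(spec_types: Mapping[str, FakeSpec]) -> pd.DataFrame:
    # One row per spec label, one column per feature position (f_1, f_2, ...);
    # shorter feature lists are padded with empty strings.
    df = pd.DataFrame.from_dict({label: list(spec.features) for label, spec in spec_types.items()},
                                orient='index').replace(pd.NA, '')
    df.columns = [f'f_{i}' for i in range(1, len(df.columns) + 1)]
    return df


specs = {'temperature': FakeSpec(('value', 'unit')),
         'vibration': FakeSpec(('x', 'y', 'z'))}
print(make_spec_df(specs))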
@@ -3,32 +3,32 @@ import os
import tempfile
import zipfile
-import mdata.core.machine_data_def as mdd
-import mdata.file_formats.csv.shared
import streamlit as st
+from mdata.core import MachineData, MDConcepts
+from mdata.io import read_machine_data, write_machine_data, write_machine_data_h5
+from mdata.file_formats.csv.shared import HeaderFileFormats
from mdata.file_formats.csv.exporting import write_machine_data_custom
-from mdata.file_formats.hdf import write_machine_data_h5
st.title('Machine Data File Export & Conversions')
if 'uploaded_md' not in st.session_state:
    st.write('No data has been uploaded yet')
else:
-    md: mdd.MachineData = st.session_state['uploaded_md']
+    md: MachineData = st.session_state['uploaded_md']
    st.header('File Format Exports')
    st.subheader('CSV')
-    header_format = st.selectbox('Header file type', mdata.file_formats.csv.shared.HeaderFileFormats)
+    header_format = st.selectbox('Header file type', HeaderFileFormats.values)
    do_csv_export = st.button('Convert to CSV')
    zip_download = st.empty()
    header_download = st.empty()
    csv_download = st.empty()
-    st.subheader('HDF')
-    do_hdf_export = st.button('Convert to HDF')
+    st.subheader('HDF 5')
+    do_hdf_export = st.button('Convert to HDF 5')
    hdf_download = st.empty()
    if do_csv_export:
...
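The body of the if do_csv_export: branch is truncated above. Purely as a hypothetical sketch of what a handler of this shape can look like (not the commit's actual code): write_machine_data's argument order and the offer_csv_zip helper are assumptions; tempfile, zipfile, and st.download_button are the stdlib/Streamlit APIs the page already imports or provides.

import os
import tempfile
import zipfile

import streamlit as st
from mdata.io import write_machine_data  # imported in the page above; call signature below is assumed


def offer_csv_zip(md, slot) -> None:
    # Write the machine data as CSV files into a temp dir, zip them, and hand the
    # archive to a st.empty() placeholder as a download button.
    with tempfile.TemporaryDirectory() as tmp:
        write_machine_data(os.path.join(tmp, 'machine_data'), md)  # assumed: (target path, MachineData)
        zip_path = os.path.join(tmp, 'machine_data_csv.zip')
        with zipfile.ZipFile(zip_path, 'w') as zf:
            for name in sorted(os.listdir(tmp)):
                if name.endswith('.csv'):
                    zf.write(os.path.join(tmp, name), arcname=name)
        with open(zip_path, 'rb') as fh:
            slot.download_button('Download CSV zip', fh.read(), file_name='machine_data_csv.zip')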
@@ -25,6 +25,7 @@ elif import_type == 'hdf':
if st.button('clear upload'):
    st.cache_data.clear()
+    if 'uploaded_md' in st.session_state:
        del st.session_state['uploaded_md']
...
Source diff could not be displayed: it is too large.
@@ -6,20 +6,23 @@ authors = ["Leah Tacke genannt Unterberg <leah.tgu@pads.rwth-aachen.de>"]
packages = [{ include = "mdata_app" }]
readme = "README.md"
license = "MIT"
+include = [".streamlit", "logic", "pages", "resources"]
[tool.poetry.dependencies]
-python = "^3.9,!=3.9.7"
+python = "^3.11"
+streamlit = "^1.25"
cvxopt = "^1.3"
-streamlit = "^1.23"
-# mdata = "^0.1.0"
-mdata = { path = "C:/Users/Leah/PycharmProjects/mdata", develop = true }
-# mdata = { git = "https://git-ce.rwth-aachen.de/leah.tgu/mdata.git", branch = "master" }
+# mdata = "^0.1.2"
+pyyaml = "*"
+mdata = { git = "https://git-ce.rwth-aachen.de/machine-data/mdata.git", branch = "master" }
+#mdata = [
+#    { platform = "linux", git = "https://git-ce.rwth-aachen.de/leah.tgu/mdata.git", branch = "master" },
+#    { platform = "windows", path = "C:/Users/Leah/PycharmProjects/mdata", develop = true }
+#]
seaborn = "^0.12.2"
plotly = "^5.15"
tsdownsample = "^0.1.2"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
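Since mdata is now pulled from a git branch rather than a local path, a quick runtime check of what actually got installed can be useful. A small illustration using only the standard library; the distribution name "mdata" is assumed to match the package name.

from importlib import metadata

try:
    # Assumes the distribution is published under the name "mdata".
    print('mdata version:', metadata.version('mdata'))
except metadata.PackageNotFoundError:
    print('mdata is not installed in this environment')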