Commit 72491429 authored by Leah Tacke genannt Unterberg

hacky update for current maed files

parent 1b2f9fa7
@@ -3,8 +3,9 @@ from enum import Enum
 import pandas as pd
 import streamlit as st
-from mdata.core import MDConcepts, MachineData, ObservationTypes
-from mdata.core.factory import as_base
+from mdata.core import ObservationKinds, MachineDataV2
+from mdata.core.factory import as_v2
+from mdata.core.header import ObservationSpec
 from mdata.core.protocols import TSSpec
 from mdata.visualization import plotting, matplot
 import plotly.express as px
@@ -20,8 +21,8 @@ class Widgets(Enum):
 available_widgets = [w for w in Widgets]
-def generate_page(md: MachineData, widget):
-    md = as_base(md)
+def generate_page(md: MachineDataV2, widget):
+    md = as_v2(md)
     c = st.columns(3)
     c[0].metric('#Observations', md.observation_count)
@@ -30,10 +31,10 @@ def generate_page(md: MachineData, widget):
     c = st.columns(2)
     cont = c[0].container()
-    cont.text(md.index_frame['time'].min())
+    cont.text(md.observation_index['time'].min())
     cont.caption('First Observation')
     cont = c[1].container()
-    cont.text(md.index_frame['time'].max())
+    cont.text(md.observation_index['time'].max())
     cont.caption('Last Observation')
     if widget == Widgets.Specifications:
@@ -49,16 +50,17 @@ def generate_page(md: MachineData, widget):
         c1, c2 = st.columns(2)
         with c1:
             st.markdown('## Index')
-            st.write(md.index_frame)
+            st.write(md.observation_index)
         with c2:
             selection = st.selectbox('Observation Spec',
-                                     [tsc.timeseries_spec.type + ':' + tsc.timeseries_spec.label for tsc in
+                                     [tsc.series_spec.kind + ':' + tsc.series_spec.type_name for tsc in
                                       md.series_containers])
             if selection is not None:
                 label = selection[2:]
-                if selection[0] == ObservationTypes.E:
+                if selection[0] == ObservationKinds.E:
                     st.write(md.get_events(label).df)
-                if selection[0] == ObservationTypes.M:
+                if selection[0] == ObservationKinds.M:
                     st.write(md.get_measurements(label).df)
     elif widget == Widgets.Overview:
@@ -73,7 +75,7 @@ def generate_page(md: MachineData, widget):
         fs, object_list = [], []
         if selected_measurement_spec is not None:
             fs = list(md.measurement_specs[selected_measurement_spec].features)
-            object_list = list(md.measurement_series[selected_measurement_spec].objects)
+            object_list = sorted(md.measurement_series[selected_measurement_spec].objects)
         selected_feature = c1.multiselect('Feature Selection', fs)
         event_selection = c2.multiselect('Event Spec Selection', event_specs_list)
@@ -97,6 +99,7 @@ def generate_page(md: MachineData, widget):
     elif widget == Widgets.SegmentHists:
         c1, c2 = st.columns(2)
         event_specs_list = list(md.event_series.keys())
         start_event = c1.selectbox('Event that marks segment start', event_specs_list)
         end_event = c2.selectbox('Event that marks segment end', event_specs_list)
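For orientation, the renamed v2 accessors used above map the old API onto the new one (index_frame becomes observation_index, ObservationTypes becomes ObservationKinds, as_base becomes as_v2). A minimal sketch of that surface; every name is taken from the changed lines in this commit, but the composition itself is illustrative only:

# Hedged sketch of the renamed mdata v2 accessors; names come from this diff, usage is illustrative.
from mdata.core import ObservationKinds, MachineDataV2
from mdata.core.factory import as_v2
from mdata.io import read_machine_data_zip

md: MachineDataV2 = as_v2(read_machine_data_zip('resources/example.maed'))

print(md.observation_count)                    # total number of observations
print(md.observation_index['time'].min())      # first observation timestamp (formerly md.index_frame)
for tsc in md.series_containers:
    # series_spec.kind distinguishes events (E) from measurements (M); type_name replaces the old label
    print(tsc.series_spec.kind, tsc.series_spec.type_name)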
-import mdata.file_formats.csv
+import mdata.io
 import streamlit as st
 from logic.page_logic import generate_page, available_widgets
@@ -6,7 +6,7 @@ from logic.page_logic import generate_page, available_widgets
 @st.cache_data
 def load_data():
-    return mdata.file_formats.csv.read_machine_data('resources/mock_header.csv', 'resources/mock_data.csv')
+    return mdata.io.read_machine_data_zip('resources/example.maed')
 md = load_data()
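The cached loader now reads a single packaged .maed archive instead of a header/data CSV pair. A standalone sketch of the same pattern, assuming only the read_machine_data_zip call visible above; the path parameter is added here purely for illustration:

import streamlit as st
import mdata.io

@st.cache_data  # parse the archive once per session; reruns reuse the cached machine data object
def load_data(path: str = 'resources/example.maed'):
    return mdata.io.read_machine_data_zip(path)

md = load_data()
st.metric('#Observations', md.observation_count)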
import io
import os
import tempfile
import zipfile

import streamlit as st
from mdata.core import MachineData, MDConcepts
from mdata.io import read_machine_data, write_machine_data, write_machine_data_h5
from mdata.file_formats.csv.shared import HeaderFileFormats
from mdata.file_formats.csv.exporting import write_machine_data_custom

st.title('Machine Data File Export & Conversions')

if 'uploaded_md' not in st.session_state:
    st.write('No data has been uploaded yet')
else:
    md: MachineData = st.session_state['uploaded_md']

    st.header('File Format Exports')
    st.subheader('CSV')
    header_format = st.selectbox('Header file type', HeaderFileFormats.values)
    do_csv_export = st.button('Convert to CSV')
    zip_download = st.empty()
    header_download = st.empty()
    csv_download = st.empty()

    st.subheader('HDF 5')
    do_hdf_export = st.button('Convert to HDF 5')
    hdf_download = st.empty()

    if do_csv_export:
        with io.StringIO(newline='') as hf:
            with io.StringIO(newline='') as df:
                write_machine_data_custom(hf, df, md, header_format=header_format)
                hf.seek(0)
                df.seek(0)
                with io.BytesIO() as buf:
                    with zipfile.ZipFile(buf, 'a', zipfile.ZIP_DEFLATED, False, compresslevel=6) as zippy:
                        zippy.writestr(f'converted_header.{header_format}', hf.getvalue().encode('utf-8'))
                        zippy.writestr('converted_data.csv', df.getvalue().encode('utf-8'))
                    with zip_download:
                        st.download_button('download', buf, 'converted.zip', mime='application/zip')
                # header_download.download_button('download header', hf, f'converted_header.{header_format}',
                #                                 mime=f'text/{header_format}')
                # csv_download.download_button('download csv file', df, 'converted_data.csv', mime=f'text/csv')
        # with tempfile.NamedTemporaryFile('w+', newline='') as hf:
        #     with tempfile.NamedTemporaryFile('w+', newline='') as df:
    elif do_hdf_export:
        with tempfile.TemporaryDirectory() as dr:
            path = os.path.join(dr, 'h5_workaround_conv.h5')
            write_machine_data_h5(path, md)
            hdf_download.download_button('download H5', path, 'converted.h5')
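The CSV export above assembles the whole archive in memory before handing it to st.download_button. A self-contained sketch of that pattern, with plain strings standing in for the mdata writers (the helper name, file names, and payload below are placeholders, not part of this commit):

import io
import zipfile
import streamlit as st

def build_zip(files: dict[str, str]) -> bytes:
    # write every (name, text) pair into one deflated archive held entirely in memory
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w', zipfile.ZIP_DEFLATED, compresslevel=6) as zf:
        for name, text in files.items():
            zf.writestr(name, text.encode('utf-8'))
    return buf.getvalue()  # return raw bytes rather than the stream object

payload = build_zip({'converted_header.csv': 'E,on\n', 'converted_data.csv': 'time,object,type,label\n'})
st.download_button('download', payload, 'converted.zip', mime='application/zip')

Returning bytes via getvalue() avoids depending on the read position of a still-open BytesIO object when the download button is rendered.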
@@ -2,10 +2,9 @@ import io
 import os
 import tempfile
+import mdata.core
 import streamlit as st
-from mdata.file_formats.csv import read_machine_data
-from mdata.file_formats.csv.shared import HeaderFormatLiterals
-from mdata.file_formats.hdf import read_machine_data_h5
+from mdata.io import read_machine_data_zip
 from streamlit.runtime.uploaded_file_manager import UploadedFile
 from logic.switch_page import switch_page
@@ -13,59 +12,26 @@ from logic.switch_page import switch_page
 st.set_page_config(layout="wide")
 st.title('Machine Data Upload')
-import_type = st.selectbox('Import Type', ['csv', 'hdf'])
-files = {}
-if import_type == 'csv':
-    csv_header_upload = st.file_uploader("Upload a header csv, json or yaml file", type=['.csv', '.json', '.yaml'])
-    csv_data_upload = st.file_uploader("Upload a data csv file", type='.csv')
-    files['csv_header_upload'] = csv_header_upload
-    files['csv_data_upload'] = csv_data_upload
-elif import_type == 'hdf':
-    hdf_upload = st.file_uploader('Upload an HDF file', type=['.hdf', '.h5'])
-    files['hdf_upload'] = hdf_upload
+maed_file = st.file_uploader("Upload a .maed/.zip file", type=['.zip', '.maed'])
 if st.button('clear upload'):
     st.cache_data.clear()
     if 'uploaded_md' in st.session_state:
         del st.session_state['uploaded_md']
-def import_hdf(h5f):
-    with tempfile.TemporaryDirectory() as dr:
-        print(tempfile.tempdir)
-        path = os.path.join(dr, 'h5_workaround.h5')
-        with open(path, mode='wb') as f:
-            f.write(h5f.getbuffer())
-        md = read_machine_data_h5(path)
-        return md
-# @st.experimental_memo
-def import_csv(hf: UploadedFile, df: UploadedFile):
-    header_format: HeaderFormatLiterals = 'csv'
-    if 'csv' in hf.type:
-        header_format = 'csv'
-    elif 'json' in hf.type:
-        header_format = 'json'
-    elif 'octet-stream' in hf.type and ('yaml' in hf.name or 'yml' in hf.name):
-        header_format = 'yaml'
-    assert header_format is not None
-    return read_machine_data(hf.getvalue(), df.getvalue(), validity_checking=True, header_format=header_format)
+def read_zip(zf: UploadedFile) -> mdata.core.MDV2:
+    return read_machine_data_zip(zf.getvalue(), validity_checking=False)
 @st.cache_data
-def attempt_import(import_type, **files):
-    if import_type == 'csv' and files.get('csv_header_upload') is not None and files.get('csv_data_upload') is not None:
-        csv_header_upload = files['csv_header_upload']
-        csv_data_upload = files['csv_data_upload']
-        return import_csv(csv_header_upload, csv_data_upload)
-    elif import_type == 'hdf' and files.get('hdf_upload') is not None:
-        hdf_upload = files['hdf_upload']
-        return import_hdf(hdf_upload)
+def attempt_import():
+    if maed_file is not None:
+        return read_zip(maed_file)
-md = attempt_import(import_type, **files)
+if maed_file is not None:
+    md = attempt_import()
+else:
+    md = None
 # selected_widget = st.sidebar.selectbox('Widget', available_widgets, format_func=lambda w: w.value)
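The export page reads the parsed data back from st.session_state['uploaded_md'], so the upload flow presumably stores it there after a successful import (that write is not visible in this diff). A minimal sketch of the handoff under that assumption, using only calls that appear in the changed lines:

# Hedged sketch: uploaded .maed bytes are parsed and stashed in session state for other pages.
import streamlit as st
from mdata.io import read_machine_data_zip

maed_file = st.file_uploader("Upload a .maed/.zip file", type=['.zip', '.maed'])

if maed_file is not None:
    # getvalue() yields the raw archive bytes of the uploaded file
    md = read_machine_data_zip(maed_file.getvalue(), validity_checking=False)
    st.session_state['uploaded_md'] = md
    st.write(f'{md.observation_count} observations loaded')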
Source diff could not be displayed: it is too large.
 [tool.poetry]
 name = "mdata-app"
-version = "0.1.0"
+version = "0.2.0"
 description = ""
 authors = ["Leah Tacke genannt Unterberg <leah.tgu@pads.rwth-aachen.de>"]
 packages = [{ include = "mdata_app" }]
@@ -9,18 +9,18 @@ license = "MIT"
 include = [".streamlit", "logic", "pages", "resources"]
 [tool.poetry.dependencies]
-python = "^3.11"
+python = "^3.11,<3.13"
-streamlit = "^1.27"
+streamlit = "^1.39"
 cvxopt = "^1.3"
-# mdata = "^0.1.2"
-mdata = { git = "https://git-ce.rwth-aachen.de/machine-data/mdata.git", branch = "release" }
+mdata = "^0.3.7"
+# mdata = { git = "https://git-ce.rwth-aachen.de/machine-data/mdata.git", branch = "release" }
 #mdata = [
 #    { platform = "linux", git = "https://git-ce.rwth-aachen.de/leah.tgu/mdata.git", branch = "master" },
 #    { platform = "windows", path = "C:/Users/Leah/PycharmProjects/mdata", develop = true }
 #]
 seaborn = "^0.13"
 plotly = "^5.17"
-tsdownsample = "^0.1.2"
+# tsdownsample = "^0.1.2"
 [build-system]
 requires = ["poetry-core"]
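With the dependency switched from the git checkout to the published package, the release that Poetry actually installed can be confirmed at runtime; a purely illustrative check, assuming 'mdata' and 'streamlit' are the distribution names pinned above:

import importlib.metadata

print(importlib.metadata.version('mdata'))      # expected to report a 0.3.x release after this change
print(importlib.metadata.version('streamlit'))  # expected to satisfy the new ^1.39 constraint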
File added
time,object,type,label,f_1,f_2,f_3
2023-04-24T09:57:10.0,m_1,E,on
2023-04-24T09:57:15.0,m_1,M,source_A,50,6.9,4
2023-04-24T09:58:00.0,m_1,M,source_B,2,-3.5,90
2023-04-24T09:59:45.0,m_1,E,error,"exceptional behavior"
2023-04-24T10:00:00.0,m_1,M,source_B,1,-4,80
2023-04-24T10:00:15.0,m_1,M,source_A,50,7,-4
2023-04-24T10:01:10.0,m_1,E,error,"cascading exceptional behavior"
2023-04-24T10:02:00.0,m_1,M,source_B,.5,-4.5,75
2023-04-24T10:03:15.0,m_1,M,source_A,20,7.2,-40
2023-04-24T10:03:55.0,m_1,E,rapid_unplanned_disassembly,200
2023-04-24T10:04:00.0,m_1,M,source_B,NaN,NaN,NaN
2023-04-24T10:05:00.0,m_1,E,off
E,on
E,off
E,error,code
E,rapid_unplanned_disassembly,explosion_size
M,source_A,x,y,z
M,source_B,f,r,phi
File added
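The mock resources above show the tabular MAED layout: every data row is either an event (E) or a measurement (M) keyed by time, object, type and label, and the header file declares which feature columns each observation type carries. A hedged pandas sketch for inspecting such a pair; the file names come from the previous load_data call and the column semantics are read off the rows above, so treat it as illustration rather than the mdata reader:

import pandas as pd

# observation rows: events (type 'E') and measurements (type 'M') share the
# time, object, type, label columns; f_1..f_3 hold the type-specific payload
data = pd.read_csv('resources/mock_data.csv')

# header rows are ragged (one spec per line), so read them as plain rows rather than a frame
with open('resources/mock_header.csv') as f:
    specs = [line.strip().split(',') for line in f if line.strip()]

events = data[data['type'] == 'E']
measurements = data[data['type'] == 'M']
print(events['label'].value_counts())        # on, off, error, rapid_unplanned_disassembly
print(measurements.groupby('label').size())  # observation counts for source_A and source_B
print([s for s in specs if s[0] == 'M'])     # measurement specs with their feature names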