diff --git a/Dockerfile b/Dockerfile
index 085e88034899f4407f8c1c2f1d7ef707bab45f44..ee699dc905bf65cddbfcc82d67024aa53eff0b42 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,33 +1,16 @@
 FROM continuumio/miniconda3:latest as conda
 
-ENV PYTHONFAULTHANDLER=1 \
-    PYTHONHASHSEED=random \
-    PYTHONUNBUFFERED=1
-
+ENV PYTHONUNBUFFERED=1
 WORKDIR /app
 
-FROM conda as builder
-
-ENV PIP_DEFAULT_TIMEOUT=100 \
-    PIP_DISABLE_PIP_VERSION_CHECK=1 \
-    PIP_NO_CACHE_DIR=1
-
-
-RUN apt-get update && apt-get install -y \
-    build-essential \
-    curl \
-    software-properties-common \
-    git \
-    python3-dev \
-    libblas-dev \
-    liblapack-dev \
-    && rm -rf /var/lib/apt/lists/*
-
 # RUN git clone https://git-ce.rwth-aachen.de/leah.tgu/mdata_app.git .
+COPY .streamlit/ .streamlit/
 COPY logic/ logic/
 COPY pages/ pages/
 COPY resources/ resources/
-COPY poetry.lock pyproject.toml environment.yml streamlit_app.py ./
+COPY environment.yml streamlit_app.py ./
+
+FROM conda as builder
 
 RUN conda env create -f "environment.yml"
 
diff --git a/DockerfileConda b/DockerfileConda
new file mode 100644
index 0000000000000000000000000000000000000000..085e88034899f4407f8c1c2f1d7ef707bab45f44
--- /dev/null
+++ b/DockerfileConda
@@ -0,0 +1,43 @@
+FROM continuumio/miniconda3:latest as conda
+
+ENV PYTHONFAULTHANDLER=1 \
+    PYTHONHASHSEED=random \
+    PYTHONUNBUFFERED=1
+
+WORKDIR /app
+
+FROM conda as builder
+
+ENV PIP_DEFAULT_TIMEOUT=100 \
+    PIP_DISABLE_PIP_VERSION_CHECK=1 \
+    PIP_NO_CACHE_DIR=1
+
+
+RUN apt-get update && apt-get install -y \
+    build-essential \
+    curl \
+    software-properties-common \
+    git \
+    python3-dev \
+    libblas-dev \
+    liblapack-dev \
+    && rm -rf /var/lib/apt/lists/*
+
+# RUN git clone https://git-ce.rwth-aachen.de/leah.tgu/mdata_app.git .
+COPY logic/ logic/
+COPY pages/ pages/
+COPY resources/ resources/
+COPY poetry.lock pyproject.toml environment.yml streamlit_app.py ./
+
+RUN conda env create -f "environment.yml"
+
+
+FROM builder as final
+
+EXPOSE 8501
+
+HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
+
+SHELL ["conda", "run", "-n", "mdata-app", "/bin/bash", "-c"]
+# CMD ["./docker-entrypoint.sh"]
+ENTRYPOINT ["conda", "run", "--no-capture-output", "-n", "mdata-app", "streamlit", "run", "streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
\ No newline at end of file
diff --git a/DockerfilePoetry b/DockerfilePoetry
index 8e7f555b383725c982421b302a6782e3b69203f0..a832853034e56b3daef41598d7cacb28174f5cd2 100644
--- a/DockerfilePoetry
+++ b/DockerfilePoetry
@@ -26,6 +26,7 @@ RUN apt-get update && apt-get install -y \
     && rm -rf /var/lib/apt/lists/*
 
 # RUN git clone https://git-ce.rwth-aachen.de/leah.tgu/mdata_app.git .
+COPY .streamlit/ .streamlit/
 COPY logic/ logic/
 COPY pages/ pages/
 COPY resources/ resources/
diff --git a/pages/demo_page.py b/pages/demo.py
similarity index 100%
rename from pages/demo_page.py
rename to pages/demo.py
diff --git a/pages/export.py b/pages/export.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b2145a69ffff85111408e129fc64b2e54cb6f0f
--- /dev/null
+++ b/pages/export.py
@@ -0,0 +1,59 @@
+import io
+import os
+import tempfile
+import zipfile
+
+import mdata.core.machine_data_def as mdd
+import mdata.file_formats.csv.shared
+import streamlit as st
+from mdata.file_formats.csv.exporting import write_machine_data_custom
+from mdata.file_formats.hdf import write_machine_data_h5
+
+st.title('Machine Data File Export & Conversions')
+
+if 'uploaded_md' not in st.session_state:
+    st.write('No data has been uploaded yet')
+else:
+    md: mdd.MachineData = st.session_state['uploaded_md']
+
+    st.header('File Format Exports')
+
+    st.subheader('CSV')
+
+    header_format = st.selectbox('Header file type', mdata.file_formats.csv.shared.HeaderFileFormats)
+    do_csv_export = st.button('Convert to CSV')
+
+    zip_download = st.empty()
+    header_download = st.empty()
+    csv_download = st.empty()
+
+    st.subheader('HDF')
+    do_hdf_export = st.button('Convert to HDF')
+    hdf_download = st.empty()
+
+    if do_csv_export:
+        with io.StringIO(newline='') as hf:
+            with io.StringIO(newline='') as df:
+                write_machine_data_custom(hf, df, md, header_format=header_format)
+                hf.seek(0)
+                df.seek(0)
+                with io.BytesIO() as buf:
+                    with zipfile.ZipFile(buf, 'a', zipfile.ZIP_DEFLATED, False, compresslevel=6) as zippy:
+                        zippy.writestr(f'converted_header.{header_format}', hf.getvalue().encode('utf-8'))
+                        zippy.writestr('converted_data.csv', df.getvalue().encode('utf-8'))
+
+                    with zip_download:
+                        st.download_button('download', buf, 'converted.zip', mime='application/zip')
+                    # header_download.download_button('download header', hf, f'converted_header.{header_format}',
+                    #                                 mime=f'text/{header_format}')
+                    # csv_download.download_button('download csv file', df, 'converted_data.csv', mime=f'text/csv')
+
+    # with tempfile.NamedTemporaryFile('w+', newline='') as hf:
+    #     with tempfile.NamedTemporaryFile('w+', newline='') as df:
+
+
+    elif do_hdf_export:
+        with tempfile.TemporaryDirectory() as dr:
+            path = os.path.join(dr, 'h5_workaround_conv.h5')
+            write_machine_data_h5(path, md)
+            hdf_download.download_button('download H5', path, 'converted.h5')
diff --git a/pages/export_page.py b/pages/export_page.py
deleted file mode 100644
index 8f7f3598fa822e80e5b25bc7ae76a16c4143c44d..0000000000000000000000000000000000000000
--- a/pages/export_page.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import tempfile
-
-import mdata.core.machine_data_def as mdd
-import mdata.file_formats.csv.shared
-import streamlit as st
-from mdata.file_formats.csv.exporting import write_machine_data_custom
-from mdata.file_formats.hdf import write_machine_data_h5
-
-st.title('Machine Data File Export & Conversions')
-
-if 'uploaded_md' not in st.session_state:
-    st.write('No data has been uploaded yet')
-else:
-    md: mdd.MachineData = st.session_state['uploaded_md']
-
-    with st.container():
-        st.header('File Format Exports')
-
-        st.subheader('CSV')
-
-        header_format = st.selectbox('Header file type', mdata.file_formats.csv.shared.HeaderFileFormats)
-        do_csv_export = st.button('Convert to CSV')
-
-        header_download = st.empty()
-        csv_download = st.empty()
-
-        st.subheader('HDF')
-        do_hdf_export = st.button('Convert to HDF')
-        hdf_download = st.empty()
-
-        if do_csv_export:
-            with tempfile.NamedTemporaryFile('w+', newline='') as hf:
-                with tempfile.NamedTemporaryFile('w+', newline='') as df:
-                    write_machine_data_custom(hf, df, md, header_format=header_format)
-                    hf.seek(0)
-                    df.seek(0)
-                    header_download.download_button('download header', hf.file, f'converted_header.{header_format}',
-                                                    mime=f'text/{header_format}')
-                    csv_download.download_button('download csv file', df.file, 'converted_data.csv', mime=f'text/csv')
-
-        elif do_hdf_export:
-            with tempfile.NamedTemporaryFile('w+b') as hdf:
-                write_machine_data_h5(hdf.file, md)
-                hdf.seek(0)
-                hdf_download.download_button('download h5 file', hdf.file, 'converted.h5')
diff --git a/pages/upload_page.py b/pages/upload.py
similarity index 71%
rename from pages/upload_page.py
rename to pages/upload.py
index feaf5e5804eb54a6c40ebc6b239c1ad48c01fcc5..05bf62363478be1315355f986adc55cf9baf7a45 100644
--- a/pages/upload_page.py
+++ b/pages/upload.py
@@ -1,13 +1,15 @@
+import io
 import os
 import tempfile
+
 import streamlit as st
 from mdata.file_formats.csv import read_machine_data
 from mdata.file_formats.hdf import read_machine_data_h5
-from logic.page_logic import available_widgets, generate_page
+from logic.switch_page import switch_page
 
 st.set_page_config(layout="wide")
-st.title('Machine Data Viewer')
+st.title('Machine Data Upload')
 
 import_type = st.selectbox('Import Type', ['csv', 'hdf'])
 
 
@@ -21,19 +23,19 @@ elif import_type == 'hdf':
     hdf_upload = st.file_uploader('Upload an HDF file', type=['.hdf', '.h5'])
     files['hdf_upload'] = hdf_upload
 
+if st.button('clear upload'):
+    st.cache_data.clear()
+    del st.session_state['uploaded_md']
 
-# @st.experimental_memo
-def import_hdf(f):
-    with tempfile.NamedTemporaryFile() as fp:
-        fp.write(f.getbuffer())
-        fp.seek(0)
-        md = read_machine_data_h5(fp.file)
-        return md
-    # fn = os.path.join('uploads', f.name)
-    # with open(fn, 'wb') as fd:
-    #     fd.write(f.getbuffer())
-    #     fd.flush()
-    # return read_machine_data_h5(fn)
+
+def import_hdf(h5f):
+    with tempfile.TemporaryDirectory() as dr:
+        print(tempfile.tempdir)
+        path = os.path.join(dr, 'h5_workaround.h5')
+        with open(path, mode='wb') as f:
+            f.write(h5f.getbuffer())
+        md = read_machine_data_h5(path)
+    return md
 
 
 # @st.experimental_memo
@@ -57,10 +59,13 @@ def attempt_import(import_type, **files):
         hdf_upload = files['hdf_upload']
         return import_hdf(hdf_upload)
 
+
 md = attempt_import(import_type, **files)
 
-selected_widget = st.sidebar.selectbox('Widget', available_widgets, format_func=lambda w: w.value)
+# selected_widget = st.sidebar.selectbox('Widget', available_widgets, format_func=lambda w: w.value)
 
 if md is not None:
     st.session_state['uploaded_md'] = md
-    generate_page(md, selected_widget)
+    switch_page('visualization')
+
+# generate_page(md, selected_widget)
diff --git a/pages/visualization.py b/pages/visualization.py
new file mode 100644
index 0000000000000000000000000000000000000000..5aa0daab14eb03828cb6124abc5c93a1a6dd3e80
--- /dev/null
+++ b/pages/visualization.py
@@ -0,0 +1,12 @@
+import streamlit as st
+
+from logic.page_logic import available_widgets, generate_page
+
+st.set_page_config(layout="wide")
+st.title('Machine Data Visualization')
+
+selected_widget = st.sidebar.selectbox('Widget', available_widgets, format_func=lambda w: w.value)
+
+md = st.session_state.get('uploaded_md')
+if md is not None:
+    generate_page(md, selected_widget)
diff --git a/streamlit_app.py b/streamlit_app.py
index c5fbc3122c489a5b18e5986a712f2915d9a3e8ec..5270817e2cf0f0e79a1abd15304ff610d230efcd 100644
--- a/streamlit_app.py
+++ b/streamlit_app.py
@@ -6,12 +6,12 @@ st.set_page_config(layout="wide")
 
 st.title('Welcome to the Machine Data Demo Website')
 
-got_to_demo = st.button('Got to Demo')
-go_to_file_viewer = st.button('Got to File Viewer')
-go_to_exporter = st.button('Got to Exporter')
+got_to_demo = st.button('Go to Demo')
+go_to_file_viewer = st.button('Go to File Upload')
+go_to_exporter = st.button('Go to Export')
 
 if got_to_demo:
-    switch_page('demo_page')
+    switch_page('demo')
 elif go_to_file_viewer:
-    switch_page('upload_page')
+    switch_page('upload')
 elif go_to_exporter:
-    switch_page('export_page')
+    switch_page('export')