Skip to content
Snippets Groups Projects
Commit a636b194 authored by Felix Tomski's avatar Felix Tomski
Browse files

init

parents
Branches
No related tags found
No related merge requests found
Showing
with 1682 additions and 0 deletions
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
*/db.sqlite3
demo/staticfiles/*
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# dotenv
.env
# virtualenv
.venv
venv/
ENV/
TOKENS
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
# testing
.pytest_cache/
# emacs backup files
**/*~
# Container image for the ReFrame-results web app.
FROM python:3.11.4-alpine

WORKDIR /usr/src/demo

# Don't write .pyc files; don't buffer stdout/stderr (log-friendly).
# Modernized from legacy `ENV key value` to `ENV key=value` syntax.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

# Build toolchain needed to compile native wheels on Alpine.
# Chained into one RUN so the package index and upgrade state are not
# split across stale layers.
RUN apk update && apk upgrade && \
    apk add make automake gcc g++ subversion python3-dev gfortran

RUN pip install --no-cache-dir --upgrade pip

# Install dependencies before copying the source so this layer is cached
# across source-only edits.
COPY ./requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY . .
# Webapp for visualization of ReFrame results
## Start demo
Setup python environment:
```
python -m venv env
source env/bin/activate
pip install -r requirements.txt
```
Run `./prepare_demo` to start the server at 127.0.0.1:8000.
#!/usr/bin/env python
#
# Create a new superuser (idempotent: does nothing if the user already exists).
from django.contrib.auth import get_user_model

UserModel = get_user_model()

name = "admin"
password = "admin"  # NOTE(review): default credentials — change before deploying

try:
    # An existing user of that name means there is nothing to do.
    UserModel.objects.get(username=name)
except UserModel.DoesNotExist:
    # Narrowed from a bare `except:` so real errors (DB unreachable, etc.)
    # propagate instead of silently creating a duplicate-looking state.
    su = UserModel.objects.create_user(name, password=password)
    su.is_staff = True
    su.is_superuser = True
    su.save()
'''
Configure asgi server for serving asynchronous content such as websockets
Copyright (c) 2018 Gibbs Consulting and others - see CONTRIBUTIONS.md
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
import os
import django
from channels.routing import get_default_application

# Select the demo settings module before django.setup() reads it.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demo.settings")
# Initialise the Django app registry; must run before routing is built.
django.setup()
# ASGI callable consumed by the asgi server (e.g. daphne).
application = get_default_application()
'''Dash demonstration application
TODO attribution here
'''
# The linter doesn't like the members of the html and dcc imports (as they are dynamic?)
#pylint: disable=no-member
import dash
from dash import dcc, html
import plotly.graph_objs as go
#import dpd_components as dpd
import numpy as np
from django_plotly_dash import DjangoDash
#from .urls import app_name
# app_name = "DPD demo application"

# Dash app registered with django-plotly-dash under this slug.
dashboard_name1 = 'dash_example_1'
dash_example1 = DjangoDash(name=dashboard_name1,
                           serve_locally=True,
                           # app_name=app_name
                           )

# Below is a random Dash app.
# I encountered no major problems in using Dash this way. I did encounter problems but it was because
# I was using e.g. Bootstrap inconsistently across the dash layout. Staying consistent worked fine for me.
# Layout: two dropdowns and three output divs that the callbacks below populate.
dash_example1.layout = html.Div(id='main',
                                children=[
                                    html.Div([dcc.Dropdown(id='my-dropdown1',
                                                           options=[{'label': 'New York City', 'value': 'NYC'},
                                                                    {'label': 'Montreal', 'value': 'MTL'},
                                                                    {'label': 'San Francisco', 'value': 'SF'}
                                                                    ],
                                                           value='NYC',
                                                           className='col-md-12',
                                                           ),
                                              html.Div(id='test-output-div')
                                              ]),
                                    dcc.Dropdown(
                                        id='my-dropdown2',
                                        options=[
                                            {'label': 'Oranges', 'value': 'Oranges'},
                                            {'label': 'Plums', 'value': 'Plums'},
                                            {'label': 'Peaches', 'value': 'Peaches'}
                                        ],
                                        value='Oranges',
                                        className='col-md-12',
                                    ),
                                    html.Div(id='test-output-div2'),
                                    html.Div(id='test-output-div3')
                                ])  # end of 'main'
@dash_example1.expanded_callback(
    dash.dependencies.Output('test-output-div', 'children'),
    [dash.dependencies.Input('my-dropdown1', 'value')])
def callback_test(*args, **kwargs):  # pylint: disable=unused-argument
    'Callback to generate test data on each change of the dropdown'
    # Creating a random Graph from a Plotly example:
    N = 500
    random_x = np.linspace(0, 1, N)
    random_y = np.random.randn(N)
    # Create a trace
    trace = go.Scatter(x=random_x,
                       y=random_y)
    data = [trace]
    layout = dict(title='',
                  yaxis=dict(zeroline=False, title='Total Expense (£)',),
                  xaxis=dict(zeroline=False, title='Date', tickangle=0),
                  margin=dict(t=20, b=50, l=50, r=40),
                  height=350,
                  )
    fig = dict(data=data, layout=layout)
    # BUG FIX: 'height' was '100%;' — the trailing semicolon is CSS-file
    # syntax and makes the inline style value invalid (height ignored).
    line_graph = dcc.Graph(id='line-area-graph2', figure=fig,
                           style={'display': 'inline-block', 'width': '100%',
                                  'height': '100%'})
    children = [line_graph]
    return children
@dash_example1.expanded_callback(
    dash.dependencies.Output('test-output-div2', 'children'),
    [dash.dependencies.Input('my-dropdown2', 'value')])
def callback_test2(*args, **kwargs):
    'Callback to exercise session functionality'
    # Report the dropdown selection and the message injected into the
    # Django session by the hosting view.
    selection_msg = "You have selected %s." % (args[0])
    context_msg = "The session context message is '%s'" % (kwargs['session_state']['django_to_dash_context'])
    return [html.Div([selection_msg]), html.Div([context_msg])]
@dash_example1.expanded_callback(
    [dash.dependencies.Output('test-output-div3', 'children')],
    [dash.dependencies.Input('my-dropdown1', 'value')])
# NOTE(review): this redefines `callback_test` from above, shadowing it at
# module level. The decorator has already registered both callbacks so they
# still fire, but the duplicate name should be renamed for clarity.
def callback_test(*args, **kwargs):  # pylint: disable=unused-argument
    'Callback to generate test data on each change of the dropdown'
    # Creating a random Graph from a Plotly example:
    N = 500
    random_x = np.linspace(0, 1, N)
    random_y = np.random.randn(N)
    # Create a trace
    trace = go.Scatter(x=random_x,
                       y=random_y)
    data = [trace]
    layout = dict(title='',
                  yaxis=dict(zeroline=False, title='Total Expense (£)',),
                  xaxis=dict(zeroline=False, title='Date', tickangle=0),
                  margin=dict(t=20, b=50, l=50, r=40),
                  height=350,
                  )
    fig = dict(data=data, layout=layout)
    # BUG FIX: 'height' value carried a stray CSS semicolon ('100%;').
    line_graph = dcc.Graph(id='line-area-graph2', figure=fig,
                           style={'display': 'inline-block', 'width': '100%',
                                  'height': '100%'})
    children = [line_graph]
    # This callback's Output is declared as a list, so wrap the children.
    return [children]
import dash
from dash import Dash, Input, Output, html, callback, dcc
#import dpd_components as dpd
from django_plotly_dash import DjangoDash
from dash import dash_table
import dash_ag_grid as dag
from glob import glob
from os import path
import pandas as pd
import demo.report_to_csv as r2csv
import demo.dash_chaix_utility as ut

report_path = ut.DAILY_REPORT_DIR
# NOTE(review): hard-coded absolute path to a developer machine — this will
# fail anywhere else; should be derived from ut.REPORTS_BASE_DIR or config.
df = pd.read_csv('/home/flx/cluster_testsuite/server/data/av_modules2.csv', sep=',')
#mask = df.applymap(type) != bool
#d = {True: 'TRUE', False: 'FALSE'}
#df = df.where(mask, df.replace(d))
#print(df)

# Dash app slug under django-plotly-dash.
dashboard_name = 'available-modules'
dash_available_modules = DjangoDash(name=dashboard_name,
                                    serve_locally=True,
                                    # app_name=app_name
                                    )
#dash_available_modules.layout = html.Div(
#    [
#    html.Div(id='grid-container', children = [
#        dash_table.DataTable(
#            df.to_dict('records'),
#            [{"name": i, "id": i} for i in df.columns]
#        )
#    ]),
#    ]
#)
# Layout: a single AG Grid showing the available-modules table.
dash_available_modules.layout = html.Div(
    [
        dag.AgGrid(
            id='av-mod-grid',
            rowData=df.to_dict('records'),
            # BUG FIX: AG Grid expects column definitions as dicts with a
            # 'field' key; a plain list of column-name strings renders no
            # usable columns.
            columnDefs=[{'field': col} for col in df.columns],
            # columnSize="autoSize",
            # dashGridOptions={"domLayout": "autoHeight"},
            # defaultColDef={"filter": True, "sortable": True, 'resizable': True},
        )
    ]
)
#@overview_highlevel_daily.callback(
# Output('virtualRowData-grid', 'rowData'),
# Output('text-info', 'children'),
# Input('date', 'value'),
#)
#def update_graph(date):
# print(f'update_graph {date}')
# if not date:
# date = latest_date
# _df = summarize_result_by_testname(r2csv.report_to_df(ut.get_reports_for_period(date, date, report_path), perf=False))
# res = _df.to_dict(orient='records')
# return (res, f'Overview of daily check for {date}')
import dash
from dash import Dash, Input, Output, html, callback, dcc
#import dpd_components as dpd
from django_plotly_dash import DjangoDash
import dash_ag_grid as dag
from glob import glob
import demo.report_to_csv as r2csv
import demo.dash_chaix_utility as ut

report_path = ut.DAILY_REPORT_DIR

# Extra AG Grid column settings: tooltip on the test name and result-based
# cell colouring from the shared utility style.
additional_col_confs = {
    'test': {"tooltipField": 'test',
             "tooltipComponentParams": { "color": '#ECEFF4' }},
    'result': {"cellStyle": ut.GRID_CELL_STYLE},
}
# Columns excluded from the overview grid.
column_filters = ('tag', 'runid', 'description')
# Initial data: the first (newest) available report date.
df = ut.get_df_for_date(ut.get_avail_dates(report_path)[0], report_path)

dashboard_name = 'dash_chaix_overview'
dash_chaix_overview = DjangoDash(name=dashboard_name,
                                 serve_locally=True,
                                 # app_name=app_name
                                 )

# Layout: heading, date-range picker and the result grid.
dash_chaix_overview.layout = html.Div(
    [
        dcc.Markdown("Overview of daily checks"),
        dcc.DatePickerRange(
            id='select-date',
            start_date_placeholder_text="Start Period",
            end_date_placeholder_text="End Period",
            calendar_orientation='vertical',
        ),
        html.Div(id='grid-container', children = [ut.get_grid(df, additional_col_confs, column_filters)]),
    ]
)
@dash_chaix_overview.callback(
    Output('virtualRowData-grid', 'rowData'),
    Input('select-date', 'start_date'),
    Input('select-date', 'end_date'),
)
def update_graph(start_date, end_date):
    'Refresh the grid rows whenever a complete date range is selected.'
    if not (start_date and end_date):
        # Incomplete range: keep showing the initially loaded day.
        return df.to_dict(orient='records')
    reports = ut.get_reports_for_period(start_date, end_date, f'{report_path}')
    period_df = r2csv.report_to_df(reports, perf=False)
    return period_df.to_dict(orient='records')
import dash
from dash import Dash, Input, Output, html, callback, dcc, State, dash_table
import plotly.graph_objs as go
import plotly.express as px
#import dpd_components as dpd
from django_plotly_dash import DjangoDash
import dash_ag_grid as dag
import pandas as pd
import base64
import string
import random
import pandas as pd  # NOTE(review): duplicate import — pandas is already imported above
import demo.report_to_csv as r2csv
import os
from tempfile import TemporaryDirectory
#import plotly.io as pio
#pio.kaleido.scope.mathjax = None

# Shared AG Grid column defaults: flexible width, sortable, filterable.
defaultColDef = {
    "flex": 1,
    "minWidth": 150,
    "sortable": True,
    "resizable": True,
    "filter": True,
}
# Conditional cell colouring by test result (green / orange / red).
cellStyle = {
    "styleConditions": [
        {
            "condition": "params.data.result == 'success'",
            "style": {"backgroundColor": "#A3BE8C"},
        },
        {
            "condition": "params.data.result == 'performance'",
            "style": {"backgroundColor": "#D08770", "color": "white"},
        },
        {
            "condition": "params.data.result != 'success'",
            "style": {"backgroundColor": "#BF616A", "color": "white"},
        },
    ]
}
# Per-column AG Grid tweaks: tooltip on the test name, row-selection
# checkboxes on the testcase column, result colouring.
additional_col_confs = {
    'test': {"tooltipField": 'test',
             "tooltipComponentParams": { "color": '#ECEFF4' },
             },
    'testcase': { "checkboxSelection": True, "headerCheckboxSelection": True },
    'result': {"cellStyle": cellStyle},
}
# BUG FIX: ('description') is just a parenthesised string, so the later
# `col not in colum_filters` membership test did *substring* matching and
# could drop unrelated columns. A one-element tuple needs a trailing comma.
colum_filters = ('description',)
def get_barplot(_df, testcase_name, perfvar):
    """Bar chart of performance values.

    Without a testcase_name all rows are plotted; otherwise the frame is
    restricted to that test case and a single performance variable
    (defaulting to the first one recorded for the test case).
    """
    if not testcase_name:
        return px.bar(_df, x="perfvar", y="value", color="environment", pattern_shape='system', barmode="group")
    dff = _df[_df['testcase'] == testcase_name]
    if not perfvar:
        # Default to the first perfvar recorded for this test case.
        perfvar = dff.perfvar.iloc[0]
    dff = dff[dff['perfvar'] == perfvar]
    # NOTE(review): pattern_shape is 'tag' here but 'system' in the sibling
    # modules — confirm which grouping is intended.
    fig = px.bar(dff, x="perfvar", y="value", color="environment", pattern_shape='tag', barmode="group",
                 # Blank axis label when any unit is missing, else the unit.
                 labels={'value': ("" if dff.unit.isnull().values.any() else dff.unit.iloc[0])},
                 width=1000,
                 title=testcase_name
                 )
    return fig
def get_graphs(_df, idx, testcase):
    'One bar chart per performance variable recorded for the test case.'
    graphs = []
    for pv in _df.perfvar.unique():
        fig = get_barplot(_df, testcase_name=testcase, perfvar=pv)
        graphs.append(dcc.Graph(id={"index": idx, 'name': 'graph'},
                                style={'display': 'block'},
                                figure=fig))
    return graphs
def get_testcase_div(_df, idx):
    'Container with the test case description followed by its graphs.'
    testcase = _df.testcase.unique()[idx]
    _df = _df[_df['testcase'] == testcase]
    header = dcc.Markdown(f'### {testcase}\n{_df.description.iloc[0]}',
                          id={"index": idx, 'name': 'description'})
    return html.Div(id={"index": idx, "name": "container"},
                    children=[header] + get_graphs(_df, idx, testcase))
def write_report_to_pdf(children, file):
    """Export every figure in the rendered graph container to a single PDF.

    Each figure is written to its own temporary PDF and the pages are then
    concatenated into `file` with the external `pdfunite` tool.
    """
    # BUG FIX: this function referenced `pio`, but the module-level
    # `import plotly.io as pio` is commented out, so calling it raised
    # NameError. Import locally (also keeps kaleido an export-only dep).
    import plotly.io as pio
    import subprocess
    from glob import glob
    with TemporaryDirectory() as tmpdir:
        for graph_children in children:
            for graph in graph_children['props']['children']:
                if not 'props' in graph:
                    continue
                if not 'figure' in graph['props']:
                    continue
                # Random file name to avoid collisions (renamed from `hash`,
                # which shadowed the builtin).
                token = ''.join(random.choices(string.ascii_uppercase + string.digits, k=15))
                pio.write_image(graph['props']['figure'], f'{tmpdir}/{token}.pdf')
        # BUG FIX: os.system with an interpolated filename allowed shell
        # injection via the user-supplied PDF name; use an argument list.
        # sorted(glob(...)) reproduces the shell's lexicographic expansion.
        subprocess.run(['pdfunite', *sorted(glob(f'{tmpdir}/*.pdf')), file], check=False)
def get_grid(_df):
    """AG Grid for the comparison dataframe with multi-row selection and CSV export."""
    # Drop filtered-out columns and merge per-column tweaks into each definition.
    columns = [{'field': i, **additional_col_confs.get(i, {})} for i in _df.columns if i not in colum_filters]
    return dag.AgGrid(
        id="selection-checkbox-grid",
        columnDefs=columns,
        rowData=_df.to_dict("records"),
        defaultColDef=defaultColDef,
        dashGridOptions={"rowSelection":"multiple", "domLayout":"autoHeight"},
        csvExportParams={ "fileName": "ag_grid_test.csv", },
    )
def get_diff_grid(df1, df2):
    """Merge two performance frames on their identifying columns and add
    absolute (`value_diff`) and relative (`rel_diff`, percent) deltas."""
    # cols = [col for col in r2csv.CSV_PERF_HEADER+r2csv.CSV_HEADER_POSTFIX if col not in ('value', 'result', 'date', 'runid', 'jobid')]
    # Per-run columns that would break the merge keys.
    drop_cols = ['date', 'tag', 'tags', 'description', 'result']
    key_cols = ['system', 'partition', 'environment', 'test', 'testcase',
                'perfvar', 'unit', 'reference', 'thres_lower', 'thres_upper']
    merged = pd.merge(df1.drop(columns=drop_cols), df2.drop(columns=drop_cols), on=key_cols)
    merged['value_diff'] = merged['value_y'] - merged['value_x']
    merged['rel_diff'] = merged['value_diff'] / merged['value_x'] * 100
    # Put the interesting columns first; everything else trails behind.
    front = ['system', 'partition', 'environment', 'test', 'testcase', 'perfvar',
             'unit', 'value_x', 'value_y', 'rel_diff', 'value_diff']
    remainder = list(set(merged.columns.tolist()) - set(front))
    return merged[front + remainder]
def parse_contents(contents, filenames, dates, tag='user'):
    'Decode uploaded JSON report files and convert them into one dataframe.'
    decoded = []
    for payload, fname in zip(contents, filenames):
        if 'json' not in fname:
            continue  # ignore non-JSON uploads
        # dcc.Upload delivers "data:<mime>;base64,<payload>".
        _, b64_data = payload.split(',')
        decoded.append(base64.b64decode(b64_data))
    return r2csv.report_str_to_df(decoded, perf=True, tag=tag)
dashboard_name = 'dash_chaix_perf_compare'
dash_chaix_perf_compare = DjangoDash(name=dashboard_name,
                                     serve_locally=True,
                                     # app_name=app_name
                                     )
# Layout: two upload zones (baseline and new report), PDF export controls,
# a plot toggle, a CSV download button, and the output containers filled
# in by the callbacks below.
dash_chaix_perf_compare.layout = html.Div([
    dcc.Markdown("Upload a performance report for comparison"),
    dcc.Upload(
        id='upload-data-base',
        multiple=True,
        children=html.Div([
            'Drag and Drop or ',
            html.A('Select Files')
        ]),
        style={
            'width': '100%',
            'height': '60px',
            'lineHeight': '60px',
            'borderWidth': '1px',
            'borderStyle': 'dashed',
            'borderRadius': '5px',
            'textAlign': 'center',
            'margin': '10px'
        },
        # Allow multiple files to be uploaded
    ),
    dcc.Upload(
        id='upload-data-new',
        multiple=True,
        children=html.Div([
            'Drag and Drop or ',
            html.A('Select Files')
        ]),
        style={
            'width': '100%',
            'height': '60px',
            'lineHeight': '60px',
            'borderWidth': '1px',
            'borderStyle': 'dashed',
            'borderRadius': '5px',
            'textAlign': 'center',
            'margin': '10px'
        },
        # Allow multiple files to be uploaded
    ),
    html.Div(dcc.Input(id='pdf-name-input', type='text')),
    html.Button('Save as PDF', id='save-as-pdf', n_clicks=0),
    html.Div(id='save-feedback',
             children='Enter a value and press submit'),
    dcc.Checklist(["Plot"], [], id="plot-checklist", inline=True),
    html.Button("Download CSV", id="csv-button", n_clicks=0),
    html.Div(id='output-data-upload'),
    html.Div(id='graph-container')
])
@dash_chaix_perf_compare.callback(
    Output("selection-checkbox-grid", "exportDataAsCsv"),
    Input("csv-button", "n_clicks"),
)
def export_data_as_csv(n_clicks):
    'Trigger the AG Grid CSV export once the button has been clicked.'
    print('Downloading')
    # False on the initial (n_clicks == 0) call, True afterwards.
    return bool(n_clicks)
@dash_chaix_perf_compare.callback(
    Output('output-data-upload', 'children'),
    Output('graph-container', 'children'),
    Input('upload-data-base', 'contents'),
    Input('upload-data-new', 'contents'),
    Input('plot-checklist', 'value'),
    State('upload-data-base', 'filename'),
    State('upload-data-base', 'last_modified'),
    State('upload-data-new', 'filename'),
    State('upload-data-new', 'last_modified')
)
def update_output(list_of_contents, new_contents, enable_plots, list_of_names,
                  list_of_dates, new_names, new_dates):
    """Render grid (and optional plots) for the uploaded report(s).

    With a single upload the raw report is shown; with both uploads the
    diff grid of baseline vs. new report is shown instead.
    """
    if list_of_contents is None and new_contents is None:
        return ([], [])
    if list_of_contents and new_contents is None:
        # Only the baseline report was uploaded.
        _df = parse_contents(list_of_contents, list_of_names, list_of_dates)
        print(list(_df))
        return (get_grid(_df),
                [get_testcase_div(_df, idx) for idx in range(len(_df.testcase.unique()))] if enable_plots else []
                )
    if new_contents and not list_of_contents:
        # BUG FIX: this case previously fell through and implicitly
        # returned None, which dash rejects as a callback return value.
        _df = parse_contents(new_contents, new_names, new_dates)
        return (get_grid(_df),
                [get_testcase_div(_df, idx) for idx in range(len(_df.testcase.unique()))] if enable_plots else []
                )
    # Both reports present: show the diff grid, plot the combined data.
    _df1 = parse_contents(list_of_contents, list_of_names, list_of_dates, 'old-kernel')
    _df2 = parse_contents(new_contents, new_names, new_dates, 'new-kernel')
    _df = pd.concat([_df1, _df2])
    return (get_grid(get_diff_grid(_df1, _df2)),
            [get_testcase_div(_df, idx) for idx in range(len(_df.testcase.unique()))] if enable_plots else []
            )
@dash_chaix_perf_compare.callback(
    Output('save-feedback', 'children'),
    Input('save-as-pdf', 'n_clicks'),
    Input('graph-container', 'children'),
    State('pdf-name-input', 'value'),
)
def save_as_pdf(n_clicks, children, file_name):
    """Write the currently displayed graphs to a PDF and report the target path."""
    if not file_name:
        # Fall back to a fixed location when no name was entered.
        file_name = '/tmp/performance_comparison.pdf'
    if not children:
        # Nothing rendered yet — no feedback to give.
        return
    write_report_to_pdf(children, file_name)
    return f'Report saved to {file_name}'
import dash
from dash import Dash, Input, State, Output, html, callback, dcc
import plotly.graph_objs as go
import plotly.express as px
#import dpd_components as dpd
import numpy as np
from django_plotly_dash import DjangoDash
import dash_ag_grid as dag
import os
import demo.report_to_csv as r2csv
from glob import glob
import demo.dash_chaix_utility as ut

# Shared AG Grid column defaults.
defaultColDef = {
    "flex": 1,
    "minWidth": 150,
    "sortable": True,
    "resizable": True,
    "filter": True,
}

report_path = ut.DAILY_REPORT_DIR
# NOTE(review): os.listdir order is arbitrary, so this picks *some* report
# directory, not necessarily the latest — confirm intent.
init_report = os.path.join(report_path, os.listdir(report_path)[0])
df = r2csv.report_to_df(glob(f'{init_report}/**/*json', recursive=True), perf=True)

# Conditional result colouring (green / orange / red).
cellStyle = {
    "styleConditions": [
        {
            "condition": "params.data.result == 'success'",
            "style": {"backgroundColor": "#A3BE8C"},
        },
        {
            "condition": "params.data.result == 'performance'",
            "style": {"backgroundColor": "#D08770", "color": "white"},
        },
        {
            "condition": "params.data.result != 'success'",
            "style": {"backgroundColor": "#BF616A", "color": "white"},
        },
    ]
}
# Per-column tweaks: tooltips, selection checkboxes, result colouring.
additional_col_confs = {
    'test': {"tooltipField": 'test',
             "tooltipComponentParams": { "color": '#ECEFF4' },
             },
    'testcase': { "checkboxSelection": True, "headerCheckboxSelection": True },
    'result': {"cellStyle": cellStyle},
}
# BUG FIX: ('description') is a parenthesised string, not a tuple, so the
# later `col not in colum_filters` did substring matching; add the comma.
colum_filters = ('description',)
def get_barplot(_df, testcase_name, perfvar):
    """Bar chart of performance values, optionally restricted to one
    test case and one performance variable."""
    if not testcase_name:
        return px.bar(_df, x="perfvar", y="value", color="environment", pattern_shape='system', barmode="group")
    # BUG FIX: this filtered the module-level `df` instead of the `_df`
    # argument, silently ignoring the caller's dataframe.
    dff = _df[_df['testcase'] == testcase_name]
    if not perfvar:
        # No perfvar requested: plot all perfvars of the test case.
        return px.bar(dff, x="perfvar", y="value", color="environment", pattern_shape='system', barmode="group",
                      labels={'value': dff.unit.iloc[0]}, title=testcase_name)
    dff = dff[dff['perfvar'] == perfvar]
    return px.bar(dff, x="perfvar", y="value", color="environment", pattern_shape='system', barmode="group",
                  labels={'value': dff.unit.iloc[0]}, title=testcase_name)
def get_testcase_div(_df, idx):
    """Markdown header plus bar chart for the idx-th unique test case."""
    testcase = _df.testcase.unique()[idx]
    return html.Div(id={"index": idx, "name": "container"}, children=[
        dcc.Markdown(testcase, id={"index": idx, 'name': 'description'}),
        dcc.Graph(id={"index": idx, 'name': 'graph'}, style={'display': 'block'},
                  # BUG FIX: get_barplot requires the dataframe as its first
                  # positional argument; it was omitted (TypeError at runtime).
                  figure=get_barplot(_df, testcase_name=testcase, perfvar=''))
    ])
def get_grid(_df):
    """AG Grid for the overview dataframe with multi-row selection."""
    # Drop filtered-out columns and merge per-column tweaks into each definition.
    columns = [{'field': i, **additional_col_confs.get(i, {})} for i in _df.columns if i not in colum_filters]
    return dag.AgGrid(
        id="selection-checkbox-grid",
        columnDefs=columns,
        rowData=_df.to_dict("records"),
        defaultColDef=defaultColDef,
        dashGridOptions={"rowSelection":"multiple"},
    )
dashboard_name = 'dash_chaix_perf_overview'
dash_chaix_perf_overview = DjangoDash(name=dashboard_name,
                                      serve_locally=True,
                                      # app_name=app_name
                                      )
# Layout: an upload drop zone plus output containers. The callback that
# would fill them is currently commented out below.
dash_chaix_perf_overview.layout = html.Div([
    dcc.Upload(
        id='upload-data-base',
        multiple=True,
        children=html.Div([
            'Drag and Drop or ',
            html.A('Select Files')
        ]),
        style={
            'width': '100%',
            'height': '60px',
            'lineHeight': '60px',
            'borderWidth': '1px',
            'borderStyle': 'dashed',
            'borderRadius': '5px',
            'textAlign': 'center',
            'margin': '10px'
        },
        # Allow multiple files to be uploaded
    ),
    html.Div(id='output-data-upload'),
    html.Div(id="selections-checkbox-output"),
    html.Div(id='graph-container')
],
    style={"margin": 20},
)
#@dash_chaix_perf_overview.callback(
# Output('output-data-upload', 'children'),
# Output('graph-container', 'children'),
# Input('upload-data-base', 'contents'),
# State('upload-data-base', 'filename'),
# State('upload-data-base', 'last_modified')
#)
#def update_graphs(contents, filename, last_modified):
# _df = parse_contents(list_of_contents, list_of_names, list_of_dates, 'old-kernel')
# return ([get_testcase_div(idx) for idx in range(len(df.testcase.unique()))]
# )
import dash
from dash import Dash, Input, Output, html, callback, dcc
#import dpd_components as dpd
from django_plotly_dash import DjangoDash
import dash_ag_grid as dag
from glob import glob
import demo.report_to_csv as r2csv
import demo.dash_chaix_utility as ut

report_path = ut.DAILY_REPORT_DIR
# All performance reports across all dates, loaded into one dataframe.
df = r2csv.report_to_df(glob(f'{report_path}/**/*json', recursive=True), perf=True)

def get_perfvars(testcase):
    """All perfvar values recorded for the given test case."""
    return df[df['testcase'] == testcase]['perfvar']

def get_description(testcase):
    """Description of the given test case (taken from the first matching row)."""
    return df[df['testcase'] == testcase].description.iloc[0]
dashboard_name = 'dash_chaix_perf_timeline'
dash_chaix_perf_timeline = DjangoDash(name=dashboard_name,
                                      serve_locally=True,
                                      # app_name=app_name
                                      )
# Layout: test case / perfvar dropdowns, a date-range picker, the test case
# description and the timeline graph. Initial values come from the first
# test case in the loaded dataframe.
dash_chaix_perf_timeline.layout = html.Div(children=[
    html.H1(
        children='Performance over time',
        style={
            'textAlign': 'center',
        }
    ),
    html.Div([
        dcc.Dropdown(
            df['testcase'].unique(),
            df.testcase.iloc[0],
            id='crossfilter-testcase',
        ),
    ],
        style={'width': '100%', 'display': 'inline-block'}),
    html.Div([
        dcc.Dropdown(
            options=get_perfvars(df.testcase.iloc[0]).unique(),
            value=get_perfvars(df.testcase.iloc[0]).iloc[0],
            id='crossfilter-perfvar',
        ),
    ],
        style={'width': '100%', 'display': 'inline-block'}),
    dcc.DatePickerRange(
        id='select-date',
        start_date_placeholder_text="Start Period",
        end_date_placeholder_text="End Period",
        calendar_orientation='vertical',
    ),
    html.Div(id='crossfilter-description', children=get_description(df.testcase.iloc[0]), style={
        'textAlign': 'center',
    }),
    dcc.Graph(
        id='example-graph-2',
        figure=ut.get_timeline_plot(df, df.testcase.iloc[0], get_perfvars(df.testcase.iloc[0]).iloc[0],
                                    df.date.iloc[-1], df.date.iloc[0])
    )
])
@dash_chaix_perf_timeline.callback(
    Output('example-graph-2', 'figure'),
    Output('crossfilter-perfvar', 'options'),
    Output('crossfilter-perfvar', 'value'),
    Output('crossfilter-description', 'children'),
    Input('crossfilter-testcase', 'value'),
    Input('crossfilter-perfvar', 'value'),
    Input('select-date', 'start_date'),
    Input('select-date', 'end_date'),
)
def update_graph(testcase_name, perfvar, start_date, end_date):
    'Redraw the timeline and keep the perfvar dropdown consistent with the test case.'
    available = get_perfvars(testcase_name)
    # Fall back to the first perfvar when none is selected or the previous
    # selection does not exist for the newly chosen test case.
    if not perfvar or perfvar not in available.unique():
        perfvar = available.iloc[0]
    figure = ut.get_timeline_plot(df, testcase_name=testcase_name, perfvar=perfvar,
                                  start_date=start_date, end_date=end_date)
    return (figure, available.unique(), perfvar, get_description(testcase_name))
import dash
from dash import Dash, Input, Output, html, callback, dcc
#import dpd_components as dpd
from django_plotly_dash import DjangoDash
import dash_ag_grid as dag
from glob import glob
import demo.report_to_csv as r2csv
import demo.dash_chaix_utility as ut

report_path = ut.DAILY_REPORT_DIR
# All performance reports across all dates in one dataframe.
df = r2csv.report_to_df(glob(f'{report_path}/**/*json', recursive=True), perf=True)
def get_graphs(_df, idx, testcase, start_date, end_date):
    'One timeline plot per performance variable of the test case.'
    graphs = []
    for pv in _df.perfvar.unique():
        fig = ut.get_timeline_plot(_df, testcase_name=testcase, perfvar=pv,
                                   start_date=start_date, end_date=end_date)
        graphs.append(dcc.Graph(id={"index": idx, 'name': 'graph'},
                                style={'display': 'block'},
                                figure=fig))
    return graphs
def get_testcase_div(_df, idx, start_date=None, end_date=None):
    'Markdown header plus timeline graphs for the idx-th unique test case.'
    testcase = _df.testcase.unique()[idx]
    _df = _df[_df['testcase'] == testcase]
    header = dcc.Markdown(f'### {testcase}\n{_df.description.iloc[0]}',
                          id={"index": idx, 'name': 'description'})
    return html.Div(id={"index": idx, "name": "container"},
                    children=[header] + get_graphs(_df, idx, testcase, start_date, end_date))
dashboard_name = 'dash_chaix_perf_timeline_overview'
dash_chaix_perf_timeline_overview = DjangoDash(name=dashboard_name,
                                               serve_locally=True,
                                               # app_name=app_name
                                               )
# Layout: heading, date-range picker and one section per test case.
dash_chaix_perf_timeline_overview.layout = html.Div(children=[
    html.H1(
        children='Performance over time',
        style={
            'textAlign': 'center',
        }
    ),
    dcc.DatePickerRange(
        id='select-date',
        start_date_placeholder_text="Start Period",
        end_date_placeholder_text="End Period",
        calendar_orientation='vertical',
    ),
    html.Div(id='graph-container', children = [get_testcase_div(df, idx) for idx in range(len(df.testcase.unique()))])
])
@dash_chaix_perf_timeline_overview.callback(
    Output('graph-container', 'children'),
    Input('select-date', 'start_date'),
    Input('select-date', 'end_date'),
)
def update_graph(start_date, end_date):
    'Rebuild all per-testcase sections for the selected date range.'
    filtered = ut.get_time_filtered_df(df, start_date, end_date)
    n_cases = len(filtered.testcase.unique())
    return [get_testcase_div(filtered, idx, start_date=start_date, end_date=end_date)
            for idx in range(n_cases)]
import dash
from dash import Dash, Input, Output, html, callback, dcc, ctx
import plotly.graph_objs as go
import plotly.express as px
#import dpd_components as dpd
from django_plotly_dash import DjangoDash
import os
from glob import glob
import demo.dash_chaix_utility as ut

report_path = ut.DAILY_REPORT_DIR
# Newest-first list of report dates; the page starts on the most recent one.
dates = ut.get_avail_dates(report_path)
df = ut.get_df_for_date(dates[0], report_path, perf=True)
def get_barplot(_df, testcase_name, perfvar):
    """Bar chart for one test case/perfvar, with its reference line if any."""
    if not testcase_name:
        return px.bar(_df, x="perfvar", y="value", color="environment", pattern_shape='system', barmode="group")
    dff = _df[_df['testcase'] == testcase_name]
    if not perfvar:
        # BUG FIX: this defaulted to the first *unit* (dff.unit.iloc[0])
        # instead of the first perfvar, so the subsequent perfvar filter
        # matched nothing (cf. the sibling module, which uses perfvar).
        perfvar = dff.perfvar.iloc[0]
    dff = dff[dff['perfvar'] == perfvar]
    # Some frames carry an explicit x column for the bars.
    x_val = 'perfvar_x_value' if 'perfvar_x' in dff else 'perfvar'
    res = px.bar(dff, x=x_val, y="value", orientation='v', color="environment", pattern_shape='system', barmode="group",
                 labels={'value': perfvar})
    if dff.reference.iloc[0]:
        # Draw the reference value as an annotated horizontal line.
        res.add_hline(y=dff.reference.iloc[0],
                      fillcolor='black',
                      annotation_text=f'Ref: {dff.reference.iloc[0]}',
                      annotation_position='bottom right')
    return res
def get_perfvars(_df, testcase):
    'All perfvar entries recorded for the given test case.'
    mask = _df['testcase'] == testcase
    return _df.loc[mask, 'perfvar']
def get_description(_df, testcase):
    'Description string of the first row matching the given test case.'
    matching = _df[_df['testcase'] == testcase]
    return matching.description.iloc[0]
dashboard_name = 'dash_chaix_perf_single'
dash_chaix_perf_single = DjangoDash(name=dashboard_name,
                                    serve_locally=True,
                                    # app_name=app_name
                                    )
# Layout: report-date / test case / perfvar dropdowns, the description and
# the bar chart, initialised from the newest report.
dash_chaix_perf_single.layout = html.Div(children=[
    html.H1(
        children='Performance report',
        style={
            'textAlign': 'center',
        }
    ),
    html.Div([
        dcc.Dropdown(
            dates,
            dates[0],
            id='crossfilter-report-date',
        ),
    ],
        style={'width': '100%', 'display': 'inline-block'}),
    html.Div([
        dcc.Dropdown(
            df['testcase'].unique(),
            df.testcase.iloc[0],
            id='crossfilter-testcase',
        ),
    ],
        style={'width': '100%', 'display': 'inline-block'}),
    html.Div([
        dcc.Dropdown(
            options=get_perfvars(df, df.testcase.iloc[0]).unique(),
            value=get_perfvars(df, df.testcase.iloc[0]).iloc[0],
            id='crossfilter-perfvar',
        ),
    ],
        style={'width': '100%', 'display': 'inline-block'}),
    html.Div(id='crossfilter-description', children=get_description(df, df.testcase.iloc[0]), style={
        'textAlign': 'center',
    }),
    dcc.Graph(
        id='example-graph-2',
        figure=get_barplot(df, df.testcase.iloc[0], get_perfvars(df, df.testcase.iloc[0]).iloc[0])
    )
])
@dash_chaix_perf_single.callback(
    Output('example-graph-2', 'figure'),
    Output('crossfilter-perfvar', 'options'),
    Output('crossfilter-perfvar', 'value'),
    Output('crossfilter-description', 'children'),
    Output('crossfilter-testcase', 'options'),
    Input('crossfilter-testcase', 'value'),
    Input('crossfilter-report-date', 'value'),
    Input('crossfilter-perfvar', 'value'),
)
def update_graph(testcase_name, date, perfvar):
    """Redraw the bar chart and keep both dropdowns consistent.

    NOTE(review): the selected report `date` is currently ignored — the
    per-date reload is commented out below, so the module-level df is
    always shown.
    """
    #new_df = get_df(os.path.join(report_path, date))
    new_df = df
    perfopts = get_perfvars(new_df, testcase_name)
    # Reset the perfvar when it does not exist for the chosen test case.
    if not perfvar or perfvar not in perfopts.values:
        perfvar = perfopts.iloc[0]
    return (get_barplot(new_df, testcase_name=testcase_name, perfvar=perfvar),
            perfopts.unique(),
            perfvar,
            get_description(new_df, testcase_name),
            new_df.testcase.unique()
            )
import os
from datetime import datetime
from glob import glob
import demo.report_to_csv as r2csv
import dash_ag_grid as dag
import plotly.express as px

# Report location: overridable via CHAIX_REPORT_BASE_DIR; defaults to the
# 'data' directory next to this module.
REPORTS_BASE_DIR = os.environ.get('CHAIX_REPORT_BASE_DIR', os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data'))
DAILY_REPORT_DIR = os.path.join(REPORTS_BASE_DIR, 'ci_reports/daily')

# Shared AG Grid result colouring: green for success, orange for a
# performance failure, red for everything else.
GRID_CELL_STYLE = {
    "styleConditions": [
        {
            "condition": "params.data.result == 'success'",
            "style": {"backgroundColor": "#A3BE8C"},
        },
        {
            "condition": "params.data.result == 'performance'",
            "style": {"backgroundColor": "#D08770", "color": "white"},
        },
        {
            "condition": "params.data.result != 'success'",
            "style": {"backgroundColor": "#BF616A", "color": "white"},
        },
    ]
}
def get_avail_dates(basepath, start_date=None, end_date=None):
    """Date-named subdirectories of basepath as 'YYYY-MM-DD' strings,
    newest first, optionally clipped to [start_date, end_date]."""
    fmt = '%Y-%m-%d'
    # glob with a trailing slash matches directories only; normpath strips
    # the trailing separator before the basename is parsed as a date.
    parsed = [datetime.strptime(os.path.basename(os.path.normpath(entry)), fmt)
              for entry in glob(f'{basepath}/*/')]
    lower = datetime.strptime(start_date, fmt) if start_date else None
    upper = datetime.strptime(end_date, fmt) if end_date else None
    selected = []
    for day in sorted(parsed, reverse=True):
        if lower is not None and day < lower:
            continue
        if upper is not None and day > upper:
            continue
        selected.append(day.strftime(fmt))
    return selected
def get_reports_for_period(start_date, end_date, basepath):
    'All JSON report files found under the date directories within the period.'
    reports = []
    for day in get_avail_dates(basepath, start_date, end_date):
        reports.extend(glob(f'{basepath}/{day}/**/*.json', recursive=True))
    return reports
def get_df_for_date(date, path, perf=False):
    """Load every JSON report under path/<date> into a single dataframe."""
    return r2csv.report_to_df(glob(f'{path}/{date}/**/*.json', recursive=True), perf=perf)
def get_grid(_df, additional_col_confs=None, column_filters=()):
    """Build the shared AG Grid for a result dataframe.

    additional_col_confs: optional mapping of column name to extra AG Grid
        settings merged into that column's definition.
    column_filters: column names to omit from the grid.
    """
    # FIX: the default used to be a shared mutable dict ({}); use the
    # None-sentinel idiom so each call gets its own empty mapping.
    if additional_col_confs is None:
        additional_col_confs = {}
    columns = [{'field': i, **additional_col_confs.get(i, {})}
               for i in _df.columns if i not in column_filters]
    return dag.AgGrid(
        id="virtualRowData-grid",
        columnSize="autoSize",
        dashGridOptions={"domLayout": "autoHeight", "tooltipShowDelay": 300, 'alwaysMultiSort': True},
        rowData=_df.to_dict('records'),
        columnDefs=columns,
        defaultColDef={"filter": True, "sortable": True, 'resizable': True, "tooltipComponent": "CustomTooltip"},
        # rowModelType="infinite",
    )
def get_time_filtered_df(_df, start_date, end_date):
    """Rows of _df whose 'date' value lies within [start_date, end_date].

    Bounds may be date/datetime objects or 'YYYY-MM-DD' strings; both are
    normalised to the compact YYYYMMDD form used in the 'date' column.
    """
    if start_date is None and end_date is None:
        return _df
    try:
        start_date = start_date.strftime('%Y%m%d') if start_date else None
        end_date = end_date.strftime('%Y%m%d') if end_date else None
    except AttributeError:
        # Narrowed from `except Exception`: plain strings have no strftime;
        # any other failure should propagate instead of being masked.
        start_date = start_date.replace('-', '') if start_date else None
        end_date = end_date.replace('-', '') if end_date else None
    # Build a range query like "20240101 <= date <= 20240131"; either bound
    # may be absent.
    before = f'{start_date} <= ' if start_date else ''
    after = f' <= {end_date}' if end_date else ''
    return _df.query(f'{before}date{after}')
def get_timeline_plot(_df, testcase_name, perfvar, start_date, end_date):
    """Build a scatter plot of performance values over time for one testcase.

    Filters *_df* down to *testcase_name* (and *perfvar*, if given) within
    [start_date, end_date], then plots value vs. date, colored by environment
    and symboled by system.  A horizontal reference line is drawn when the
    data carries a reference value.  Returns None when nothing matches.
    """
    if not testcase_name:
        return None
    selection = _df[_df['testcase'] == testcase_name]
    if perfvar:
        selection = selection[selection['perfvar'] == perfvar]
    selection = get_time_filtered_df(selection, start_date, end_date)
    if selection.empty:
        return None
    unit_label = selection.unit.iloc[0]
    fig = px.scatter(selection, x="date", y="value", color="environment",
                     symbol='system', labels={'value': unit_label})
    fig.update_xaxes(
        dtick="D1",
        tickformat="%e %b\n%Y")
    reference = selection.reference.iloc[0]
    if reference:
        fig.add_hline(y=reference,
                      fillcolor='black',
                      annotation_text=f'Ref: {reference}',
                      annotation_position='bottom right')
    return fig
{
"session_info": {
"cmdline": "/home/ja664344/cluster-testsuite/rocky/reframe/bin/reframe -C config/claix.py -c ./chaix/basics -c chaix/system/slurm/io --performance-report --purge-env --system=claix18:c18m -p [a-zA-Z-]+-d$ -p builtin -T performance -T ^mpi -t daily --report-file=5997dfddddcb8a90f409101967f3ea61bfdd889dfe778327e90ae2472338b4ed.json --report-junit=5997dfddddcb8a90f409101967f3ea61bfdd889dfe778327e90ae2472338b4ed.xml -r -v -R --cpu-only",
"config_files": [
"<builtin>",
"config/claix.py"
],
"data_version": "3.1",
"hostname": "hpc-build1.hpc.itc.rwth-aachen.de",
"log_files": [
"/tmp/rfm-df_r6_yk.log"
],
"prefix_output": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/output",
"prefix_stage": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/stage",
"user": "ja664344",
"version": "4.4.0",
"workdir": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix",
"time_start": "2024-01-01T06:03:46+0100",
"time_end": "2024-01-01T06:04:56+0100",
"time_elapsed": 69.8601450920105,
"num_cases": 5,
"num_failures": 0
},
"runs": [
{
"num_cases": 5,
"num_failures": 0,
"num_aborted": 0,
"num_skipped": 0,
"runid": 0,
"testcases": [
{
"build_stderr": "rfm_build.err",
"build_stdout": "rfm_build.out",
"dependencies_actual": [],
"dependencies_conceptual": [],
"description": "",
"display_name": "omp_affinity",
"environment": "GCC-d",
"fail_phase": null,
"fail_reason": null,
"filename": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/chaix/basics/omp/affinity/omp_affinity.py",
"fixture": false,
"hash": "055e103f",
"jobid": "41838907",
"job_stderr": "rfm_job.err",
"job_stdout": "rfm_job.out",
"maintainers": [],
"name": "omp_affinity",
"nodelist": [],
"outputdir": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/output/claix18/c18m/GCC-d/omp_affinity",
"perfvars": null,
"prefix": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/chaix/basics/omp/affinity",
"result": "success",
"stagedir": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/stage/claix18/c18m/GCC-d/omp_affinity",
"scheduler": "slurm",
"system": "claix18:c18m",
"tags": [
"omp",
"daily",
"maintenance",
"pinning"
],
"time_compile": 3.378491163253784,
"time_performance": 0.0036478042602539062,
"time_run": 25.991153478622437,
"time_sanity": 0.003817319869995117,
"time_setup": 0.03596019744873047,
"time_total": 31.145076274871826,
"unique_name": "omp_affinity",
"check_vars": {
"valid_prog_environs": [
"+ctc"
],
"valid_systems": [
"*"
],
"descr": "",
"sourcepath": "check_thread_affinity.cpp",
"sourcesdir": "src",
"prebuild_cmds": [],
"postbuild_cmds": [],
"executable": "run_omp_binding_tests_slurm_template.sh",
"executable_opts": [],
"prerun_cmds": [],
"postrun_cmds": [
"python3 ./validate_binding.py --source $PWD"
],
"keep_files": [],
"readonly_files": [],
"tags": [
"omp",
"daily",
"maintenance",
"pinning"
],
"maintainers": [],
"strict_check": true,
"num_tasks": 1,
"num_tasks_per_node": null,
"num_gpus_per_node": null,
"num_cpus_per_task": 48,
"num_tasks_per_core": null,
"num_tasks_per_socket": null,
"use_multithreading": null,
"max_pending_time": null,
"exclusive_access": true,
"local": false,
"modules": [
"Python"
],
"env_vars": {
"NUM_CORES": "48",
"_OMP_RUNTIME": "GOMP"
},
"variables": {
"NUM_CORES": "48",
"_OMP_RUNTIME": "GOMP"
},
"time_limit": 600.0,
"build_time_limit": null,
"extra_resources": {},
"build_locally": true
},
"check_params": {}
},
{
"build_stderr": "rfm_build.err",
"build_stdout": "rfm_build.out",
"dependencies_actual": [],
"dependencies_conceptual": [],
"description": "",
"display_name": "omp_affinity",
"environment": "intel-compilers-d",
"fail_phase": null,
"fail_reason": null,
"filename": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/chaix/basics/omp/affinity/omp_affinity.py",
"fixture": false,
"hash": "055e103f",
"jobid": "41838909",
"job_stderr": "rfm_job.err",
"job_stdout": "rfm_job.out",
"maintainers": [],
"name": "omp_affinity",
"nodelist": [],
"outputdir": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/output/claix18/c18m/intel-compilers-d/omp_affinity",
"perfvars": null,
"prefix": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/chaix/basics/omp/affinity",
"result": "success",
"stagedir": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/stage/claix18/c18m/intel-compilers-d/omp_affinity",
"scheduler": "slurm",
"system": "claix18:c18m",
"tags": [
"omp",
"daily",
"maintenance",
"pinning"
],
"time_compile": 2.705561399459839,
"time_performance": 0.003619670867919922,
"time_run": 25.81383490562439,
"time_sanity": 0.003860950469970703,
"time_setup": 0.019611835479736328,
"time_total": 31.694396495819092,
"unique_name": "omp_affinity",
"check_vars": {
"valid_prog_environs": [
"+ctc"
],
"valid_systems": [
"*"
],
"descr": "",
"sourcepath": "check_thread_affinity.cpp",
"sourcesdir": "src",
"prebuild_cmds": [],
"postbuild_cmds": [],
"executable": "run_omp_binding_tests_slurm_template.sh",
"executable_opts": [],
"prerun_cmds": [],
"postrun_cmds": [
"python3 ./validate_binding.py --source $PWD"
],
"keep_files": [],
"readonly_files": [],
"tags": [
"omp",
"daily",
"maintenance",
"pinning"
],
"maintainers": [],
"strict_check": true,
"num_tasks": 1,
"num_tasks_per_node": null,
"num_gpus_per_node": null,
"num_cpus_per_task": 48,
"num_tasks_per_core": null,
"num_tasks_per_socket": null,
"use_multithreading": null,
"max_pending_time": null,
"exclusive_access": true,
"local": false,
"modules": [
"Python"
],
"env_vars": {
"NUM_CORES": "48",
"_OMP_RUNTIME": "IOMP"
},
"variables": {
"NUM_CORES": "48",
"_OMP_RUNTIME": "IOMP"
},
"time_limit": 600.0,
"build_time_limit": null,
"extra_resources": {},
"build_locally": true
},
"check_params": {}
},
{
"build_stderr": "rfm_build.err",
"build_stdout": "rfm_build.out",
"dependencies_actual": [],
"dependencies_conceptual": [],
"description": "",
"display_name": "omp_affinity",
"environment": "intel-compilers-x-d",
"fail_phase": null,
"fail_reason": null,
"filename": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/chaix/basics/omp/affinity/omp_affinity.py",
"fixture": false,
"hash": "055e103f",
"jobid": "41838911",
"job_stderr": "rfm_job.err",
"job_stdout": "rfm_job.out",
"maintainers": [],
"name": "omp_affinity",
"nodelist": [],
"outputdir": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/output/claix18/c18m/intel-compilers-x-d/omp_affinity",
"perfvars": null,
"prefix": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/chaix/basics/omp/affinity",
"result": "success",
"stagedir": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/stage/claix18/c18m/intel-compilers-x-d/omp_affinity",
"scheduler": "slurm",
"system": "claix18:c18m",
"tags": [
"omp",
"daily",
"maintenance",
"pinning"
],
"time_compile": 1.999302864074707,
"time_performance": 0.003176450729370117,
"time_run": 25.157328367233276,
"time_sanity": 0.0033750534057617188,
"time_setup": 0.018958568572998047,
"time_total": 32.02913308143616,
"unique_name": "omp_affinity",
"check_vars": {
"valid_prog_environs": [
"+ctc"
],
"valid_systems": [
"*"
],
"descr": "",
"sourcepath": "check_thread_affinity.cpp",
"sourcesdir": "src",
"prebuild_cmds": [],
"postbuild_cmds": [],
"executable": "run_omp_binding_tests_slurm_template.sh",
"executable_opts": [],
"prerun_cmds": [],
"postrun_cmds": [
"python3 ./validate_binding.py --source $PWD"
],
"keep_files": [],
"readonly_files": [],
"tags": [
"omp",
"daily",
"maintenance",
"pinning"
],
"maintainers": [],
"strict_check": true,
"num_tasks": 1,
"num_tasks_per_node": null,
"num_gpus_per_node": null,
"num_cpus_per_task": 48,
"num_tasks_per_core": null,
"num_tasks_per_socket": null,
"use_multithreading": null,
"max_pending_time": null,
"exclusive_access": true,
"local": false,
"modules": [
"Python"
],
"env_vars": {
"NUM_CORES": "48",
"_OMP_RUNTIME": "IOMP"
},
"variables": {
"NUM_CORES": "48",
"_OMP_RUNTIME": "IOMP"
},
"time_limit": 600.0,
"build_time_limit": null,
"extra_resources": {},
"build_locally": true
},
"check_params": {}
},
{
"build_stderr": "rfm_build.err",
"build_stdout": "rfm_build.out",
"dependencies_actual": [],
"dependencies_conceptual": [],
"description": "",
"display_name": "omp_affinity",
"environment": "Clang-d",
"fail_phase": null,
"fail_reason": null,
"filename": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/chaix/basics/omp/affinity/omp_affinity.py",
"fixture": false,
"hash": "055e103f",
"jobid": "41838912",
"job_stderr": "rfm_job.err",
"job_stdout": "rfm_job.out",
"maintainers": [],
"name": "omp_affinity",
"nodelist": [],
"outputdir": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/output/claix18/c18m/Clang-d/omp_affinity",
"perfvars": null,
"prefix": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/chaix/basics/omp/affinity",
"result": "success",
"stagedir": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/stage/claix18/c18m/Clang-d/omp_affinity",
"scheduler": "slurm",
"system": "claix18:c18m",
"tags": [
"omp",
"daily",
"maintenance",
"pinning"
],
"time_compile": 5.449052333831787,
"time_performance": 0.003370523452758789,
"time_run": 24.90464758872986,
"time_sanity": 0.003542661666870117,
"time_setup": 0.01900482177734375,
"time_total": 32.77658009529114,
"unique_name": "omp_affinity",
"check_vars": {
"valid_prog_environs": [
"+ctc"
],
"valid_systems": [
"*"
],
"descr": "",
"sourcepath": "check_thread_affinity.cpp",
"sourcesdir": "src",
"prebuild_cmds": [],
"postbuild_cmds": [],
"executable": "run_omp_binding_tests_slurm_template.sh",
"executable_opts": [],
"prerun_cmds": [],
"postrun_cmds": [
"python3 ./validate_binding.py --source $PWD"
],
"keep_files": [],
"readonly_files": [],
"tags": [
"omp",
"daily",
"maintenance",
"pinning"
],
"maintainers": [],
"strict_check": true,
"num_tasks": 1,
"num_tasks_per_node": null,
"num_gpus_per_node": null,
"num_cpus_per_task": 48,
"num_tasks_per_core": null,
"num_tasks_per_socket": null,
"use_multithreading": null,
"max_pending_time": null,
"exclusive_access": true,
"local": false,
"modules": [
"Python"
],
"env_vars": {
"NUM_CORES": "48",
"_OMP_RUNTIME": "IOMP"
},
"variables": {
"NUM_CORES": "48",
"_OMP_RUNTIME": "IOMP"
},
"time_limit": 600.0,
"build_time_limit": null,
"extra_resources": {},
"build_locally": true
},
"check_params": {}
},
{
"build_stderr": null,
"build_stdout": null,
"dependencies_actual": [],
"dependencies_conceptual": [],
"description": "",
"display_name": "HelloBeeond",
"environment": "builtin",
"fail_phase": null,
"fail_reason": null,
"filename": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/chaix/system/slurm/io/beeond.py",
"fixture": false,
"hash": "70c431ae",
"jobid": "41838905",
"job_stderr": "rfm_job.err",
"job_stdout": "rfm_job.out",
"maintainers": [],
"name": "HelloBeeond",
"nodelist": [],
"outputdir": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/output/claix18/c18m/builtin/HelloBeeond",
"perfvars": null,
"prefix": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/chaix/system/slurm/io",
"result": "success",
"stagedir": "/rwthfs/rz/cluster/work/ja664344/dev_runner/rt_user/cache/0/cluster-testsuite-chaix/25tyQaNx/2/cluster-testsuite/chaix/stage/claix18/c18m/builtin/HelloBeeond",
"scheduler": "slurm",
"system": "claix18:c18m",
"tags": [
"beeond",
"daily",
"maintenance",
"io"
],
"time_compile": 0.013689994812011719,
"time_performance": 0.002967357635498047,
"time_run": 66.30336546897888,
"time_sanity": 0.011704206466674805,
"time_setup": 0.016829490661621094,
"time_total": 69.74765801429749,
"unique_name": "HelloBeeond",
"check_vars": {
"valid_prog_environs": [
"+os"
],
"valid_systems": [
"+backend"
],
"descr": "",
"sourcepath": "",
"sourcesdir": null,
"prebuild_cmds": [],
"postbuild_cmds": [],
"executable": "file",
"executable_opts": [
"$BEEOND"
],
"prerun_cmds": [],
"postrun_cmds": [],
"keep_files": [],
"readonly_files": [],
"tags": [
"beeond",
"daily",
"maintenance",
"io"
],
"maintainers": [],
"strict_check": true,
"num_tasks": 1,
"num_tasks_per_node": null,
"num_gpus_per_node": null,
"num_cpus_per_task": null,
"num_tasks_per_core": null,
"num_tasks_per_socket": null,
"use_multithreading": null,
"max_pending_time": null,
"exclusive_access": false,
"local": false,
"modules": [],
"env_vars": {},
"variables": {},
"time_limit": 120.0,
"build_time_limit": null,
"extra_resources": {},
"build_locally": true
},
"check_params": {}
}
]
}
],
"restored_cases": []
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment