diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000000000000000000000000000000000000..395e5b7faa9f4493d025170e808a438fb6eb22cf
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,10 @@
+builds
+logs
+gencodes
+*.log
+*.html
+*.tex
+*.csv
+*~
+.git
+.vscode
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..33b1c317dd563a4e8a44e898d99f6f130120e4b2
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,16 @@
+*~
+gencodes/
+builds/
+logs
+*.tex
+*.log
+*.html
+__pycache__/
+*.pyc
+scripts/generators/__pycache__/
+scripts/tools/__pycache__/
+scripts/__pycache__/
+
+# OS generated files #
+######################
+*.DS_Store
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100755
index 0000000000000000000000000000000000000000..c967c5c54a7c0a137663d05f44b91ab98a714e78
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,552 @@
+# Use our own Docker image, which was built and pushed with the following commands:
+#   docker login registry.gitlab.inria.fr
+#   docker build -t registry.gitlab.inria.fr/quinson/mbi2 .
+#   docker push registry.gitlab.inria.fr/quinson/mbi2
+# image: registry.gitlab.com/mwapl/benchmarking-mpi:latest
+# image: registry.gitlab.inria.fr/quinson/mbi2:latest
+image: registry.hub.docker.com/mquinson/mbi
+
+variables:
+    GIT_SUBMODULE_STRATEGY: none
+
+stages:
+    - build
+    - test
+    - gather # For the tests that are split into several jobs (ISP, MUST, CIVL), re-run them all in one job
+    - deploy
+
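+# The slowest tools (MUST, CIVL, ISP, smpivg) are split into several jobs with
+# '-c run -b N/M' (run only the N-th of M batches of tests); the matching
+# '-all' job in the gather stage then reruns MBI.py with the collected log
+# artifacts to produce a single consolidated result.
+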
+build-must:
+    stage: build
+    needs: []
+    script:
+        - scripts/ensure_python3 ./MBI.py -x must -c build
+    artifacts:
+        untracked: false
+        paths:
+            - builds/MUST
+
+test-must-1:
+    stage: test
+    needs:
+        - job: build-must
+          artifacts: true
+    before_script:
+        - apt-get install -y psmisc
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x must -c run -b 1/10
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+test-must-2:
+    stage: test
+    needs:
+        - job: build-must
+          artifacts: true
+    before_script:
+        - apt-get install -y psmisc
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x must -c run -b 2/10
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+test-must-3:
+    stage: test
+    needs:
+        - job: build-must
+          artifacts: true
+    before_script:
+        - apt-get install -y psmisc
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x must -c run -b 3/10
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+test-must-4:
+    stage: test
+    needs:
+        - job: build-must
+          artifacts: true
+    before_script:
+        - apt-get install -y psmisc
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x must -c run -b 4/10
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+test-must-5:
+    stage: test
+    needs:
+        - job: build-must
+          artifacts: true
+    before_script:
+        - apt-get install -y psmisc
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x must -c run -b 5/10
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+test-must-6:
+    stage: test
+    needs:
+        - job: build-must
+          artifacts: true
+    before_script:
+        - apt-get install -y psmisc
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x must -c run -b 6/10
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+test-must-7:
+    stage: test
+    needs:
+        - job: build-must
+          artifacts: true
+    before_script:
+        - apt-get install -y psmisc
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x must -c run -b 7/10
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+test-must-8:
+    stage: test
+    needs:
+        - job: build-must
+          artifacts: true
+    before_script:
+        - apt-get install -y psmisc
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x must -c run -b 8/10
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+test-must-9:
+    stage: test
+    needs:
+        - job: build-must
+          artifacts: true
+    before_script:
+        - apt-get install -y psmisc
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x must -c run -b 9/10
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+test-must-10:
+    stage: test
+    needs:
+        - job: build-must
+          artifacts: true
+    before_script:
+        - apt-get install -y psmisc
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x must -c run -b 10/10
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+test-must-all:
+    stage: gather
+    needs:
+        - job: build-must
+          artifacts: true
+        - job: test-must-1
+          artifacts: true
+        - job: test-must-2
+          artifacts: true
+        - job: test-must-3
+          artifacts: true
+        - job: test-must-4
+          artifacts: true
+        - job: test-must-5
+          artifacts: true
+        - job: test-must-6
+          artifacts: true
+        - job: test-must-7
+          artifacts: true
+        - job: test-must-8
+          artifacts: true
+        - job: test-must-9
+          artifacts: true
+        - job: test-must-10
+          artifacts: true
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x must
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs/*
+
+build-aislinn:
+    stage: build
+    needs: []
+    image: ubuntu:18.04
+    script:
+        - scripts/ensure_python3 ./MBI.py -x aislinn -c build
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - tools/aislinn-git/*
+
+test-aislinn:
+    stage: test
+    image: ubuntu:18.04
+    needs:
+        - job: build-aislinn
+          artifacts: true
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x aislinn
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+# MPI-SV refuses to build under nested virtualization, which is what we get when using Docker on GitLab CI
+#test-mpisv:
+#    stage: test
+#    image: mpisv/mpi-sv
+#    needs: []
+#    tags:
+#        - baremetal
+#    image: registry.hub.docker.com/mquinson/mbi
+#    script:
+#        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+#        - scripts/ensure_python3 ./MBI.py -x mpisv
+#    artifacts:
+#        untracked: false
+#        when: always
+#        paths:
+#            - logs
+
+test-civl-1:
+    stage: test
+    needs: []
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x civl -c run -b 1/4
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+test-civl-2:
+    stage: test
+    needs: []
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x civl -c run -b 2/4
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+test-civl-3:
+    stage: test
+    needs: []
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x civl -c run -b 3/4
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+test-civl-4:
+    stage: test
+    needs: []
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x civl -c run -b 4/4
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+test-civl-all:
+    stage: gather
+    needs:
+        - job: test-civl-1
+          artifacts: true
+        - job: test-civl-2
+          artifacts: true
+        - job: test-civl-3
+          artifacts: true
+        - job: test-civl-4
+          artifacts: true
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x civl
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs/*
+
+build-parcoach:
+    stage: build
+    needs: []
+    script:
+        - scripts/ensure_python3 ./MBI.py -x parcoach -c build
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - builds/parcoach/*
+
+test-parcoach:
+    stage: test
+    needs:
+        - job: build-parcoach
+          artifacts: true
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x parcoach
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+build-simgrid:
+    stage: build
+    needs: []
+    script:
+        - scripts/ensure_python3 ./MBI.py -x simgrid -c build
+    cache:
+        paths:
+            - SimGrid/*
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - builds/SimGrid
+
+test-simgrid:
+    stage: test
+    needs:
+        - job: build-simgrid
+          artifacts: true
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x simgrid
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+test-smpi:
+    stage: test
+    needs:
+        - job: build-simgrid
+          artifacts: true
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x smpi
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+test-smpivg-1:
+    stage: test
+    needs:
+        - job: build-simgrid
+          artifacts: true
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x smpivg -b 1/3
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+test-smpivg-2:
+    stage: test
+    needs:
+        - job: build-simgrid
+          artifacts: true
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x smpivg -b 2/3
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+test-smpivg-3:
+    stage: test
+    needs:
+        - job: build-simgrid
+          artifacts: true
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x smpivg -b 3/3
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+test-smpivg-all:
+    stage: gather
+    needs:
+        - job: test-smpivg-1
+          artifacts: true
+        - job: test-smpivg-2
+          artifacts: true
+        - job: test-smpivg-3
+          artifacts: true
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x smpivg
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+#test-itac:
+#    stage: test
+#    needs: []
+#    tags:
+#        - baremetal
+#    image: registry.hub.docker.com/mquinson/mbi
+#    script:
+#        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+#        - scripts/ensure_python3 ./MBI.py -x itac -c run
+#    artifacts:
+#        untracked: false
+#        when: always
+#        paths:
+#            - logs
+
+build-isp:
+    stage: build
+    needs: []
+    script:
+        - scripts/ensure_python3 ./MBI.py -x isp -c build
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - ./builds/ISP
+
+test-isp-1:
+    stage: test
+    needs:
+        - job: build-isp
+          artifacts: true
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x isp -c run -b 1/2
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+test-isp-2:
+    stage: test
+    needs:
+        - job: build-isp
+          artifacts: true
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x isp -c run -b 2/2
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs
+
+test-isp-all:
+    stage: gather
+    needs:
+        - job: test-isp-1
+          artifacts: true
+        - job: test-isp-2
+          artifacts: true
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -x isp
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - logs/*
+
+
+latex:
+    stage: deploy
+    when: always
+    script:
+        - apt update ; apt -y install texlive-latex-base texlive-latex-extra texlive-pictures
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -c latex
+        - cd scripts ; ./tools/gen_latex.py ; cd ..
+        - cd latex ; pdflatex -batch dashboard.tex ; cd ..
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - latex
+
+pages:
+    stage: deploy
+    when: always
+    script:
+        - rm -rf /MBI/*; scripts/ensure_python3 ./MBI.py -c generate
+        - scripts/ensure_python3 ./MBI.py -c html
+        - pwd ; ls
+        - mkdir public
+        - cp -r *.html gencodes img logs public/
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - public
diff --git a/Data/CUN.svg b/Data/CUN.svg
new file mode 100755
index 0000000000000000000000000000000000000000..f0f0c384a600622e1cf621f3159ecbd116e4283b
--- /dev/null
+++ b/Data/CUN.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 40 45">
+
+    <text x="20" y="40" style="fill: orange; stroke: #000000;  font-size: 48px; text-anchor: middle">C</text>
+</svg>
diff --git a/Data/RSF.svg b/Data/RSF.svg
new file mode 100755
index 0000000000000000000000000000000000000000..20a1fa3f6dbeba05cbf7638befa4ff7cd5a28cf3
--- /dev/null
+++ b/Data/RSF.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 40 45">
+
+    <text x="20" y="40" style="fill: yellow; stroke: #000000;  font-size: 48px; text-anchor: middle">R</text>
+</svg>
diff --git a/Data/TO.svg b/Data/TO.svg
new file mode 100755
index 0000000000000000000000000000000000000000..41675d2339a62bff56ea30afffcd9e480f636f0c
--- /dev/null
+++ b/Data/TO.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 40 45">
+
+    <text x="20" y="40" style="fill: blue; stroke: #000000;  font-size: 48px; text-anchor: middle">T</text>
+</svg>
diff --git a/Data/caption.svg b/Data/caption.svg
new file mode 100755
index 0000000000000000000000000000000000000000..f01a218d4da81669ddd7956a2166369cac10c53f
--- /dev/null
+++ b/Data/caption.svg
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"
+     width="526.3157894736842" height="126.05042016806722" viewBox="0 -126.05042016806722 526.3157894736842 126.05042016806722">
+<defs>
+</defs>
+<image x="10" y="-118.65546218487394" width="35.08771929824561" height="12.605042016806722" xlink:href="data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iaXNvLTg4NTktMSI/Pg0KPCEtLSBHZW5lcmF0b3I6IEFkb2JlIElsbHVzdHJhdG9yIDIxLjEuMCwgU1ZHIEV4cG9ydCBQbHVnLUluIC4gU1ZHIFZlcnNpb246IDYuMDAgQnVpbGQgMCkgIC0tPg0KPHN2ZyB2ZXJzaW9uPSIxLjEiIGlkPSJMYXllcl8xIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHhtbG5zOnhsaW5rPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5L3hsaW5rIiB4PSIwcHgiIHk9IjBweCINCgkgdmlld0JveD0iMCAwIDgwLjU4OCA2MS4xNTgiIHN0eWxlPSJlbmFibGUtYmFja2dyb3VuZDpuZXcgMCAwIDgwLjU4OCA2MS4xNTg7IiB4bWw6c3BhY2U9InByZXNlcnZlIj4NCjxwYXRoIHN0eWxlPSJmaWxsOmdyZWVuOyIgZD0iTTI5LjY1OCw2MS4xNTdjLTEuMjM4LDAtMi40MjctMC40OTEtMy4zMDUtMS4zNjlMMS4zNywzNC44MDhjLTEuODI2LTEuODI1LTEuODI2LTQuNzg1LDAtNi42MTENCgljMS44MjUtMS44MjYsNC43ODYtMS44MjcsNi42MTEsMGwyMS40ODUsMjEuNDgxTDcyLjQyNiwxLjU2MWMxLjcxOS0xLjkyNCw0LjY3NC0yLjA5NCw2LjYwMS0wLjM3NA0KCWMxLjkyNiwxLjcyLDIuMDk0LDQuNjc1LDAuMzc0LDYuNjAxTDMzLjE0NSw1OS41OTVjLTAuODU2LDAuOTU5LTIuMDcsMS41MjMtMy4zNTUsMS41NkMyOS43NDYsNjEuMTU2LDI5LjcwMiw2MS4xNTcsMjkuNjU4LDYxLjE1N3oNCgkiLz4NCjwvc3ZnPg0K" />
+<rect x="10" y="-118.65546218487394" width="35.08771929824561" height="12.605042016806722" fill="none" stroke="black" stroke-width="0.2" />
+<text x="62.63157894736841" y="-106.05042016806722" font-size="15" fill="black" dy="0em">Right</text>
+<image x="10" y="-99.74789915966386" width="35.08771929824561" height="12.605042016806722" xlink:href="data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHN2ZyB2ZXJzaW9uPSIxLjEiIGlkPSJjcm9zcy0xNSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB3aWR0aD0iMTVweCIgaGVpZ2h0PSIxNXB4IiB2aWV3Qm94PSIwIDAgMTUgMTUiPgogIDxwYXRoIGQ9Ik0yLjY0LDEuMjdMNy41LDYuMTNsNC44NC00Ljg0QzEyLjUxMTQsMS4xMDc2LDEyLjc0OTcsMS4wMDI5LDEzLDFjMC41NTIzLDAsMSwwLjQ0NzcsMSwxJiN4QTsmI3g5O2MwLjAwNDcsMC4yNDc4LTAuMDkzLDAuNDg2Ni0wLjI3LDAuNjZMOC44NCw3LjVsNC44OSw0Ljg5YzAuMTY0OCwwLjE2MTIsMC4yNjE1LDAuMzc5NiwwLjI3LDAuNjFjMCwwLjU1MjMtMC40NDc3LDEtMSwxJiN4QTsmI3g5O2MtMC4yNTc3LDAuMDEwNy0wLjUwOC0wLjA4NzMtMC42OS0wLjI3TDcuNSw4Ljg3bC00Ljg1LDQuODVDMi40NzkzLDEzLjg5NjMsMi4yNDUzLDEzLjk5NzEsMiwxNGMtMC41NTIzLDAtMS0wLjQ0NzctMS0xJiN4QTsmI3g5O2MtMC4wMDQ3LTAuMjQ3OCwwLjA5My0wLjQ4NjYsMC4yNy0wLjY2TDYuMTYsNy41TDEuMjcsMi42MUMxLjEwNTIsMi40NDg4LDEuMDA4NSwyLjIzMDQsMSwyYzAtMC41NTIzLDAuNDQ3Ny0xLDEtMSYjeEE7JiN4OTtDMi4yNDA0LDEuMDAyOSwyLjQ3MDEsMS4wOTk4LDIuNjQsMS4yN3oiIHN0cm9rZT0icmVkIi8+Cjwvc3ZnPgo=" />
+<rect x="10" y="-99.74789915966386" width="35.08771929824561" height="12.605042016806722" fill="none" stroke="black" stroke-width="0.2" />
+<text x="62.63157894736841" y="-87.14285714285714" font-size="15" fill="black" dy="0em">Wrong</text>
+<image x="10" y="-80.84033613445378" width="35.08771929824561" height="12.605042016806722" xlink:href="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgICAgeG1sbnM6eGxpbms9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkveGxpbmsiCiAgICAgdmlld0JveD0iMCAwIDQwIDQ1Ij4KCiAgICA8dGV4dCB4PSIyMCIgeT0iNDAiIHN0eWxlPSJmaWxsOiBibHVlOyBzdHJva2U6ICMwMDAwMDA7ICBmb250LXNpemU6IDQ4cHg7IHRleHQtYW5jaG9yOiBtaWRkbGUiPlQ8L3RleHQ+Cjwvc3ZnPgo=" />
+<rect x="10" y="-80.84033613445378" width="35.08771929824561" height="12.605042016806722" fill="none" stroke="black" stroke-width="0.2" />
+<text x="62.63157894736841" y="-68.23529411764706" font-size="15" fill="black" dy="0em">Time Out</text>
+<image x="10" y="-61.9327731092437" width="35.08771929824561" height="12.605042016806722" xlink:href="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgICAgeG1sbnM6eGxpbms9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkveGxpbmsiCiAgICAgdmlld0JveD0iMCAwIDQwIDQ1Ij4KCiAgICA8dGV4dCB4PSIyMCIgeT0iNDAiIHN0eWxlPSJmaWxsOiBvcmFuZ2U7IHN0cm9rZTogIzAwMDAwMDsgIGZvbnQtc2l6ZTogNDhweDsgdGV4dC1hbmNob3I6IG1pZGRsZSI+QzwvdGV4dD4KPC9zdmc+Cg==" />
+<rect x="10" y="-61.9327731092437" width="35.08771929824561" height="12.605042016806722" fill="none" stroke="black" stroke-width="0.2" />
+<text x="62.63157894736841" y="-49.32773109243698" font-size="15" fill="black" dy="0em">Unsupported feature</text>
+<image x="10" y="-43.02521008403362" width="35.08771929824561" height="12.605042016806722" xlink:href="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgICAgeG1sbnM6eGxpbms9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkveGxpbmsiCiAgICAgdmlld0JveD0iMCAwIDQwIDQ1Ij4KCiAgICA8dGV4dCB4PSIyMCIgeT0iNDAiIHN0eWxlPSJmaWxsOiB5ZWxsb3c7IHN0cm9rZTogIzAwMDAwMDsgIGZvbnQtc2l6ZTogNDhweDsgdGV4dC1hbmNob3I6IG1pZGRsZSI+UjwvdGV4dD4KPC9zdmc+Cg==" />
+<rect x="10" y="-43.02521008403362" width="35.08771929824561" height="12.605042016806722" fill="none" stroke="black" stroke-width="0.2" />
+<text x="62.63157894736841" y="-30.420168067226893" font-size="15" fill="black" dy="0em">Run time error</text>
+</svg>
\ No newline at end of file
diff --git a/Data/cross.svg b/Data/cross.svg
new file mode 100755
index 0000000000000000000000000000000000000000..4bd87c0403346883e4181b32fd390740f0715c5f
--- /dev/null
+++ b/Data/cross.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<svg version="1.1" id="cross-15" xmlns="http://www.w3.org/2000/svg" width="15px" height="15px" viewBox="0 0 15 15">
+  <path d="M2.64,1.27L7.5,6.13l4.84-4.84C12.5114,1.1076,12.7497,1.0029,13,1c0.5523,0,1,0.4477,1,1&#xA;&#x9;c0.0047,0.2478-0.093,0.4866-0.27,0.66L8.84,7.5l4.89,4.89c0.1648,0.1612,0.2615,0.3796,0.27,0.61c0,0.5523-0.4477,1-1,1&#xA;&#x9;c-0.2577,0.0107-0.508-0.0873-0.69-0.27L7.5,8.87l-4.85,4.85C2.4793,13.8963,2.2453,13.9971,2,14c-0.5523,0-1-0.4477-1-1&#xA;&#x9;c-0.0047-0.2478,0.093-0.4866,0.27-0.66L6.16,7.5L1.27,2.61C1.1052,2.4488,1.0085,2.2304,1,2c0-0.5523,0.4477-1,1-1&#xA;&#x9;C2.2404,1.0029,2.4701,1.0998,2.64,1.27z" stroke="red"/>
+</svg>
diff --git a/Data/featureFigs/COLL.svg b/Data/featureFigs/COLL.svg
new file mode 100644
index 0000000000000000000000000000000000000000..e3b789aa37d5dafbeaa8cbd1db8aca5bba25f601
--- /dev/null
+++ b/Data/featureFigs/COLL.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24" y="28" style="fill: green; stroke: green;  font-size: 19px; text-anchor: middle">COLL</text>
+</svg>
diff --git a/Data/featureFigs/COM.svg b/Data/featureFigs/COM.svg
new file mode 100644
index 0000000000000000000000000000000000000000..dfcb5599d0527429ca65272deb116104cc070314
--- /dev/null
+++ b/Data/featureFigs/COM.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24" y="28" style="fill: pink; stroke: black;  font-size: 21px; text-anchor: middle">COM</text>
+</svg>
diff --git a/Data/featureFigs/DATA.svg b/Data/featureFigs/DATA.svg
new file mode 100644
index 0000000000000000000000000000000000000000..6d07f1d19ee94e7fb709d6fd3077d9cbcdcbcc9c
--- /dev/null
+++ b/Data/featureFigs/DATA.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24" y="28" style="fill: blue; stroke: blue;  font-size: 19px; text-anchor: middle">DATA</text>
+</svg>
diff --git a/Data/featureFigs/GRP.svg b/Data/featureFigs/GRP.svg
new file mode 100644
index 0000000000000000000000000000000000000000..fb8d766974942a70ec1f43468067fe8b25a53130
--- /dev/null
+++ b/Data/featureFigs/GRP.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24" y="28" style="fill: brown; stroke: brown;  font-size: 22px; text-anchor: middle">GRP</text>
+</svg>
diff --git a/Data/featureFigs/HYB.svg b/Data/featureFigs/HYB.svg
new file mode 100644
index 0000000000000000000000000000000000000000..6d0561395c55498ca2b8adeb6ec24bec0c7a0a4c
--- /dev/null
+++ b/Data/featureFigs/HYB.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24" y="28" style="fill: grey; stroke: grey;  font-size: 23px; text-anchor: middle">HYB</text>
+</svg>
diff --git a/Data/featureFigs/IO.svg b/Data/featureFigs/IO.svg
new file mode 100644
index 0000000000000000000000000000000000000000..d62d96825750c27b7bf1ad1fcf8532f950933f9e
--- /dev/null
+++ b/Data/featureFigs/IO.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24.5" y="32" style="fill: orange; stroke: #000000;  font-size: 31px; text-anchor: middle">IO</text>
+</svg>
diff --git a/Data/featureFigs/LOOP.svg b/Data/featureFigs/LOOP.svg
new file mode 100644
index 0000000000000000000000000000000000000000..a427b534c0b18193d0f49e7023f91c7a2fd329b4
--- /dev/null
+++ b/Data/featureFigs/LOOP.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24" y="28" style="fill: black; stroke: #000000;  font-size: 18px; text-anchor: middle">LOOP</text>
+</svg>
diff --git a/Data/featureFigs/OP.svg b/Data/featureFigs/OP.svg
new file mode 100644
index 0000000000000000000000000000000000000000..9e7ae5efba4f45dbbad5b637fcd16730ee84ebda
--- /dev/null
+++ b/Data/featureFigs/OP.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24.5" y="32" style="fill: orange; stroke: #000000;  font-size: 31px; text-anchor: middle">OP</text>
+</svg>
diff --git a/Data/featureFigs/P2P.svg b/Data/featureFigs/P2P.svg
new file mode 100644
index 0000000000000000000000000000000000000000..a1ba64ed81e7f8a6e1ed10e0bade3d7f9d240ea1
--- /dev/null
+++ b/Data/featureFigs/P2P.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24" y="28" style="fill: orange; stroke: orange;  font-size: 25px; text-anchor: middle">P2P</text>
+</svg>
diff --git a/Data/featureFigs/PERS.svg b/Data/featureFigs/PERS.svg
new file mode 100644
index 0000000000000000000000000000000000000000..3cc64a73b130190e2298be2835d40e85a6d7c0c9
--- /dev/null
+++ b/Data/featureFigs/PERS.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24" y="28" style="fill: #d4bd2a; stroke: #d4bd2a;  font-size: 18px; text-anchor: middle">PERS</text>
+</svg>
diff --git a/Data/featureFigs/PROB.svg b/Data/featureFigs/PROB.svg
new file mode 100644
index 0000000000000000000000000000000000000000..cf28aa0d5d4e0cf45a4108169ce4c99735fd222b
--- /dev/null
+++ b/Data/featureFigs/PROB.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24" y="28" style="fill: purple; stroke: purple;  font-size: 17px; text-anchor: middle">PROB</text>
+</svg>
diff --git a/Data/featureFigs/RMA.svg b/Data/featureFigs/RMA.svg
new file mode 100644
index 0000000000000000000000000000000000000000..d31f04a77da87cef63cb8ee87a88faa67cd4ecf2
--- /dev/null
+++ b/Data/featureFigs/RMA.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24" y="28" style="fill: red; stroke: red;  font-size: 22px; text-anchor: middle">RMA</text>
+</svg>
diff --git a/Data/featureFigs/SP.svg b/Data/featureFigs/SP.svg
new file mode 100644
index 0000000000000000000000000000000000000000..a20d7ea0e25b267adf59c8fd1fa627af6a5f0556
--- /dev/null
+++ b/Data/featureFigs/SP.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24.5" y="32" style="fill: black; stroke: black;  font-size: 31px; text-anchor: middle">SP</text>
+</svg>
diff --git a/Data/featureFigs/TOPO.svg b/Data/featureFigs/TOPO.svg
new file mode 100644
index 0000000000000000000000000000000000000000..6d2cdc3bf7c446ebe0c1dbb02836db053044133b
--- /dev/null
+++ b/Data/featureFigs/TOPO.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24" y="28" style="fill: orange; stroke: #000000;  font-size: 18px; text-anchor: middle">TOPO</text>
+</svg>
diff --git a/Data/featureFigs/iCOLL.svg b/Data/featureFigs/iCOLL.svg
new file mode 100644
index 0000000000000000000000000000000000000000..de34b6640b1b205b366dff272ca96f1c71f0cc41
--- /dev/null
+++ b/Data/featureFigs/iCOLL.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24.5" y="28" style="fill: green; stroke: green;  font-size: 16.5px; text-anchor: middle">iCOLL</text>
+</svg>
diff --git a/Data/featureFigs/iP2P.svg b/Data/featureFigs/iP2P.svg
new file mode 100644
index 0000000000000000000000000000000000000000..7b616bbfa90a8beb76ceb8a8c3aedec3cf454b9e
--- /dev/null
+++ b/Data/featureFigs/iP2P.svg
@@ -0,0 +1,6 @@
+<svg xmlns="http://www.w3.org/2000/svg"
+     xmlns:xlink="http://www.w3.org/1999/xlink"
+     viewBox="0 0 50 45">
+
+    <text x="24" y="28" style="fill: orange; stroke: orange;  font-size: 22px; text-anchor: middle">iP2P</text>
+</svg>
diff --git a/Data/gen_svg.py b/Data/gen_svg.py
new file mode 100755
index 0000000000000000000000000000000000000000..17478275e83bf7da5210e962df2ecb1af4146157
--- /dev/null
+++ b/Data/gen_svg.py
@@ -0,0 +1,550 @@
+import drawSvg as draw
+import csv
+import argparse
+import ast
+import os
+
+#############################
+## Argument handling
+#############################
+
+parser = argparse.ArgumentParser(description='This script generates a nice dashboard with links to the C sources or execution logs, as well as the actual results. It takes a CSV as input and generates both an SVG and a PNG.')
+
+
+parser.add_argument('-i', metavar='input', default='result.csv', help='the input CSV file containing all the data generated by our runner.')
+
+parser.add_argument('-o', metavar='output', default='out', type=str, help='name of the file that will be generated (both output.svg and output.png)')
+
+parser.add_argument('--header', metavar='header', default=True, type=bool, help='whether the input CSV contains a header line')
+
+args = parser.parse_args()
+    
+#############################
+## Specific SVG class
+#############################
+
+    
+class Hyperlink(draw.DrawingParentElement):
+    TAG_NAME = 'a'
+    def __init__(self, href, target=None, **kwargs):
+        super().__init__(href=href, target=target, **kwargs)
+
+class Image(draw.DrawingBasicElement):
+    TAG_NAME = 'image'
+    def __init__(self, x, y, width, height, href, **kwargs):
+        try:
+            y = -y-height
+        except TypeError:
+            pass
+        super().__init__(x=x, y=y, width=width, height=height, href=href, **kwargs)
+
+class Group(draw.DrawingDef):
+    TAG_NAME = 'g'
+    def __init__(self, ids, **kwargs):
+        super().__init__(id=ids, **kwargs)
+
+        
+#############################
+## GLOBAL CONSTANT
+#############################
+
+WIDTH = 900
+HEIGHT = 1200
+HEADER_SIZE = 15
+
+
+#############################
+## Handling CSV
+#############################
+
+csvread = []
+with open(args.i, newline='') as result_file:
+    csvread = csv.reader(result_file, delimiter=';')
+    isHeader = args.header
+    data = []
+    for row in csvread:
+        if isHeader:          
+            header = row
+        else:
+            data.append(row)
+        isHeader = False
+
+d = draw.Drawing(WIDTH, HEIGHT, origin='center', displayInline=True)
+
+#############################
+## Data-dependent constants
+#############################
+
+
+tools = list(set([row[2] for row in data]))
+tools.sort()
+
+width_per_tool = WIDTH / (len(tools))
+
+error_type_not_sorted = list(set([row[6] for row in data]))
+error_type_not_sorted.sort()
+error_type = ['OK'] + [error for error in error_type_not_sorted if error != 'OK'] 
+
+
+height_per_error = HEIGHT / (len(error_type) + 1)
+
+nb_uniq_testcase = len([R for R in data if R[2]==tools[0]])
+
+CASE_HEIGHT = HEIGHT / (((nb_uniq_testcase // 5)*1.1 + len(error_type)*2 + 1))
+CASE_WIDTH = width_per_tool / (5.7)
+
+case_per_error = []
+for error in error_type:
+    case_per_error.append([row for row in data if row[6] == error])
+
+nb_error_type = {}
+
+nb_FP = {}
+nb_TP = {}
+nb_TN = {}
+nb_FN = {}
+nb_error = {}
+
+for t in tools:
+    nb_FP[t] = 0
+    nb_TP[t] = 0
+    nb_TN[t] = 0
+    nb_FN[t] = 0
+    nb_error[t] = 0
+    
+
+#############################
+## Actual printing method
+#############################
+
+def print_result(top_left_x, top_left_y, i, j, row):
+
+
+    name = row[0]
+    id = row[1]
+    tool = row[2]
+    to = row[3]
+    np = row[4]
+    buf = row[5]
+    expected = row[6]
+    result = row[7]
+    elapsed = row[8]
+
+    link = "https://gitlab.com/MpiCorrectnessBenchmark/mpicorrectnessbenchmark/-/tree/master/codes/{}.c".format(name)
+    
+    r = Hyperlink(link)
+    
+    fig = "tick.svg"
+    
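+    # Outcome-to-figure mapping used below (a summary of the logic, for readability):
+    #   timeout / UNIMPLEMENTED / failure   -> TO.svg / CUN.svg / RSF.svg (counted as tool errors)
+    #   expected OK,    got OK              -> tick.svg  (true negative)
+    #   expected OK,    got anything else   -> cross.svg (false positive)
+    #   expected ERROR, got OK              -> cross.svg (false negative)
+    #   expected ERROR, got anything else   -> tick.svg  (true positive)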
+    if result == "timeout":
+        fig = "TO.svg"
+        nb_error[tool] += 1
+    elif result == "UNIMPLEMENTED":
+        fig = "CUN.svg"
+        nb_error[tool] += 1
+    elif result == "failure":
+        fig = "RSF.svg"
+        nb_error[tool] += 1
+
+    elif expected == 'OK':
+        if result == 'OK':
+            fig = "tick.svg"
+            nb_TN[tool] += 1
+        else:
+            fig = "cross.svg"
+            nb_FP[tool] += 1
+    elif expected == 'ERROR':
+        if result == 'OK':
+            fig = "cross.svg"
+            nb_FN[tool] += 1
+        else:
+            fig = "tick.svg"
+            nb_TP[tool] += 1
+
+    r.append(draw.Image(top_left_x + 0.1*CASE_WIDTH + i * (CASE_WIDTH*1.1),
+            top_left_y - 0.1*CASE_HEIGHT - j * (CASE_HEIGHT*1.1),
+            CASE_WIDTH,
+            CASE_HEIGHT,
+            fig,
+            embed=True))
+
+    r.append(draw.Rectangle(top_left_x + 0.1*CASE_WIDTH + i * (CASE_WIDTH*1.1),
+            top_left_y - 0.1*CASE_HEIGHT - j * (CASE_HEIGHT*1.1),
+            CASE_WIDTH,
+            CASE_HEIGHT,
+            fill='none',
+            stroke="black",
+            stroke_width="0.2"
+    ))
+    
+    desc = "TEST : " + tool + " np=" + np + " to=" + to
+    if buf != 'NA':
+        desc += " buf=" + buf
+    desc += " " + name
+
+    desc += "\nEXPECTED : " + expected[0]
+
+    if result == 'CUN': 
+        desc += "\nRETURNED : Compilation Failure"  
+    elif result == 'RSF':
+        desc += "\nRETURNED : Runtime Failure"  
+    else :
+        desc += "\nRETURNED : " + result  
+        
+    r.append(draw.Title(desc))
+
+    if tool == tools[0]:
+        expects = expected[0]
+        if not expects in nb_error_type:
+            nb_error_type[expects] = 1
+        else:
+            nb_error_type[expects] += 1
+    
+    return r
+
+    
+#############################
+## Going through the data
+#############################
+
+
+for i in range(len(tools)):
+    name = ""
+    if tools[i]=="aislinn":
+        name = "Aislinn"
+    if tools[i]=="isp":
+        name = "ISP"
+    if tools[i]=="civl":
+        name = "CIVL"
+    if tools[i]=="parcoach":
+        name = "Parcoach"
+    if tools[i]=="must" or tools[i]=="mustdist":
+        name = "MUST"
+    if tools[i]=="simgrid":
+        name = "McSimGrid"
+        
+    d.append(draw.Text(name, HEADER_SIZE+5, -(WIDTH/2) + (i)*width_per_tool + 5,  (HEIGHT/2) - 20, fill='black'))
+
+
+adjust_height = 70
+    
+for i in range(len(error_type)):
+
+    # Print the error name
+    d.append(draw.Text(error_type[i], HEADER_SIZE-2, -(WIDTH/2) + 5,  (HEIGHT/2) - adjust_height + 25, fill='black'))
+    
+    
+    for j in range(len(tools)):
+        
+        to_print = [cases for cases in case_per_error[i] if cases[2]==tools[j]]
+        to_print.sort()
+        
+        for k in range(len(to_print)):
+            row = to_print[k]
+            d.append(print_result(-(WIDTH/2) + (j)*width_per_tool,
+                                  (HEIGHT/2) - adjust_height,
+                                  k%5,
+                                  k//5,
+                                  row))
+
+    to_add = len(to_print)//5
+    if len(to_print)%5!=0:
+        to_add+=1
+    adjust_height += (to_add)*CASE_HEIGHT*1.1
+    adjust_height += 30
+          
+
+
+d.setPixelScale(2)  # Set number of pixels per geometry unit
+#d.setRenderSize(400,200)  # Alternative to setPixelScale
+
+d.saveSvg(args.o + '.svg')
+d.savePng(args.o + '.png')
+
+
+#############################
+## Generating the caption
+#############################
+
+caption = draw.Drawing(CASE_WIDTH*15, CASE_HEIGHT*10, displayInline=True)
+
+x = 10
+y = CASE_HEIGHT*10 - 20
+
+caption.append(draw.Image(x, y, CASE_WIDTH, CASE_HEIGHT, "tick.svg", embed=True))
+
+caption.append(draw.Rectangle(x, y, CASE_WIDTH, CASE_HEIGHT,
+            fill='none',
+            stroke="black",
+            stroke_width="0.2"
+    ))
+
+caption.append(draw.Text("Right", HEADER_SIZE, x + 1.5*CASE_WIDTH, y, fill='black'))
+
+y -= CASE_HEIGHT*1.5
+
+caption.append(draw.Image(x,
+                    y,
+                    CASE_WIDTH,
+                    CASE_HEIGHT,
+                    "cross.svg",
+                    embed=True))
+
+caption.append(draw.Rectangle(x,
+                              y,
+                              CASE_WIDTH, CASE_HEIGHT,
+                              fill='none',
+                              stroke="black",
+                              stroke_width="0.2"))
+
+caption.append(draw.Text("Wrong", HEADER_SIZE, x + 1.5*CASE_WIDTH, y, fill='black'))
+
+y -= CASE_HEIGHT*1.5
+
+caption.append(draw.Image(x,
+                    y,
+                    CASE_WIDTH,
+                    CASE_HEIGHT,
+                    "TO.svg",
+                    embed=True))
+
+caption.append(draw.Rectangle(x,
+                              y,
+                              CASE_WIDTH, CASE_HEIGHT,
+                              fill='none',
+                              stroke="black",
+                              stroke_width="0.2"))
+
+caption.append(draw.Text("Time Out", HEADER_SIZE, x + 1.5*CASE_WIDTH, y, fill='black'))
+
+y -= CASE_HEIGHT*1.5
+
+caption.append(draw.Image(x,
+                    y,
+                    CASE_WIDTH,
+                    CASE_HEIGHT,
+                    "CUN.svg",
+                    embed=True))
+
+caption.append(draw.Rectangle(x,
+                              y,
+                              CASE_WIDTH, CASE_HEIGHT,
+                              fill='none',
+                              stroke="black",
+                              stroke_width="0.2"))
+
+caption.append(draw.Text("Unsupported feature", HEADER_SIZE, x + 1.5*CASE_WIDTH, y, fill='black'))
+
+y -= CASE_HEIGHT*1.5
+
+caption.append(draw.Image(x,
+                    y,
+                    CASE_WIDTH,
+                    CASE_HEIGHT,
+                    "RSF.svg",
+                    embed=True))
+
+caption.append(draw.Rectangle(x,
+                              y,
+                              CASE_WIDTH, CASE_HEIGHT,
+                              fill='none',
+                              stroke="black",
+                              stroke_width="0.2"))
+
+caption.append(draw.Text("Run time error", HEADER_SIZE, x + 1.5*CASE_WIDTH, y, fill='black'))
+
+caption.saveSvg('caption.svg')
+
+#############################
+## Printing result
+#############################
+
+for t in tools:
+    print("TOOLS : {}\n  TP : {}\n  TN : {}\n  FP : {}\n  FN : {}\n  Error : {}\n".
+          format(t, nb_TP[t], nb_TN[t], nb_FP[t], nb_FN[t], nb_error[t]))
+
+if 'aislinn' in tools and 'isp' in tools and 'civl' in tools and 'must' in tools and 'parcoach' in tools and 'simgrid' in tools:
+    print("Aislinn & {} & {} & {} & {} & {} \\\\\n CIVL & {} & {} & {} & {} & {} \\\\\n ISP & {} & {} & {} & {} & {} \\\\\n Must & {} & {} & {} & {} & {}\\\\\n Parcoach & {} & {} & {} & {} & {}\\\\\n McSimGrid & {} & {} & {} & {} & {}\\\\".format(nb_TP['aislinn'], nb_FN['aislinn'], nb_FP['aislinn'], nb_TN['aislinn'], nb_error['aislinn'],
+    nb_TP['civl'], nb_FN['civl'], nb_FP['civl'], nb_TN['civl'], nb_error['civl'],
+    nb_TP['isp'], nb_FN['isp'], nb_FP['isp'], nb_TN['isp'], nb_error['isp'],
+    nb_TP['must'], nb_FN['must'], nb_FP['must'], nb_TN['must'], nb_error['must'],
+    nb_TP['parcoach'], nb_FN['parcoach'], nb_FP['parcoach'], nb_TN['parcoach'], nb_error['parcoach'],
+    nb_TP['simgrid'], nb_FN['simgrid'], nb_FP['simgrid'], nb_TN['simgrid'], nb_error['simgrid']))
+
+#############################
+## Extracting features
+#############################
+
+feature_data = [["Name", "Origin", "P2P", "iP2P", "PERS", "COLL", "iCOLL", "TOPO", "IO", "RMA", "PROB",
+	 "COM", "GRP", "DATA", "OP", "LOOP", "SP", "deadlock", "numstab", "segfault", "mpierr",
+	 "resleak", "livelock", "datarace"]]
+directory = "../codes/"
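+# Each code is expected to begin with a "//"-comment header carrying its
+# metadata, one "<Key>: <Value>" pair per line, e.g. (hypothetical values):
+#   // Origin: SomeGenerator
+#   // P2P: Incorrect
+#   // COLL: Correct
+# Only the first letter of each value is kept ('C', 'I', ...), except for Origin.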
+for filename in os.listdir(directory):
+    if filename.endswith(".c"):
+        row = [0]*len(feature_data[0])
+        row[0] = filename
+        f = open(os.path.join(directory, filename), 'r')
+        line = f.readline()
+        while line[0] == "/":
+            line = f.readline()
+            parsed_line = line.split(" ")
+            try:
+                if len(parsed_line) >= 3:
+                    index_data = feature_data[0].index(parsed_line[1][:-1])
+                    if parsed_line[1][:-1] == "Origin":
+                        row[index_data] = parsed_line[2].rstrip('\n')
+                    else:
+                        row[index_data] = parsed_line[2][:1]
+            except ValueError:
+                pass
+        f.close()
+        feature_data.append(row)
+
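+# For each code, keep the features flagged 'I' (involved in the error); for
+# correct codes ("no-error" in the name), and as a fallback when nothing is
+# flagged 'I', keep the features flagged 'C' instead.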
+feature_per_file = {}
+for row in feature_data:
+    feature_per_file[row[0]] = []
+    if "no-error" in row[0]:
+        for j in range(2,17):
+            if row[j] == "C":
+                feature_per_file[row[0]].append(feature_data[0][j]) 
+    else:
+        for j in range(2,17):
+            if row[j] == "I":
+                feature_per_file[row[0]].append(feature_data[0][j])
+        if len(feature_per_file[row[0]]) == 0:
+            for j in range(2,17):
+                if row[j] == "C":
+                    feature_per_file[row[0]].append(feature_data[0][j])
+        
+                
+most_feature_per_file = 0
+for filename in feature_per_file:
+    feature_per_file[filename].sort()
+    most_feature_per_file = max(most_feature_per_file,
+                                len(feature_per_file[filename]))
+
+
+CASE_WIDTH = WIDTH / 5.7
+width_per_feature = CASE_WIDTH / most_feature_per_file
+CASE_HEIGHT = HEIGHT / ((nb_uniq_testcase // 5)*1.1 + len(error_type)*2 + 1)
+
+
+nb_features = {}
+for feat in ["P2P", "iP2P", "PERS", "COLL", "iCOLL", "TOPO", "IO", "RMA", "PROB", "COM", "GRP", "DATA", "OP", "LOOP", "SP"]:
+    nb_features[feat] = [0,0]
+    
+#############################
+## Feature printing function
+#############################
+
+
+
+def print_feature(top_left_x, top_left_y, i, j, n, feature):
+
+    fig = "./featureFigs/{}.svg".format(feature)
+    
+    r = draw.Image(
+        top_left_x + 0.1*CASE_WIDTH + i * (CASE_WIDTH*1.1) + n * width_per_feature,
+        top_left_y - 0.1*CASE_HEIGHT - j * (CASE_HEIGHT*1.1),
+        width_per_feature,
+        CASE_HEIGHT,
+        fig,
+        embed=True)
+    
+    return r
+
+def print_box(top_left_x, top_left_y, i, j):
+
+    r = (draw.Rectangle(top_left_x + 0.1*CASE_WIDTH + i * (CASE_WIDTH*1.1),
+            top_left_y - 0.1*CASE_HEIGHT - j * (CASE_HEIGHT*1.1),
+            CASE_WIDTH,
+            CASE_HEIGHT,
+            fill='none',
+            stroke="black",
+            stroke_width="0.3"
+    ))
+    
+    return r
+    
+#############################
+## Printing features
+#############################
+
+feature_drawing = draw.Drawing(WIDTH, HEIGHT, origin='center', displayInline=True)
+
+# for i in range(most_feature_per_file):
+#     feature_drawing.append(draw.Text("Feature {}".format(i+1), HEADER_SIZE, -(WIDTH/2) + (i+1)*width_per_tool,  (HEIGHT/2) - 15, fill='black'))
+
+
+adjust_height = 50
+    
+for i in range(len(error_type)):
+
+    # Print the error name
+    feature_drawing.append(draw.Text(error_type[i], HEADER_SIZE, -(WIDTH/2) + 5,  (HEIGHT/2) - adjust_height + 30, fill='black'))
+
+    to_print = [cases for cases in case_per_error[i] if cases[2]==tools[0]]
+    to_print.sort()
+    
+    for k in range(len(to_print)):
+
+        filename = to_print[k][0] + '.c'
+
+        if not filename in feature_per_file:
+            continue
+        
+        list_feature = feature_per_file[filename]
+
+        feature_drawing.append(print_box(-(WIDTH/2),
+                                         (HEIGHT/2) - adjust_height,
+                                         k%5,
+                                         k//5))
+
+        
+        for j in range(len(list_feature)):
+                       
+            if j < len(feature_per_file[filename]):
+
+                #counting
+                if "no-error" in filename:
+                    nb_features[list_feature[j]][0] += 1
+                else:
+                    nb_features[list_feature[j]][1] += 1
+
+                #printing
+                feature_drawing.append(print_feature(-(WIDTH/2),
+                                                     (HEIGHT/2) - adjust_height,
+                                                     k%5,
+                                                     k//5,
+                                                     j,
+                                                     list_feature[j]))
+
+    to_add = len(to_print)//5
+    if len(to_print)%5!=0:
+        to_add+=1
+    adjust_height += (to_add)*CASE_HEIGHT*1.1
+    adjust_height += 30
+
+feature_drawing.setPixelScale(2)  # Set number of pixels per geometry unit
+#d.setRenderSize(400,200)  # Alternative to setPixelScale
+
+feature_drawing.saveSvg('features.svg')
+feature_drawing.savePng('features.png')
+
+#############################
+## Printing feature count
+#############################
+
+for feat in nb_features:
+    print("FEATURE : {}\n  Correct : {}\n  Incorect : {}\n".
+          format(feat, nb_features[feat][0], nb_features[feat][1]))
+
+
+#############################
+## Printing error count
+#############################
+
+for error in nb_error_type:
+    print("ERROR : {}\n  Number : {}\n".
+          format(error, nb_error_type[error]))
+
diff --git a/Data/tick.svg b/Data/tick.svg
new file mode 100755
index 0000000000000000000000000000000000000000..e54aa497d6f952fe9193a237644b7a0b6e277930
--- /dev/null
+++ b/Data/tick.svg
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="iso-8859-1"?>
+<!-- Generator: Adobe Illustrator 21.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->
+<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
+	 viewBox="0 0 80.588 61.158" style="enable-background:new 0 0 80.588 61.158;" xml:space="preserve">
+<path style="fill:green;" d="M29.658,61.157c-1.238,0-2.427-0.491-3.305-1.369L1.37,34.808c-1.826-1.825-1.826-4.785,0-6.611
+	c1.825-1.826,4.786-1.827,6.611,0l21.485,21.481L72.426,1.561c1.719-1.924,4.674-2.094,6.601-0.374
+	c1.926,1.72,2.094,4.675,0.374,6.601L33.145,59.595c-0.856,0.959-2.07,1.523-3.355,1.56C29.746,61.156,29.702,61.157,29.658,61.157z
+	"/>
+</svg>
diff --git a/MBI.py b/MBI.py
new file mode 100755
index 0000000000000000000000000000000000000000..bda73669de1dc281903958d607e24c76f7345585
--- /dev/null
+++ b/MBI.py
@@ -0,0 +1,1749 @@
+#! /usr/bin/python3
+
+# autopep8 -i --max-line-length 130 MBI.py
+
+import shutil
+import os
+import signal
+import sys
+import stat
+import re
+import argparse
+import time
+import glob
+import subprocess
+import statistics
+import multiprocessing as mp
+
+# Add our lib directory to the PYTHONPATH, and load our utility libraries
+sys.path.append(f'{os.path.dirname(os.path.abspath(__file__))}/scripts')
+
+from MBIutils import *
+from LaTeXutils import *
+
+# Plots need big dependencies like numpy and matplotlib, so just ignore
+# the import if they are not available.
+plots_loaded = False
+try:
+    from tools.gen_plots_radar import *
+    plots_loaded = True
+except ImportError:
+    print("[MBI] Warning: ImportError for the plots module.")
+
+
+import tools.parcoach
+import tools.simgrid
+import tools.smpi # SimGrid without MC
+import tools.smpivg # SimGrid with valgrind instead of MC
+import tools.must
+import tools.mpisv
+import tools.hermes
+import tools.isp
+itac_loaded=False
+try:
+    import tools.itac
+    itac_loaded=True
+except ImportError:
+    print("[MBI] Warning: ITAC module cannot be loaded because of an ImportError (that's OK if you did not plan to use it).")
+import tools.civl
+import tools.aislinn
+import tools.mpi_checker
+
+tools = {'aislinn': tools.aislinn.Tool(), 'civl': tools.civl.Tool(), 'hermes': tools.hermes.Tool(), 'isp': tools.isp.Tool(), 'mpisv': tools.mpisv.Tool(),
+         'itac': tools.itac.Tool() if itac_loaded else None,
+         'must': tools.must.V18(), #'must17': tools.must.V17(), # This one is deprecated, and no RC release right now
+         'simgrid': tools.simgrid.Tool(), 'simgrid-3.27': tools.simgrid.v3_27(), 'simgrid-3.28': tools.simgrid.v3_28(), 'simgrid-3.29': tools.simgrid.v3_29(), 'simgrid-3.30': tools.simgrid.v3_30(),'simgrid-3.31': tools.simgrid.v3_31(),'simgrid-3.32': tools.simgrid.v3_32(),
+         'smpi':tools.smpi.Tool(),'smpivg':tools.smpivg.Tool(), 'parcoach': tools.parcoach.Tool(), 'mpi-checker': tools.mpi_checker.Tool()}
+
+# Some scripts may fail if error messages get translated
+os.environ["LC_ALL"] = "C"
+
+
+# BufferLength/BufferOverlap
+# RMA concurrency errors (local and distributed)
+
+########################
+# Extract the TODOs from the codes
+########################
+todo = []
+
+
+def extract_all_todo(batch):
+    """Extract the TODOs from all existing files, applying the batching request"""
+    if os.path.exists("/MBI/gencodes"):  # Docker run
+        filenames = glob.glob("/MBI/gencodes/*.c")
+    elif os.path.exists("gencodes/"):  # Gitlab-ci run
+        filenames = glob.glob(f"{os.getcwd()}/gencodes/*.c")  # our code expects absolute paths
+    elif os.path.exists("../../gencodes/"):  # Local runs
+        filenames = glob.glob(f"{os.getcwd()}/../../gencodes/*.c")  # our code expects absolute paths
+    else:
+        subprocess.run("ls ../..", shell=True)
+        raise Exception(f"Cannot find the input codes (cwd: {os.getcwd()}). Did you run the generators before running the tests?")
+    # Choose the files that will be used by this runner, depending on the -b argument
+    match = re.match(r'(\d+)/(\d+)', batch)
+    if not match:
+        raise ValueError(f"The parameter to batch option ({batch}) is invalid. Must be something like 'N/M', with N and M numbers.")
+    pos = int(match.group(1))
+    runner_count = int(match.group(2))
+    assert pos > 0
+    assert pos <= runner_count
+    batch = int(len(filenames) / runner_count)+1
+    min_rank = batch*(pos-1)
+    max_rank = (batch*pos)-1
+    print(f'Handling files from #{min_rank} to #{max_rank}, out of {len(filenames)} in {os.getcwd()}')
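+    # Worked example (illustrative numbers): with 100 generated codes and
+    # '-b 2/10', batch is int(100/10)+1 = 11, min_rank is 11 and max_rank is 21.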
+
+    global todo
+    filenames = sorted(filenames)
+    for filename in filenames[min_rank:max_rank+1]:
+        todo = todo + parse_one_code(filename)
+    if pos == runner_count and pos != 1: # The last runner starts from the end of the array to ease dynamically splitting
+        todo = list(reversed(todo))
+
+########################
+# cmd_gencodes(): what to do when '-c generate' is used (Generating the codes)
+########################
+
+
+def cmd_gencodes():
+    if os.path.exists("/MBI/scripts/generators/CollArgGenerator.py"):  # Docker run
+        print("Docker run")
+        generators = glob.glob("/MBI/scripts/generators/*Generator.py")
+        dir = "/MBI/gencodes"
+    elif os.path.exists("../../scripts/generators/CollArgGenerator.py"):  # Local run, from logs dir
+        print("Local run, from tools' logs dir")
+        generators = glob.glob(f"{os.getcwd()}/../../scripts/generators/*Generator.py")
+        dir = "../../gencodes/"
+    elif os.path.exists("scripts/generators/CollArgGenerator.py"):  # Local run, from main dir
+        print("Local run, from MBI main dir")
+        generators = glob.glob(f"{os.getcwd()}/scripts/generators/*Generator.py")
+        dir = "gencodes/"
+    else:
+        raise Exception("Cannot find the codes' generators. Please report that bug.")
+    subprocess.run(f"rm -rf {dir} ; mkdir {dir}", shell=True, check=True)
+    here = os.getcwd()
+    os.chdir(dir)
+    print(f"Generate the codes (in {os.getcwd()}): ", end='')
+    for generator in generators:
+        m = re.match("^.*?/([^/]*)Generator.py$", generator)
+        if m:
+            print(m.group(1), end=", ")
+        else:
+            print(generator, end=", ")
+        subprocess.run(f'../scripts/ensure_python3 {generator}', shell=True, check=True)
+    print("\nTest count: ", end='')
+    sys.stdout.flush()
+    subprocess.run("ls *.c|wc -l", shell=True, check=True)
+    subprocess.run("for n in *.c ; do cat -n $n > $n.txt ; done", shell=True, check=True)
+    os.chdir(here)
+
+
+########################
+# cmd_build(): what to do when '-c build' is used (building the tool, discarding the cache)
+########################
+def cmd_build(rootdir, toolname):
+    # Basic verification
+    tools[toolname].ensure_image()
+
+    # Build the tool on need
+    tools[toolname].build(rootdir=rootdir, cached=False)
+
+########################
+# cmd_run(): what to do when '-c run' is used (running the tests)
+########################
+def cmd_run(rootdir, toolname, batchinfo):
+    # Go to the tools' logs directory on need
+    rootdir = os.path.dirname(os.path.abspath(__file__))
+    os.makedirs(f'{rootdir}/logs/{toolname}', exist_ok=True)
+    os.chdir(f'{rootdir}/logs/{toolname}')
+    print(f"Run tool {toolname} from {os.getcwd()} (batch {batchinfo}).")
+
+    tools[toolname].set_rootdir(rootdir)
+
+    # Basic verification
+    tools[toolname].ensure_image()
+
+    # Build the tool on need
+    tools[toolname].build(rootdir=rootdir)
+
+    count = 1
+    for test in todo:
+        binary = re.sub(r'\.c', '', os.path.basename(test['filename']))
+
+        print(f"\nTest #{count} out of {len(todo)}: '{binary}_{test['id']} '", end="... ")
+        count += 1
+        sys.stdout.flush()
+
+        p = mp.Process(target=tools[toolname].run, args=(test['cmd'], test['filename'], binary, test['id'], args.timeout, batchinfo))
+        p.start()
+        sys.stdout.flush()
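+        # The tool is expected to enforce args.timeout itself; the extra 60s
+        # here is only a safety net before we kill the child from this side.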
+        p.join(args.timeout+60)
+        if p.is_alive():
+            print("HARD TIMEOUT! The child process failed to timeout by itself. Sorry for the output.")
+            p.terminate()
+
+    tools[toolname].teardown()
+
+########################
+# cmd_html(): what to do when '-c html' is used (extract the statistics of this tool)
+########################
+def percent(num, den, compl=False, one=False):
+    """Returns the ratio of num/den as a percentage, rounded to 2 digits only. If one=True, then return a ratio of 1 with 4 digits"""
+    if den == 0:
+        return "(error)"
+    elif compl: # Complementary
+        res = round (100 - num/den*100, 2)
+    else:
+        res = round (num/den*100, 2)
+    if int(res) == 100:
+        return "1" if one else "100"
+    return round(res/100, 4) if one else res
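+
+# Worked examples for percent() (illustrative values, not executed):
+#   percent(1, 3)              -> 33.33
+#   percent(1, 3, compl=True)  -> 66.67
+#   percent(3, 3)              -> "100"  (a result of 100 is returned as the string "100")
+#   percent(2, 4, one=True)    -> 0.5    (ratio of 1, rounded to 4 digits)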
+
+def bold_if(val, target):
+    """Returns the value as a bold LaTeX string if it equals the target, or unchanged otherwise."""
+    if str(val) == str(target):
+        return f'{{\\bf {val}}}'
+    return str(val)
+
+def seconds2human(secs):
+    """Returns the amount of seconds in human-friendly way"""
+    days = int(secs//86400)
+    hours = int((secs - days*86400)//3600)
+    minutes = int((secs - days*86400 - hours*3600)//60)
+    seconds = secs - days*86400 - hours*3600 - minutes*60
+    return (f"{days} days, " if days else "") + (f"{hours} hours, " if hours else "") + (f"{minutes} minutes, " if minutes else "") + (f"{int(seconds*100)/100} seconds" if seconds else "")
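+# For example, seconds2human(3725) -> "1 hours, 2 minutes, 5.0 seconds".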
+
+def cmd_html(rootdir, toolnames=[]):
+    here = os.getcwd()
+    os.chdir(rootdir)
+    results = {}
+    total_elapsed = {}
+    used_toolnames = []
+    for toolname in toolnames:
+        if not toolname in tools:
+            raise Exception(f"Tool {toolname} does not seem to be a valid name.")
+
+        if os.path.exists(f'logs/{toolname}'):
+            used_toolnames.append(toolname)
+            # To compute statistics on the performance of this tool
+            results[toolname]= {'failure':[], 'timeout':[], 'unimplemented':[], 'other':[], 'TRUE_NEG':[], 'TRUE_POS':[], 'FALSE_NEG':[], 'FALSE_POS':[]}
+
+            # To compute timing statistics
+            total_elapsed[toolname] = 0
+
+    ########################
+    # Analyse each test, grouped by expectation, and all tools for a given test
+    ########################
+    with open(f"{rootdir}/index.html", "w") as outHTML:
+      outHTML.write("""
+<html><head><title>MBI results</title></head>
+<style>
+iframe {
+  resize: both;
+  overflow: auto;
+}
+</style>
+<body>
+<iframe width="100%" height="45%" src="summary.html"></iframe>
+<iframe width="100%" height="55%" name="MBI_details"></iframe>
+</body></html>
+""")
+
+    with open(f"{rootdir}/summary.html", "w") as outHTML:
+      outHTML.write(f"<html><head><title>MBI outcomes for all tests</title></head>\n")
+      outHTML.write("""
+<style>
+.tooltip {
+  position: relative;
+  display: inline-block;
+  border-bottom: 1px dotted black; /* If you want dots under the hoverable text */
+}
+
+.tooltip .tooltiptext {
+  visibility: hidden;
+  width: 120px;
+  background-color: #555;
+  color: #fff;
+  text-align: center;
+  border-radius: 6px;
+  padding: 5px 0;
+  position: absolute;
+  z-index: 1;
+  bottom: 125%;
+  left: 50%;
+  margin-left: -60px;
+  opacity: 0;
+  transition: opacity 0.3s;
+}
+
+.tooltip .tooltiptext::after {
+  content: "";
+  position: absolute;
+  top: 100%;
+  left: 50%;
+  margin-left: -5px;
+  border-width: 5px;
+  border-style: solid;
+  border-color: #555 transparent transparent transparent;
+}
+
+.tooltip:hover .tooltiptext {
+  visibility: visible;
+  opacity: 1;
+}
+</style>
+<body>
+""")
+
+      # Generate the table of contents
+      previous_detail ='' # To open a new section for each possible detailed outcome
+      outHTML.write("<h2>Table of contents</h2>\n<ul>\n")
+      for test in sorted(todo, key=lambda t: f"{possible_details[t['detail']]}|{t['detail']}|{t['filename']}|{t['id']}"):
+        if previous_detail != possible_details[test['detail']]:
+            if previous_detail != '': # Close the previous item, if we are not generating the first one
+                outHTML.write(f" </li>\n")
+            previous_detail = possible_details[test['detail']]
+            if test['detail'] != 'OK':
+                outHTML.write(f" <li><a href='#{possible_details[test['detail']]}'>{displayed_name[ possible_details[test['detail']]]}</a> (scope: {error_scope[possible_details[test['detail']]]})\n")
+            else:
+                outHTML.write(f" <li><a href='#OK'>{displayed_name[ possible_details[test['detail']]]}</a>\n")
+
+      outHTML.write("  </li>\n <li><a href='#metrics'>Summary metrics</a></li>\n</ul>\n")
+
+      # Generate the actual content
+      previous_detail=''  # To open a new section for each possible detailed outcome
+      testcount=0 # To repeat the table header every 25 lines
+      for test in sorted(todo, key=lambda t: f"{possible_details[t['detail']]}|{t['detail']}|{t['filename']}|{t['id']}"):
+        testcount += 1
+        if previous_detail != possible_details[test['detail']] or testcount == 25:
+            if testcount != 25: # Write the expected outcome only once, not every 25 tests
+                if previous_detail != '': # Close the previous table, if we are not generating the first one
+                    outHTML.write(f"</table>\n")
+                previous_detail = possible_details[test['detail']]
+                if test['detail'] != 'OK':
+                    outHTML.write(f"  <a name='{possible_details[test['detail']]}'/><h3>{displayed_name[possible_details[test['detail']]]} errors (scope: {error_scope[possible_details[test['detail']]]})</h3>\n")
+                else:
+                    outHTML.write(f"  <a name='OK'/><h3>Correct codes</h3>\n")
+
+                outHTML.write( '  <table border=1>\n')
+            testcount=0
+            outHTML.write("   <tr><td>Test</td>")
+            for toolname in used_toolnames:
+                outHTML.write(f"<td>&nbsp;{displayed_name[toolname]}&nbsp;</td>")
+            outHTML.write(f"</tr>\n")
+        outHTML.write(f"     <tr>")
+
+        binary=re.sub(r'\.c', '', os.path.basename(test['filename']))
+        ID=test['id']
+        test_id = f"{binary}_{ID}"
+        expected=test['expect']
+
+        outHTML.write(f"<td><a href='gencodes/{binary}.c.txt' target='MBI_details'>{binary}</a>&nbsp;<a href='gencodes/{binary}.c'><img title='Download source' src='img/html.svg' height='24' /></a>")
+        if ID != 0:
+            outHTML.write(f' (test {ID+1}) ')
+        outHTML.write("</td>")
+
+        for toolname in used_toolnames:
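+            # res_category is used as a key of results[toolname] above, elapsed is a duration in seconds,
+            # diagnostic is a human-readable message, and outcome is the raw verdict reported by the tool.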
+            (res_category, elapsed, diagnostic, outcome) = categorize(tool=tools[toolname], toolname=toolname, test_id=test_id, expected=expected, autoclean=True)
+
+            results[toolname][res_category].append(f"{test_id} expected {test['detail']}, outcome: {diagnostic}")
+            outHTML.write(f"<td align='center'><a href='logs/{toolname}/{test_id}.txt' target='MBI_details'><img title='{displayed_name[toolname]} {diagnostic} (returned {outcome})' src='img/{res_category}.svg' width='24' /></a> ({outcome})")
+            extra=None
+
+            report = []
+            for root, dirs, files in os.walk(f"logs/{toolname}/{test_id}"):
+                if "index.html" in files:
+                    report.append(os.path.join(root, "index.html"))
+
+            if len(report) > 0:
+                extra = 'logs/' + report[0].split('logs/')[1]
+            if os.path.exists(f'logs/{toolname}/{test_id}.html'):
+                extra=f'logs/{toolname}/{test_id}.html'
+            if os.path.exists(f'logs/{toolname}/{test_id}-klee-out'): # MPI-SV
+                extra=f'logs/{toolname}/{test_id}-klee-out'
+
+            if extra is not None:
+                outHTML.write(f"&nbsp;<a href='{extra}' target='MBI_details'><img title='more info' src='img/html.svg' height='24' /></a>")
+            outHTML.write("</td>")
+
+            if res_category != 'timeout' and elapsed is not None:
+                total_elapsed[toolname] += float(elapsed)
+
+            if len(used_toolnames) == 1:
+                print(f"Test '{test_id}' result: {res_category}: {diagnostic}. Elapsed: {elapsed} sec")
+
+            np = re.search(r"(?:-np) [0-9]+", test['cmd'])
+            np = int(re.sub(r"-np ", "", np.group(0)))
+
+        outHTML.write(f"</tr>\n")
+      outHTML.write(f"</table>\n")
+
+      # Display summary metrics for each tool
+      def tool_stats(toolname):
+          return (len(results[toolname]['TRUE_POS']), len(results[toolname]['TRUE_NEG']),len(results[toolname]['FALSE_POS']),len(results[toolname]['FALSE_NEG']),len(results[toolname]['unimplemented']),len(results[toolname]['failure']),len(results[toolname]['timeout']),len(results[toolname]['other']))
+
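+      # Summary metrics written below:
+      #   API coverage = 1 - unimplemented/total     Robustness = 1 - (timeouts+failures)/(total - unimplemented - inconclusive)
+      #   Recall = TP/(TP+FN)   Specificity = TN/(TN+FP)   Precision = TP/(TP+FP)   Accuracy = (TP+TN)/(TP+TN+FP+FN)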
+      outHTML.write("\n<a name='metrics'/><h2>Metrics</h2><table border=1>\n<tr><td/>\n")
+      for toolname in used_toolnames:
+        outHTML.write(f"<td>{displayed_name[toolname]}</td>")
+
+      outHTML.write("</tr>\n<tr><td>API coverage</td>")
+      for toolname in used_toolnames:
+        (TP, TN, FP, FN, nPort, nFail, nTout, nNocc) = tool_stats(toolname)
+        total = TP + TN + FP + FN + nTout + nPort + nFail + nNocc
+        outHTML.write(f"<td><div class='tooltip'>{percent(nPort,total,compl=True)}% <span class='tooltiptext'>{nPort} unimplemented calls, {nNocc} inconclusive runs out of {total}</span></div></td>")
+
+      outHTML.write("</tr>\n<tr><td>Robustness</td>")
+      for toolname in used_toolnames:
+        (TP, TN, FP, FN, nPort, nFail, nTout, nNocc) = tool_stats(toolname)
+        totalPort = TP + TN + FP + FN + nTout + nFail
+        outHTML.write(f"<td><div class='tooltip'>{percent((nTout+nFail),(totalPort),compl=True)}% <span class='tooltiptext'>{nTout} timeouts, {nFail} failures out of {totalPort}</span></div></td>")
+
+      outHTML.write("</tr>\n<tr><td>Recall</td>")
+      for toolname in used_toolnames:
+        (TP, TN, FP, FN, nPort, nFail, nTout, nNocc) = tool_stats(toolname)
+        outHTML.write(f"<td><div class='tooltip'>{percent(TP,(TP+FN))}% <span class='tooltiptext'>found {TP} errors out of {TP+FN}</span></div></td>")
+      outHTML.write("</tr>\n<tr><td>Specificity</td>")
+      for toolname in used_toolnames:
+        (TP, TN, FP, FN, nPort, nFail, nTout, nNocc) = tool_stats(toolname)
+        outHTML.write(f"<td><div class='tooltip'>{percent(TN,(TN+FP))}%  <span class='tooltiptext'>recognized {TN} correct codes out of {TN+FP}</span></div></td>")
+      outHTML.write("</tr>\n<tr><td>Precision</td>")
+      for toolname in used_toolnames:
+        (TP, TN, FP, FN, nPort, nFail, nTout, nNocc) = tool_stats(toolname)
+        outHTML.write(f"<td><div class='tooltip'>{percent(TP,(TP+FP))}% <span class='tooltiptext'>{TP} error diagnostics are correct out of {TP+FP}</span></div></td>")
+      outHTML.write("</tr>\n<tr><td>Accuracy</td>")
+      for toolname in used_toolnames:
+        (TP, TN, FP, FN, nPort, nFail, nTout, nNocc) = tool_stats(toolname)
+        outHTML.write(f"<td><div class='tooltip'>{percent((TP+TN),(TP+TN+FP+FN))}% <span class='tooltiptext'>{TP+TN} correct diagnostics in total, out of {TP+TN+FP+FN} diagnostics</span></div></td>")
+      outHTML.write("</tr></table>")
+      outHTML.write("<p>Hover over the values for details. API coverage issues, timeouts and failures are not taken into account when computing the other metrics, hence the differences in the total number of tests.</p>")
+
+      # Add the generated radar plots
+      if plots_loaded:
+          for toolname in used_toolnames:
+              outHTML.write(f'<img src="plots/ext_radar_all_{toolname}.svg" alt="Radar plot over all error types for the {displayed_name[toolname]} tool." />')
+
+      outHTML.write(f"</body></html>\n")
+
+    ########################
+    # Per tool statistics summary
+    ########################
+    for toolname in used_toolnames:
+        TP = len(results[toolname]['TRUE_POS'])
+        TN = len(results[toolname]['TRUE_NEG'])
+        FP = len(results[toolname]['FALSE_POS'])
+        FN = len(results[toolname]['FALSE_NEG'])
+        nPort = len(results[toolname]['unimplemented'])
+        nFail = len(results[toolname]['failure'])
+        other = len(results[toolname]['other'])
+        nTout = len(results[toolname]['timeout'])
+        passed = TP + TN
+        total = passed + FP + FN + nTout + nPort + nFail + other
+
+        print(f"XXXXXXXXX Final results for {toolname}")
+        if FP > 0:
+            print(f"XXX {FP} false positives")
+            if len(used_toolnames) == 1:
+                for p in results[toolname]['FALSE_POS']:
+                    print(f"  {p}")
+        if FN > 0:
+            print(f"XXX {FN} false negatives")
+            if len(used_toolnames) == 1:
+                for p in results[toolname]['FALSE_NEG']:
+                    print(f"  {p}")
+        if nTout > 0:
+            print(f"XXX {nTout} timeouts")
+            if len(used_toolnames) == 1:
+                for p in results[toolname]['timeout']:
+                    print(f"  {p}")
+        if nPort > 0:
+            print(f"XXX {nPort} API coverage issues")
+            if len(used_toolnames) == 1:
+                for p in results[toolname]['unimplemented']:
+                    print(f"  {p}")
+        if nFail > 0:
+            print(f"XXX {nFail} tool failures")
+            if len(used_toolnames) == 1:
+                for p in results[toolname]['failure']:
+                    print(f"  {p}")
+        if other > 0:
+            print(f"XXX {other} inconclusive runs (output parsing failure)")
+            if len(used_toolnames) == 1:
+                for p in results[toolname]['other']:
+                    print(f"  {p}")
+
+        print(f"\nXXXX Summary for {toolname} XXXX  {passed} test{'' if passed == 1 else 's'} passed (out of {total})")
+        print(f"\nFP = {FP}  FN = {FN}  TP = {TP}  TN = {TN}")
+        print(f"API coverage: {percent(nPort,total,compl=True)}% ({nPort} tests with unimplemented calls out of {total})")
+        print(
+            f"Robustness: {percent((nTout+nFail),(total-nPort),compl=True)}% ({nTout} timeouts and {nFail} failures out of {total-nPort})\n")
+
+        print(f"Recall: {percent(TP,(TP+FN))}% (found {TP} errors out of {TP+FN})")
+        print(f"Specificity: {percent(TN,(TN+FP))}% (recognized {TN} correct codes out of {TN+FP})")
+        print(f"Precision: {percent(TP,(TP+FP))}% ({TP} error diagnostics are correct out of {TP+FP})")
+        print(f"Accuracy: {percent((TP+TN),(TP+TN+FP+FN))}% ({TP+TN} correct diagnostics in total, out of {TP+TN+FP+FN} diagnostics)")
+        print(f"\nTotal time of {toolname} for all tests (not counting the timeouts): {seconds2human(total_elapsed[toolname])} ({total_elapsed[toolname]} seconds)")
+
+    os.chdir(here)
+
+def cmd_latex(rootdir, toolnames):
+    here = os.getcwd()
+    os.chdir(rootdir)
+    results = {}
+    total_elapsed = {}
+    used_toolnames = []
+
+    # select the tools for which we have some results
+    print("Produce the stats for:", end='')
+    for toolname in toolnames:
+        if not toolname in tools:
+            raise Exception(f"Tool {toolname} does not seem to be a valid name.")
+
+        if os.path.exists(f'logs/{toolname}'):
+            used_toolnames.append(toolname)
+            print(f' {toolname}', end="")
+
+            # To compute timing statistics
+            total_elapsed[toolname] = 0
+    print(".")
+
+    # Initialize the data structure to gather all results
+    results = {'total':{}, 'error':{}}
+    timing = {'total':{}, 'error':{}}
+    for error in error_scope:
+        results[error] = {}
+        timing[error] = {}
+        for toolname in used_toolnames:
+            results[error][toolname] = {'failure':[], 'timeout':[], 'unimplemented':[], 'other':[], 'TRUE_NEG':[], 'TRUE_POS':[], 'FALSE_NEG':[], 'FALSE_POS':[]}
+            results['total'][toolname] = {'failure':[], 'timeout':[], 'unimplemented':[], 'other':[], 'TRUE_NEG':[], 'TRUE_POS':[], 'FALSE_NEG':[], 'FALSE_POS':[],'error':[],'OK':[]}
+            results['error'][toolname] = {'failure':[], 'timeout':[], 'unimplemented':[], 'other':[], 'TRUE_NEG':[], 'TRUE_POS':[], 'FALSE_NEG':[], 'FALSE_POS':[],'error':[],'OK':[]}
+            timing[error][toolname] = []
+            timing['total'][toolname] = []
+            timing['error'][toolname] = []
+
+    # Get all data from the caches
+    for test in todo:
+        binary=re.sub(r'\.c', '', os.path.basename(test['filename']))
+        ID=test['id']
+        test_id = f"{binary}_{ID}"
+        expected=test['expect']
+
+        for toolname in used_toolnames:
+            (res_category, elapsed, diagnostic, outcome) = categorize(tool=tools[toolname], toolname=toolname, test_id=test_id, expected=expected)
+            error = possible_details[test['detail']]
+            results[error][toolname][res_category].append(test_id)
+            results['total'][toolname][res_category].append(test_id)
+            timing[error][toolname].append(float(elapsed))
+            timing['total'][toolname].append(float(elapsed))
+            if expected == 'OK':
+                results['total'][toolname]['OK'].append(test_id)
+            else:
+                results['total'][toolname]['error'].append(test_id)
+                results['error'][toolname][res_category].append(test_id)
+                timing['error'][toolname].append(float(elapsed))
+
+    # Produce the results per tool and per category
+    with open(f'{rootdir}/latex/results-per-category-landscape.tex', 'w') as outfile:
+        outfile.write('\\setlength\\tabcolsep{3pt} % default value: 6pt\n')
+        outfile.write("\\begin{tabular}{|l|*{"+str(len(used_toolnames))+"}{c|c|c|c||}}\n")
+        outfile.write("\\cline{2-"+str(len(used_toolnames)*4+1)+"}\n")
+        # First title line: Tool names
+        outfile.write("  \\multicolumn{1}{c|}{}")
+        for t in used_toolnames:
+            outfile.write("& \\multicolumn{4}{c||}{"+displayed_name[t]+"}")
+        outfile.write("\\\\\n")
+        outfile.write("\\cline{2-"+str(len(used_toolnames)*4+1)+"}\n")
+        # Second title line: TP&TN&FP&FN per tool
+        outfile.write("  \\multicolumn{1}{c|}{}")
+        for t in used_toolnames:
+            outfile.write("& \\rotatebox{90}{Build error~~} &\\rotatebox{90}{Failure} & \\rotatebox{90}{Incorrect} & \\rotatebox{90}{Correct~~} ")
+        outfile.write("\\\\\\hline\n")
+
+        for error in error_scope:
+            if error == 'FOK':
+                outfile.write("\\hline\n")
+            outfile.write(displayed_name[error])
+            for toolname in used_toolnames:
+                port = len(results[error][toolname]['unimplemented'])
+                othr = len(results[error][toolname]['other'])
+                fail = len(results[error][toolname]['failure'])
+                tout = len(results[error][toolname]['timeout'])
+                good = len(results[error][toolname]['TRUE_POS']) + len(results[error][toolname]['TRUE_NEG'])
+                bad  = len(results[error][toolname]['FALSE_POS']) + len(results[error][toolname]['FALSE_NEG'])
+                outfile.write(f"&{port+othr} & {fail+tout} &{bad}&{good}")
+                #results[error][toolname] = {'failure':[], 'timeout':[], 'unimplemented':[], 'other':[], 'TRUE_NEG':[], 'TRUE_POS':[], 'FALSE_NEG':[], 'FALSE_POS':[]}
+            outfile.write("\\\\\\hline\n")
+        outfile.write("\\hline\n \\textbf{Total}")
+        for toolname in used_toolnames:
+            port = othr = fail = tout = good = bad = 0
+            for error in error_scope:
+                port += len(results[error][toolname]['unimplemented'])
+                othr += len(results[error][toolname]['other'])
+                fail += len(results[error][toolname]['failure'])
+                tout += len(results[error][toolname]['timeout'])
+                good += len(results[error][toolname]['TRUE_POS']) + len(results[error][toolname]['TRUE_NEG'])
+                bad  += len(results[error][toolname]['FALSE_POS']) + len(results[error][toolname]['FALSE_NEG'])
+            outfile.write(f"&{port+othr} & {fail+tout} &{bad}&{good}")
+        outfile.write("\\\\\\hline\n")
+
+        # Finish the table
+        outfile.write("\\end{tabular}\n")
+        outfile.write('\\setlength\\tabcolsep{6pt} % Back to default value\n')
+
+    # Produce the results per tool and per category
+    with open(f'{rootdir}/latex/results-per-category-portrait.tex', 'w') as outfile:
+        outfile.write('\\setlength\\tabcolsep{1.5pt} % default value: 6pt\n')
+        # To split the table in two lines, do this: for errors in [['FOK','AInvalidParam','BResLeak','BReqLifecycle','BLocalConcurrency'], ['CMatch','DRace','DMatch','DGlobalConcurrency','EBufferingHazard']]:
+        for errors in [['FOK','AInvalidParam','BResLeak','BReqLifecycle','BLocalConcurrency', 'CMatch','DRace','DMatch','DGlobalConcurrency','InputHazard','EBufferingHazard']]:
+            outfile.write("\\begin{tabular}{|l@{}|*{"+str(len(errors)-1)+"}{c|c|c|c||} c|c|c|c|}\n") # the last column group stays outside the multiplier (hence len-1) to avoid a trailing ||
+            outfile.write(f"\\cline{{2-{len(errors)*4+1}}}\n")
+            # First title line: error categories
+            outfile.write("  \\multicolumn{1}{c|}{}")
+            for error in errors:
+                sep = '|' if error == errors[-1] else '||' # Use || as a separator, unless that's the last column
+                outfile.write(f"&\\multicolumn{{4}}{{c{sep}}}{{{displayed_name[error].split(' ')[0]}}}")
+            outfile.write("\\\\\n  \\multicolumn{1}{c|}{}")
+            for error in errors:
+                sep = '|' if error == errors[-1] else '||' # Use || as a separator, unless that's the last column
+                outfile.write(f"&\\multicolumn{{4}}{{c{sep}}}{{{displayed_name[error].split(' ')[1]}}}")
+            outfile.write(f"\\\\\\cline{{2-{len(errors)*4+1}}}\n")
+            outfile.write("\\multicolumn{1}{c|}{}")
+            for error in errors:
+                outfile.write("& \\rotatebox{90}{Build error~~} & \\rotatebox{90}{Runtime error} &") #\\rotatebox{90}{Timeout~~}&
+                if error == 'FOK':
+                    outfile.write(" \\rotatebox{90}{False \\textbf{Positive}} & \\rotatebox{90}{True \\textbf{Negative}~~} \n")
+                else:
+                    outfile.write(" \\rotatebox{90}{False Negative} & \\rotatebox{90}{True Positive~} \n")
+            outfile.write("\\\\\\hline\n")
+
+            # Find the best tool
+            best = {}
+            for error in errors:
+                best[error] = 0
+                for toolname in used_toolnames:
+                    val = len(results[error][toolname]['TRUE_POS']) + len(results[error][toolname]['TRUE_NEG'])
+                    if val > best[error]:
+                        best[error] = val
+                # print(f"Best for {error} has {best[error]}")
+
+            # display all tools
+            for toolname in used_toolnames:
+                outfile.write(f'{displayed_name[toolname]}')
+                for error in errors:
+                    port = len(results[error][toolname]['unimplemented'])
+                    othr = len(results[error][toolname]['other'])
+                    fail = len(results[error][toolname]['failure'])
+                    tout = len(results[error][toolname]['timeout'])
+                    good = len(results[error][toolname]['TRUE_POS']) + len(results[error][toolname]['TRUE_NEG'])
+                    bad  = len(results[error][toolname]['FALSE_POS']) + len(results[error][toolname]['FALSE_NEG'])
+                    if good == best[error]: # The best tool is displayed in bold
+                        outfile.write(f"&{{\\bf {port}}}&{{\\bf {tout+othr+fail}}}&{{\\bf {bad}}}&{{\\bf {good}}}")
+                    else:
+                        outfile.write(f"&{port}&{tout+othr+fail}&{bad}&{good}")
+                outfile.write("\\\\\\hline\n")
+
+            outfile.write("\\hline\\textit{Ideal tool}")
+            for error in errors:
+                toolname = used_toolnames[0]
+                total  = len(results[error][toolname]['unimplemented']) + len(results[error][toolname]['other']) + len(results[error][toolname]['failure'])
+                total += len(results[error][toolname]['timeout']) + len(results[error][toolname]['TRUE_POS']) + len(results[error][toolname]['TRUE_NEG'])
+                total += len(results[error][toolname]['FALSE_POS']) + len(results[error][toolname]['FALSE_NEG'])
+
+                outfile.write(f"& \\textit{{0}} &\\textit{{0}} & \\textit{{0}} & \\textit{{{total}}} \n")
+            outfile.write("\\\\\\hline\n")
+
+            # Finish the table
+            outfile.write("\\end{tabular}\n\n\\medskip\n")
+        outfile.write('\\setlength\\tabcolsep{6pt} % Back to default value\n')
+
+    # Produce the landscape results+metric per tool for all category
+    with open(f'{rootdir}/latex/results-summary.tex', 'w') as outfile:
+        outfile.write('\\setlength\\tabcolsep{2pt} % default value: 6pt\n')
+        outfile.write('\\begin{tabular}{|l|*{3}{c|}|*{4}{c|}|*{2}{c|}|*{4}{c|}|c|}\\hline\n')
+        outfile.write('  \\multirow{2}{*}{ \\textbf{Tool}} &  \\multicolumn{3}{c||}{Errors} &\\multicolumn{4}{c||}{Results}&\\multicolumn{2}{c||}{Robustness} &\\multicolumn{4}{c||}{Usefulness}&\\textbf{Overall}\\\\\\cline{2-14}\n')
+        outfile.write('& \\textbf{CE}&\\textbf{TO}&\\textbf{RE}  & \\textbf{TP} & \\textbf{TN} & \\textbf{FP} & \\textbf{FN} &\\textbf{Coverage} & \\textbf{Conclusiveness} & \\textbf{Specificity}&\\textbf{Recall}& \\textbf{Precision}& \\textbf{F1 Score}    & \\textbf{accuracy}\\\\\\hline \n')
+
+        # Search the best values
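+        # (a value matching these extrema is later rendered in bold through bold_if())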
+        best = {'TP':0, 'TN':0, 'FP':999999, 'FN':9999999, 'coverage':0, 'completion':0, 'specificity':0, 'recall':0, 'precision':0, 'F1':0, 'accuracy':0}
+        for toolname in used_toolnames:
+            TP = len(results['total'][toolname]['TRUE_POS'])
+            TN = len(results['total'][toolname]['TRUE_NEG'])
+            FN = len(results['total'][toolname]['FALSE_NEG'])
+            FP = len(results['total'][toolname]['FALSE_POS'])
+            if TP > best['TP']:
+                best['TP'] = TP
+            if TN > best['TN']:
+                best['TN'] = TN
+            if FP < best['FP']:
+                best['FP'] = FP
+            if FN < best['FN']:
+                best['FN'] = FN
+
+            port = len(results['total'][toolname]['unimplemented'])
+            fail = len(results['total'][toolname]['failure'])
+            othr = len(results['total'][toolname]['other'])
+            tout = len(results['total'][toolname]['timeout'])
+            total = TP + TN + FP + FN + port + fail + othr + tout
+            if (TN+FP) != 0 and TP+FN != 0 and TP+FP != 0:
+                coverage = float(percent(port,total,compl=True,one=True))
+                if coverage > best['coverage']:
+                    best['coverage'] = coverage
+                completion = float(percent((port+fail+othr+tout),(total),compl=True,one=True))
+                if completion > best['completion']:
+                    best['completion'] = completion
+                specificity = float(percent(TN,(TN+FP),one=True))
+                if specificity > best['specificity']:
+                    best['specificity'] = specificity
+                recall = float(percent(TP,(TP+FN),one=True))
+                if recall > best['recall']:
+                    best['recall'] = recall
+                precision = float(percent(TP,(TP+FP),one=True))
+                if precision > best['precision']:
+                    best['precision'] = precision
+
+                # Recompute precision & recall without rounding, to match the value computed when displaying the result
+                precision = TP/(TP+FP)
+                recall = TP/(TP+FN)
+                F1 = float(percent(2*precision*recall,(precision+recall),one=True))
+                if F1 > best['F1']:
+                    best['F1'] = F1
+                accuracy = float(percent(TP+TN,(TP+TN+FP+FN+port+fail+othr+tout),one=True))
+                if accuracy > best['accuracy']:
+                    best['accuracy'] = accuracy
+            else:
+                print(f"WARNING: {toolname} not considered for the best scores: TN+FP={TN+FP} TP+FN={TP+FN} TP+FP={TP+FP}")
+
+
+        for key in best: # Cleanup the data to ensure that the equality test matches in bold_if()
+            if best[key] == 1.0:
+                best[key] = "1"
+        print(f"best coverage: {best['coverage']}")
+        print(f"best: {best}")
+
+        for toolname in used_toolnames:
+            outfile.write(f'{displayed_name[toolname]}&\n')
+
+            port = len(results['total'][toolname]['unimplemented'])
+            fail = len(results['total'][toolname]['failure'])
+            othr = len(results['total'][toolname]['other'])
+            tout = len(results['total'][toolname]['timeout'])
+            TP = len(results['total'][toolname]['TRUE_POS'])
+            TN = len(results['total'][toolname]['TRUE_NEG'])
+            FN = len(results['total'][toolname]['FALSE_NEG'])
+            FP = len(results['total'][toolname]['FALSE_POS'])
+
+            total = TP + TN + FP + FN + port + fail + othr + tout
+
+            outfile.write(f"{bold_if(port,0)}&{bold_if(tout,0)}&{bold_if(fail+othr,0)}")
+            outfile.write(f"&{bold_if(TP,best['TP'])}&{bold_if(TN,best['TN'])}&{bold_if(FP,best['FP'])}&{bold_if(FN,best['FN'])}&")
+
+            # Coverage & Completion
+            coverage = percent(port,total,compl=True,one=True)
+            completion = percent((port+fail+othr+tout),(total),compl=True,one=True)
+            outfile.write(f"{bold_if(coverage,best['coverage'])} &{bold_if(completion, best['completion'])}&")
+            # Specificity: recognized {TN} correct codes out of {TN+FP}
+            specificity = percent(TN,(TN+FP),one=True)
+            outfile.write(f'{bold_if(specificity, best["specificity"])}&')
+            # Recall: found {TP} errors out of {TP+FN} ;Precision: {TP} diagnostic of error are correct out of {TP+FP}) ;
+            recall = percent(TP,(TP+FN),one=True)
+            precision = percent(TP,(TP+FP),one=True)
+            outfile.write(f'{bold_if(recall, best["recall"])} & {bold_if(precision, best["precision"])} &')
+            # F1 Score
+            if TP+FP > 0 and TP+FN > 0:
+                precision = TP/(TP+FP)
+                recall = TP/(TP+FN)
+                F1 = percent(2*precision*recall,(precision+recall),one=True)
+                outfile.write(f'{bold_if(F1, best["F1"])}&')
+            else:
+                outfile.write('(error)&')
+            # Accuracy: {TP+TN} correct diagnostics in total, out of all tests {TP+TN+FP+FN+port+fail+othr+tout} diagnostics
+            accuracy = percent(TP+TN,(TP+TN+FP+FN+port+fail+othr+tout),one=True)
+            outfile.write(f'{bold_if(accuracy, best["accuracy"])}')
+
+            outfile.write(f'\\\\\\hline\n')
+        outfile.write(f'\\hline\n')
+
+        outfile.write('\\textit{Ideal tool}&\\textit{0}&\\textit{0}&\\textit{0}&')
+        outfile.write(f"\\textit{{{len(results['total'][used_toolnames[0]]['error'])}}}&\\textit{{{len(results['total'][used_toolnames[0]]['OK'])}}}&\\textit{{0}}&\\textit{{0}}&")
+        outfile.write("\\textit{1}&\\textit{1}&\\textit{1}&\\textit{1}&\\textit{1}&\\textit{1}&\\textit{1} \\\\\\hline\n")
+
+        outfile.write('\\end{tabular}\n')
+        outfile.write('\\setlength\\tabcolsep{6pt} % Back to default value\n')
+
+    # Produce the table with the metrics per tool per category (not used, as we put everything on one line only)
+    with open(f'{rootdir}/latex/results-metrics.tex', 'w') as outfile:
+        outfile.write('\\begin{tabular}{|l|*{7}{c|}}\\hline\n')
+        outfile.write('  \\multirow{2}{*}{ \\textbf{Tool}} &  \\multicolumn{2}{c|}{Robustness} &\\multicolumn{4}{c|}{Usefulness}&\\textbf{Overall}\\\\\\cline{2-7}\n')
+
+        outfile.write('  &  \\textbf{Coverage} & \\textbf{Conclusiveness} & \\textbf{Specificity}&\\textbf{Recall}& \\textbf{Precision}& \\textbf{F1 Score}    & \\textbf{accuracy}   \\\\\\hline \n')
+
+        for toolname in used_toolnames:
+            outfile.write(f'{displayed_name[toolname]}&\n')
+
+            nPort = len(results['total'][toolname]['unimplemented'])
+            nFail = len(results['total'][toolname]['failure']) + len(results['total'][toolname]['other'])
+            nTout = len(results['total'][toolname]['timeout'])
+            TP = len(results['total'][toolname]['TRUE_POS'])
+            TN = len(results['total'][toolname]['TRUE_NEG'])
+            FN = len(results['total'][toolname]['FALSE_NEG'])
+            FP = len(results['total'][toolname]['FALSE_POS'])
+
+            total = TP + TN + FP + FN + nTout + nPort + nFail
+
+            # Coverage & Completion
+            outfile.write(f'{percent(nPort,total,compl=True,one=True)} &{percent((nTout+nFail+nPort),(total),compl=True,one=True)}&')
+            # Specificity: recognized {TN} correct codes out of {TN+FP}
+            outfile.write(f'{percent(TN,(TN+FP),one=True)}&')
+            # Recall: found {TP} errors out of {TP+FN} ;Precision: {TP} diagnostic of error are correct out of {TP+FP}) ;
+            outfile.write(f'{percent(TP,(TP+FN),one=True)} & {percent(TP,(TP+FP),one=True)} &')
+            # F1 Score
+            if TP+FP > 0 and TP+FN > 0:
+                precision = TP/(TP+FP)
+                recall = TP/(TP+FN)
+                outfile.write(f'{percent(2*precision*recall,(precision+recall),one=True)}&')
+            else:
+                outfile.write('(error)&')
+            # Accuracy: {TP+TN} correct diagnostics in total, out of all tests {TP+TN+FP+FN+nTout+nFail+nPort} diagnostics
+            outfile.write(f'{percent(TP+TN,(TP+TN+FP+FN+nTout+nFail+nPort),one=True)}')
+            outfile.write(f'\\\\\\hline\n')
+
+        outfile.write("\\hline\n\\textit{Ideal tool}&\\textit{1}&\\textit{1}&\\textit{1}&\\textit{1}&\\textit{1}&\\textit{1}&\\textit{1}\\\\\\hline\n")
+
+        outfile.write('\\end{tabular}\n')
+
+    # Produce the timing results
+    with open(f'{rootdir}/latex/results-timings.tex', 'w') as outfile:
+        outfile.write(f"\\begin{{tabular}}{{|c|c|*{{{len(used_toolnames)}}}{{c|}}}}\n")
+        outfile.write(f"\\cline{{3-{len(used_toolnames)+2}}}\n")
+        # First title line: Tool names
+        outfile.write("  \\multicolumn{2}{c|}{}")
+        for t in used_toolnames:
+            outfile.write(f"& {displayed_name[t]}")
+        outfile.write(f"\\\\\\hline\n")
+
+        def show_line(key, display_name):
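+            """Write a block of 3 rows (mean time, standard deviation, number of timeouts) for the given scope key ('total', 'error' or an error category)."""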
+            outfile.write(f"\\multirow{{3}}{{*}}{{{display_name}}} & Mean time ")
+            for toolname in used_toolnames:
+                if len(timing[key][toolname]) >= 1:
+                    mean = statistics.mean(timing[key][toolname])
+                    outfile.write(f"&{round(mean,2)}")
+                else:
+                    outfile.write("&(error)")
+                    print(f"Error while computing the mean of timing[{key}][{toolname}] (needs at least one value)")
+            outfile.write(f"\\\\\\cline{{2-{len(used_toolnames)+2}}}\n")
+
+            outfile.write(f"& StdDev ")
+            for toolname in used_toolnames:
+                if len(timing[key][toolname]) >= 2:
+                    stdev = statistics.stdev(timing[key][toolname])
+                    outfile.write(f"&{round(stdev,2)}")
+                else:
+                    outfile.write("&(error)")
+                    print(f"Error while computing the standard deviation of timing[{key}][{toolname}] (needs at least two values)")
+            outfile.write(f"\\\\\\cline{{2-{len(used_toolnames)+2}}}\n")
+
+            outfile.write(f" & \\# timeouts ")
+            for toolname in used_toolnames:
+                tout = len(results[key][toolname]['timeout'])
+                if tout == 0:
+                    tout = '-'
+                outfile.write(f"&{tout}")
+            outfile.write("\\\\\\hline\n")
+
+        for error in error_scope:
+            if error == 'FOK':
+                outfile.write('\\hline\n')
+                show_line('error', '\\textit{All incorrect tests}')
+                title = '\\textit{All correct tests}'
+            else:
+                title = f"\\makecell{{{displayed_name[error]} \\\\ ({error_scope[error]})}}"
+
+            show_line(error, title)
+        outfile.write('\\hline\n')
+        show_line('total', '\\textbf{All tests}')
+
+        outfile.write(f"\\multicolumn{{2}}{{|c|}}{{\\textbf{{Total time}}}} ")
+        for toolname in used_toolnames:
+            secs = sum(timing['total'][toolname])
+            days = int(secs//86400)
+            hours = int((secs - days*86400)//3600)
+            minutes = int((secs - days*86400 - hours*3600)//60)
+            seconds = secs - days*86400 - hours*3600 - minutes*60
+            outfile.write("&")
+            if hours > 0:
+                outfile.write(f"{hours}h")
+            if hours >0 or minutes > 0:
+                outfile.write(f"{minutes}m")
+            outfile.write(f"{int(seconds)}s")
+        outfile.write(f"\\\\\\hline\n")
+
+        # Last line: Tool names again
+        outfile.write("  \\multicolumn{2}{c|}{}")
+        for t in used_toolnames:
+            outfile.write(f"& {displayed_name[t]}")
+        outfile.write(f"\\\\\\cline{{3-{len(used_toolnames)+2}}}\n")
+
+        outfile.write(f"\\end{{tabular}}\n")
+
+
+    with open(f'{rootdir}/latex/nondeterministic-results-summary.tex', 'w') as outfile:
+        outfile.write('\\setlength\\tabcolsep{2pt}\n')
+        outfile.write('\\begin{tabular}{|l|*{1}{c|}|*{6}{c|}|*{2}{c|}}\\hline\n')
+        outfile.write('  \\multirow{2}{*}{ \\textbf{Tool}} &  \\multicolumn{1}{c||}{Errors} &\\multicolumn{6}{c||}{Results}&\\multicolumn{2}{c|}{Overall}\\\\\\cline{2-10}\n')
+        outfile.write('&\\textbf{SE}  &\\textbf{STP}&\\textbf{CTP}&\\textbf{SFN}&\\textbf{SFP}&\\textbf{CFP}&\\textbf{STN}  &\\textbf{Accuracy\\textsuperscript{+}}&\\textbf{Accuracy\\textsuperscript{-}}\\\\\\hline \n')
+
+        best = {
+            'STP':0, 'STN':0, 'CTP':0, 'CFP':999999, 'SFP':999999, 'SFN':9999999,
+            'CE':999999, 'TO':999999, 'RE':999999, 'O':999999, 'SE':999999,
+            'accp':0, 'accm':0
+        }
+
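+        # Fold the CE, TO and RE outcomes (presumably compilation errors, timeouts and runtime errors) into the single SE column of this table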
+        def merge_cat(x):
+            if x not in ['CE', 'TO', 'RE']:
+                return x
+            else:
+                return 'SE'
+
+        total = {'OK': 0, 'Error': 0}
+        ext_results = {}
+        for toolname in used_toolnames:
+            files_results = categorize_all_files(tools[toolname], toolname, todo)
+            ext_results[toolname] = {
+                'STP':[], 'STN':[], 'CTP':[], 'CFP':[], 'SFP':[], 'SFN':[],
+                'CE':[], 'TO':[], 'RE':[], 'O':[], 'SE':[],
+                'accp':0, 'accm':0,
+                'total':{'OK':0, 'Error':0}
+            }
+
+            for file in files_results:
+                ext_results[toolname][merge_cat(files_results[file]['result'])].append(file)
+
+                if files_results[file]['expected'] == 'OK':
+                    ext_results[toolname]['total']['OK'] += 1
+                else:
+                    ext_results[toolname]['total']['Error'] += 1
+
+            total = ext_results[toolname]['total']['Error'] + ext_results[toolname]['total']['OK']
+            accp = round((len(ext_results[toolname]['STP']) + len(ext_results[toolname]['STN']) + len(ext_results[toolname]['CTP'])) / total, 2)
+            accm = round((len(ext_results[toolname]['STP']) + len(ext_results[toolname]['STN'])) / total, 2)
+
+            ext_results[toolname]['accp'] = accp
+            ext_results[toolname]['accm'] = accm
+
+            # Compute best results
+            for metric in best:
+                if metric in ['accp', 'accm']:
+                    if best[metric] < ext_results[toolname][metric]:
+                        best[metric] = ext_results[toolname][metric]
+                elif metric in ['CFP', 'SFP', 'SFN', 'CE', 'TO', 'RE', 'O', 'SE']:
+                    if best[metric] > len(ext_results[toolname][metric]):
+                        best[metric] = len(ext_results[toolname][metric])
+                else:
+                    if best[metric] < len(ext_results[toolname][metric]):
+                        best[metric] = len(ext_results[toolname][metric])
+
+        for toolname in used_toolnames:
+            format_if_best = lambda res : f" {{\\bf {len(ext_results[toolname][res])}}}" if best[res] == len(ext_results[toolname][res]) else f" {len(ext_results[toolname][res])}"
+
+            TP = format_if_best('STP')
+            TN = format_if_best('STN')
+            CTP = format_if_best('CTP')
+            CFP = format_if_best('CFP')
+            FP = format_if_best('SFP')
+            FN = format_if_best('SFN')
+            CE = format_if_best('CE')
+            TO = format_if_best('TO')
+            RE = format_if_best('RE')
+            O = format_if_best('O')
+            SE = format_if_best('SE')
+
+            accp = str(ext_results[toolname]['accp']) if ext_results[toolname]['accp'] < best['accp'] else f"{{\\bf  {ext_results[toolname]['accp']} }}"
+            accm = str(ext_results[toolname]['accm']) if ext_results[toolname]['accm'] < best['accm'] else f"{{\\bf  {ext_results[toolname]['accm']} }}"
+
+            outfile.write(f'{displayed_name[toolname]} & {SE} &')
+            outfile.write(f"{TP} & {CTP} & {FN} & {FP} & {CFP} & {TN} & ")
+            outfile.write(f"{accp} & {accm} \\\\\\hline\n")
+
+
+        outfile.write('\\textit{Ideal tool}&\\textit{0}&')
+        outfile.write(f"\\textit{{{ext_results[used_toolnames[0]]['total']['Error']}}} & \\textit{{0}} & \\textit{{0}} & \\textit{{0}} & \\textit{{0}} & \\textit{{{ext_results[used_toolnames[0]]['total']['OK']}}} & ")
+        outfile.write("\\textit{1}&\\textit{1} \\\\\\hline\n")
+
+        outfile.write('\\end{tabular}\n')
+        outfile.write('\\setlength\\tabcolsep{6pt}\n')
+
+    with open(f'{rootdir}/latex/files-count.tex', 'w') as outfile:
+        files_results = categorize_all_files(tools[used_toolnames[0]], used_toolnames[0], todo)
+
+        error_types = {}
+        error_types_tests = {}
+        for error in error_scope:
+            error_types[error] = 0
+            error_types_tests[error] = 0
+
+        # Count number of code by expected type of results
+        for f in files_results:
+            error_types[possible_details[files_results[f]['detail']]] += 1
+
+        for t in todo:
+            error_types_tests[possible_details[t['detail']]] += 1
+
+        outfile.write("\\begin{tabular}{|l|c|c|}\n")
+        outfile.write("  \\hline\n")
+        outfile.write("  \\textbf{Error category} & \\textbf{Number of codes} & \\textbf{Number of tests}\\\\\n")
+        outfile.write("  \\hline\n")
+        for et in error_types:
+            if et in ['BLocalConcurrency', 'DRace', 'DGlobalConcurrency',
+                      'EBufferingHazard', 'InputHazard']:
+                outfile.write(f"  \\textbf{{{displayed_name[et]}}} & \\textbf{{{error_types[et]}}}& \\textbf{{{error_types_tests[et]}}} \\\\\n")
+            else:
+                outfile.write(f"  \\textit{{{displayed_name[et]}}} & {error_types[et]} & {error_types_tests[et]}\\\\\n")
+
+        outfile.write("  \\hline\n")
+        outfile.write(f"  \\textbf{{Total}} & {len(files_results)} & {len(todo)}\\\\\n")
+        outfile.write("  \\hline\n")
+        outfile.write("\\end{tabular}\n")
+
+    def resultsPerCategory(suffix, hazard=False):
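+        """Write latex/nd-results-per-category-portrait-{suffix}.tex for the error categories listed below.
+        hazard=True selects the concurrency/hazard categories and adds the CTP/CFP and Accuracy- columns."""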
+        category = ['FOK', 'AInvalidParam', 'BResLeak', 'DMatch', 'CMatch', 'BReqLifecycle', 'BEpochLifecycle']
+        if hazard:
+            category = ['BLocalConcurrency', 'DGlobalConcurrency', 'DRace', 'EBufferingHazard', 'InputHazard']
+
+
+        with open(f'{rootdir}/latex/nd-results-per-category-portrait-{suffix}.tex', 'w') as outfile:
+            # files_results = categorize_all_files(tools[used_toolnames[0]], used_toolnames[0], todo)
+            ext_results = {}
+            best = {}
+
+            # Put FOK at the first position
+            last = ''
+            for e in category:
+                last = e
+                best[e] = {
+                    'STP':0, 'STN':0, 'CTP':0, 'CFP':99999, 'SFP':99999, 'SFN':99999,
+                    'SE':99999,
+                    'accp':0, 'accm':0
+                }
+
+            for toolname in used_toolnames:
+                ext_results[toolname] = {}
+
+                files_results = categorize_all_files(tools[toolname], toolname, todo)
+                for error in category:
+                    ext_results[toolname][error] = {
+                        'STP':[], 'STN':[], 'CTP':[], 'CFP':[], 'SFP':[], 'SFN':[],
+                        'CE':[], 'TO':[], 'RE':[], 'O':[], 'SE':[],
+                        'accp':0, 'accm':0,
+                        'total':0
+                    }
+
+                    for f in files_results:
+                        if possible_details[files_results[f]['detail']] == error:
+                            ext_results[toolname][error][files_results[f]['result']].append(f)
+                            ext_results[toolname][error]['total'] += 1
+
+                    total = ext_results[toolname][error]['total']
+                    accp = round((len(ext_results[toolname][error]['STP']) + len(ext_results[toolname][error]['STN']) + len(ext_results[toolname][error]['CTP'])) / total, 2)
+                    accm = round((len(ext_results[toolname][error]['STP']) + len(ext_results[toolname][error]['STN'])) / total, 2)
+
+                    ext_results[toolname][error]['accp'] = accp
+                    ext_results[toolname][error]['accm'] = accm
+
+                for error in category:
+                    err = (len(ext_results[toolname][error]['CE'])
+                           + len(ext_results[toolname][error]['TO'])
+                           + len(ext_results[toolname][error]['RE'])
+                           + len(ext_results[toolname][error]['O'])
+                           + len(ext_results[toolname][error]['SE']))
+
+                    if best[error]['SE'] > err:
+                        best[error]['SE'] = err
+
+                    for res in ['CFP', 'SFP', 'SFN']:
+                        if best[error][res] > len(ext_results[toolname][error][res]):
+                            best[error][res] = len(ext_results[toolname][error][res])
+
+                    for res in ['STP', 'CTP', 'STN']:
+                        if best[error][res] < len(ext_results[toolname][error][res]):
+                            best[error][res] = len(ext_results[toolname][error][res])
+
+                    for res in ['accp', 'accm']:
+                        if best[error][res] < ext_results[toolname][error][res]:
+                            best[error][res] = ext_results[toolname][error][res]
+
+            ncol = 4 if not hazard else 6
+            align = 'c|c|c|c|' if not hazard else 'c|c|c|c|c|c|'
+
+            outfile.write("\\setlength\\tabcolsep{1.5pt}\n")
+            outfile.write(f"\\begin{{tabular}}{{|l|*{{{len(category)-1}}}{{ {align} |}} {align}}}\n")
+            outfile.write(f"\\cline{{2- {(len(category) * ncol) + 1} }}\n")
+
+            outfile.write("  \\multicolumn{1}{c|}{}")
+            for error in category:
+                if error == last:
+                    outfile.write(f" & \\multicolumn{{{ncol}}}{{c|}}")
+                else:
+                    outfile.write(f" & \\multicolumn{{{ncol}}}{{c||}}")
+
+                outfile.write(f"{{\\it {displayed_name[error].split()[0]}}}")
+
+            outfile.write("\\\\\n")
+
+            outfile.write("  \\multicolumn{1}{c|}{}")
+            for error in category:
+                if error == last:
+                    outfile.write(f" & \\multicolumn{{{ncol}}}{{c|}}")
+                else:
+                    outfile.write(f" & \\multicolumn{{{ncol}}}{{c||}}")
+
+                outfile.write(f"{{\\it {displayed_name[error].split()[1]}}}")
+
+            outfile.write("\\\\\n")
+            outfile.write(f"\\cline{{2- {(len(category) * ncol) + 1} }}\n")
+
+            outfile.write("  \\multicolumn{1}{c|}{}")
+            for error in category:
+                outfile.write(" & \\rotatebox{90}{SE}")
+                if error == "FOK":
+                    outfile.write(" & \\rotatebox{90}{{\\bf STN}}")
+                    if hazard:
+                        outfile.write(" & \\rotatebox{90}{{\\bf CFP}}")
+                    outfile.write(" & \\rotatebox{90}{{\\bf SFP}}")
+                else:
+                    outfile.write(" & \\rotatebox{90}{STP}")
+                    if hazard:
+                        outfile.write(" & \\rotatebox{90}{CTP}")
+                    outfile.write(" & \\rotatebox{90}{SFN}")
+
+                if hazard:
+                    outfile.write(" & \\rotatebox{90}{Accuracy\\textsuperscript{+}}")
+                    outfile.write(" & \\rotatebox{90}{Accuracy\\textsuperscript{-}}")
+                else:
+                    outfile.write(" & \\rotatebox{90}{Accuracy}")
+
+            outfile.write("\\\\\\hline\n")
+
+            for toolname in used_toolnames:
+                outfile.write(f"{displayed_name[toolname]}")
+
+                for error in category:
+                    disp_err = (len(ext_results[toolname][error]['CE'])
+                                + len(ext_results[toolname][error]['TO'])
+                                + len(ext_results[toolname][error]['RE'])
+                                + len(ext_results[toolname][error]['O'])
+                                + len(ext_results[toolname][error]['SE']))
+
+                    if disp_err == best[error]['SE']:
+                        outfile.write(f"& {{\\bf {disp_err}}}")
+                    else:
+                        outfile.write(f"& {disp_err}")
+
+                    format_if_best = lambda res : f" & {{\\bf {len(ext_results[toolname][error][res])}}}" if best[error][res] == len(ext_results[toolname][error][res]) else f" & {len(ext_results[toolname][error][res])}"
+
+                    format_if_best_2 = lambda res : f" & {{\\bf {1 if ext_results[toolname][error][res] >= 1.0 else 0 if ext_results[toolname][error][res] <= 0.0 else ext_results[toolname][error][res]} }}" if best[error][res] == ext_results[toolname][error][res] else f" & {1 if ext_results[toolname][error][res] >= 1.0 else 0 if ext_results[toolname][error][res] <= 0.0 else ext_results[toolname][error][res]}"
+
+                    if error == "FOK":
+                        outfile.write(format_if_best('STN'))
+                        if hazard:
+                            outfile.write(format_if_best('CFP'))
+                        outfile.write(format_if_best('SFP'))
+                    else:
+                        outfile.write(format_if_best('STP'))
+                        if hazard:
+                            outfile.write(format_if_best('CTP'))
+                        outfile.write(format_if_best('SFN'))
+
+                    if hazard:
+                        outfile.write(format_if_best_2('accp'))
+                        outfile.write(format_if_best_2('accm'))
+                    else:
+                        outfile.write(format_if_best_2('accp'))
+
+                outfile.write("\\\\\\hline\n")
+
+            outfile.write("\\textit{Ideal tool}")
+
+            for error in category:
+                outfile.write(" & \\textit{0}")
+                outfile.write(f" & \\textit{{ {ext_results[toolname][error]['total']} }}")
+                if hazard:
+                    outfile.write(" & \\textit{0}")
+                outfile.write(" & \\textit{0}")
+                outfile.write(" & \\textit{1}")
+                if hazard:
+                    outfile.write(" & \\textit{1}")
+
+            outfile.write("\\\\\\hline\n")
+
+
+            outfile.write("\\end{tabular}\n")
+            outfile.write("\\setlength\\tabcolsep{6pt}")
+
+    resultsPerCategory('deter', hazard=False)
+    resultsPerCategory('ndeter', hazard=True)
+
+
+    with open(f'{rootdir}/latex/reclassified-result.tex', 'w') as outfile:
+        reclassified = {}
+
+        category = ['FOK']
+        last = ''
+        for e in error_scope:
+            if e != 'FOK':
+                category.append(e)
+                last = e
+        category.append('total')
+
+        for toolname in used_toolnames:
+            reclassified[toolname] = {}
+
+            for e in category:
+                reclassified[toolname][e] = []
+
+            for test in todo:
+                binary=re.sub(r'\.c', '', os.path.basename(test['filename']))
+                ID=test['id']
+                test_id = f"{binary}_{ID}"
+
+                (res_category, elapsed, diagnostic, outcome) = categorize(tool=tools[toolname], toolname=toolname, test_id=test_id, expected=test['expect'], autoclean=False)
+
+                if not tools[toolname].is_correct_diagnostic(test_id, res_category, test['expect'], test['detail']):
+                    reclassified[toolname][possible_details[test['detail']]].append(test_id)
+                    reclassified[toolname]['total'].append(test_id)
+
+        outfile.write("\\begin{tabular}{|l|")
+        for e in category:
+            outfile.write("c|")
+        outfile.write("}\n")
+        outfile.write("  \\hline\n")
+
+        # Column title
+        outfile.write("  ")
+        for e in category:
+            if e != 'total':
+                outfile.write(f" &\\textit{{ {displayed_name[e].split()[0]} }}")
+            else:
+                outfile.write(" & ")
+
+        outfile.write("  \\\\\n")
+
+        outfile.write("  \\textbf{Tools}")
+        for e in category:
+            if e != 'total':
+                outfile.write(f" &\\textit{{ {displayed_name[e].split()[1]} }}")
+            else:
+                outfile.write(" & \\textbf{Total}")
+
+        outfile.write("\\\\\n")
+        outfile.write("  \\hline\n")
+
+        # Results
+        for toolname in used_toolnames:
+            outfile.write(f"  {displayed_name[toolname]}")
+            for e in category:
+                res = len(reclassified[toolname][e])
+                if res > 0:
+                    outfile.write(f" & \\textbf{{ {res} }}")
+                else:
+                    outfile.write(f" & {res}")
+
+            outfile.write("  \\\\\\hline\n")
+
+        outfile.write("\\end{tabular}\n")
+
+
+    files = get_C_files_from_dir(f"{rootdir}/gencodes/")
+
+    generate_errors(files, f"{rootdir}/latex/errors.tex")
+    generate_labels(files, f"{rootdir}/latex/labels.tex")
+    generate_features(files, f"{rootdir}/latex/features.tex")
+
+    os.chdir(here)
+
+
+########################
+# cmd_plots(): what to do when '-c plots' is used (generate the radar plots from the results)
+########################
+
+def make_radar_plot(name, errors, toolname, results, ext):
+    TP = 'TRUE_POS'
+    TN = 'TRUE_NEG'
+    colors = ['#4D5AAF']
+
+    N = len(errors)
+    data = []
+    spoke_labels = []
+
+    # Compute score by error type
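+    # The score is (TRUE_POS + TRUE_NEG) / all outcomes in that category, i.e. the accuracy restricted to the category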
+    for error in errors:
+        score = 0.0
+        if len(results['total'][toolname][TP]) != 0:
+            total = 0.0
+            for r in ['failure', 'timeout', 'unimplemented', 'other',
+                      'TRUE_NEG', 'TRUE_POS', 'FALSE_NEG', 'FALSE_POS']:
+                total += len(results[error][toolname][r])
+            if total != 0:
+                score = ((len(results[error][toolname][TP]) + len(results[error][toolname][TN])) / total)
+        # print (f'     +++ Result {error}: {len(results[error][toolname][TP])} ({score})')
+        data.append(score)
+        spoke_labels.append(' \n '.join(displayed_name[error].split()))
+
+    # Radar plot
+    theta = radar_factory(N, frame='polygon')
+    fig, ax = plt.subplots(subplot_kw=dict(projection='radar'))
+    fig.subplots_adjust(wspace=0.15, hspace=0.6, top=0.85, bottom=0.10)
+    ax.set_rgrids([0.2, 0.4, 0.6, 0.8])
+    ax.set_title(displayed_name[toolname],
+                 weight='bold', size='medium', position=(0.5, 1.1),
+                 horizontalalignment='center', verticalalignment='center')
+
+    ax.plot(theta, data, color=colors[0])
+    ax.fill(theta, data, facecolor=colors[0], alpha=0.4, label='_nolegend_')
+    ax.set_varlabels(spoke_labels)
+    ax.set_ylim(0,1)
+
+    plt.savefig(f'plots/{name}.{ext}')
+    plt.close('all')
+
+def make_radar_plot_ext(name, errors, toolname, results, ext):
+    TP = 'TRUE_POS'
+    TN = 'TRUE_NEG'
+    res_type = ["STP", "STN", "CTP", "CFP", "SFN", "SFP", "SE", "CE", "RE", "TO", "O"]
+    colors = ['#2ca02c', '#d62728', '#4D5AAF']
+    #colors = ['#ADB5BD', '#212529', '#495057']
+
+    N = len(errors)
+
+    data = []
+    data_p = []
+    data_m = []
+
+    # Dummy data for fillbetween
+    data_x = []
+    data_y = []
+    data_0 = []
+
+    spoke_labels = []
+    ext_results = {}
+
+    fresults = categorize_all_files(tools[toolname], toolname, todo)
+    for error in errors:
+        ext_results[error] = {
+            'STP':[], 'STN':[], 'CTP':[], 'CFP':[], 'SFP':[], 'SFN':[],
+            'CE':[], 'TO':[], 'RE':[], 'O':[], "SE":[],
+            'accp':0, 'accm':0,
+            'total':{'OK':0, 'Error':0}
+        }
+
+    for f in fresults:
+        # Get type of error
+        error = possible_details[fresults[f]['detail']]
+
+        if error not in errors:
+            continue
+
+        # Add f in right list
+        ext_results[error][fresults[f]['result']].append(f)
+
+        if fresults[f]['expected'] == 'OK':
+            ext_results[error]['total']['OK'] += 1
+        else:
+            ext_results[error]['total']['Error'] += 1
+
+    # Compute metrics
+    for error in errors:
+        # Accuracy
+        score = 0.0
+        if len(results['total'][toolname][TP]) != 0:
+            total = 0.0
+            for r in ['failure', 'timeout', 'unimplemented', 'other',
+                      'TRUE_NEG', 'TRUE_POS', 'FALSE_NEG', 'FALSE_POS']:
+                total += len(results[error][toolname][r])
+            if total != 0:
+                score = ((len(results[error][toolname][TP]) + len(results[error][toolname][TN])) / total)
+
+        data.append(score)
+
+        data_0.append(0)
+
+        # A+ and A-
+        total = ext_results[error]['total']['Error'] + ext_results[error]['total']['OK']
+        accp = round((len(ext_results[error]['STP']) + len(ext_results[error]['STN']) + len(ext_results[error]['CTP'])) / total, 2)
+        accm = round((len(ext_results[error]['STP']) + len(ext_results[error]['STN'])) / total, 2)
+
+        ext_results[error]['accp'] = accp
+        ext_results[error]['accm'] = accm
+
+        if error in ['DRace', 'EBufferingHazard', 'InputHazard', 'BLocalConcurrency', 'DGlobalConcurrency']:
+            data_p.append(ext_results[error]['accp'])
+            data_m.append(ext_results[error]['accm'])
+
+            data_x.append(ext_results[error]['accp'])
+            data_y.append(ext_results[error]['accm'])
+        else:
+            data_p.append(0)
+            data_m.append(0)
+
+            data_x.append(score)
+            data_y.append(0)
+
+        spoke_labels.append(' \n '.join(displayed_name[error].split()))
+        # spoke_labels.append(displayed_name[error])
+
+    # Radar plot
+    theta = radar_factory(N, frame='polygon')
+    fig, ax = plt.subplots(subplot_kw=dict(projection='radar'))  # , figsize=(16,12)
+    fig.subplots_adjust(wspace=0.15, hspace=0.6, top=0.85, bottom=0.10)
+    ax.set_rgrids([0.2, 0.4, 0.6, 0.8])
+    ax.set_title(displayed_name[toolname],
+                 weight='bold', size='medium', position=(0.5, 1.1),
+                 horizontalalignment='center', verticalalignment='center')
+    # plt.legend(prop={'size': 22})
+    # plt.rcParams.update({'font.size':22})
+
+    ax.fill(theta, data, facecolor=colors[2], alpha=0.6,
+            label='Accuracy', hatch="/"
+    )
+
+    ax.plot(theta, data, color=colors[2], alpha=1)
+
+    # ax.fill_between(theta, data_0, data_y, facecolor=colors[2], alpha=0.4)
+    # ax.fill_between(theta, data_y, data, facecolor=colors[1], alpha=0.4,
+    #                 label='Accuracy', hatch="/")
+    ax.fill_between(theta, data, data_x, facecolor=colors[0], alpha=0.6,
+                    label='Can be detected', hatch="\\")
+
+    ax.fill_between(theta, data_0, data_y, facecolor=colors[1], alpha=0.6,
+                    label='Always detected', hatch="\\")
+
+    # ax.plot(theta, data, color=colors[2], alpha=1, label='Overall Accuracy')
+
+    ax.plot(theta, data_p, color=colors[0], alpha=1, linestyle='dashed',
+            # label='Overall Accuracy$^+$'
+    )
+    # ax.fill(theta, data_p, facecolor=colors[0], alpha=0.4)
+
+    ax.plot(theta, data_m, color=colors[1], alpha=1, #linestyle='dotted',
+            #label='Always detected'
+    )
+    # ax.fill(theta, data_m, facecolor=colors[1], alpha=0.2)
+
+
+    legend = ax.legend(loc=(0.8, .99), labelspacing=0.1, fontsize='10')
+
+    ax.set_varlabels(spoke_labels)
+    ax.set_ylim(0,1)
+
+    plt.savefig(f'plots/ext_{name}.{ext}')
+    plt.close('all')
+
+def make_plot(name, toolnames, ext, black_list=[], merge=False):
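+    # Stacked bar plot comparing the tools: one bar per tool, stacked by outcome category
+    # (or by the merged OK/COK/NOK/SE categories when `merge` is set). Tests whose error
+    # category appears in `black_list` are ignored. The figure is saved as plots/{name}.{ext}.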
+    res_type = ["STP", "STN", "CTP", "CFP", "SFN", "SFP", "CE", "RE", "TO", "O", "SE"]
+    res = {}
+    colors = [
+        # '#DEE2E6',
+        '#4D5AAF', # TP
+        # '#CED4DA',
+        '#2ca02c', # TN
+        # '#ADB5BD',
+        '#9467bd', # CTP
+        # '#6C757D',
+        '#ff7f0e', # CFP
+        # '#495057',
+        '#8c564b', # FN
+        # '#343A40',
+        '#d62728', # FP
+        # '#212529',
+        '#4f4c4c', # SE (CE)
+        # '#605d5d', # RE
+        # '#726f6f', # TO
+        # '#838181'  # O
+    ]
+    patterns = ["\\","o","","","O","/","x"]
+
+    # Modify colors for merged version
+    if merge:
+        colors = [
+            # '#DEE2E6',
+            '#4D5AAF', # OK
+            # '#ADB5BD',
+            '#9467bd', # COK
+            # '#495057',
+            '#d62728', # NOK
+            # '#212529',
+            '#605d5d', # SE
+        ]
+        patterns = ["\\","x","/",""]
+
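+    # Map each detailed outcome onto a coarser category when `merge` is requested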
+    merged_res_type = {
+        "STP" :"STP" if not merge else "OK",
+        "STN" :"STN" if not merge else "OK",
+        "CTP" :"CTP" if not merge else "COK",
+        "CFP" :"CFP" if not merge else "COK",
+        "SFN" :"SFN" if not merge else "NOK",
+        "SFP" :"SFP" if not merge else "NOK",
+        "SE"  :"SE",
+        "CE"  :"SE",
+        "RE"  :"SE",
+        "TO"  :"SE",
+        "O"   :"SE"
+    }
+    res_type_short = ["STP", "STN", "CTP", "CFP", "SFN", "SFP", "SE"]
+
+    if merge:
+        res_type_short = ["OK", "COK", "NOK", "SE"]
+
+    for tool in toolnames:
+        res[tool] = {}
+        for r in res_type:
+            res[tool][merged_res_type[r]] = 0
+
+    for toolname in toolnames:
+        results = categorize_all_files(tools[toolname], toolname, todo)
+        # print(results)
+        for r in results:
+            id = merged_res_type[results[r]['result']]
+
+            if possible_details[results[r]['detail']] in black_list:
+                continue
+
+            res[toolname][id] += 1
+
+    def res_sort(toolname):
+        if not merge:
+            return res[toolname]['STP'] + res[toolname]['STN']
+        else:
+            return res[toolname]['OK'] + res[toolname]['COK']
+
+    toolnames.sort(key=res_sort, reverse=True)
+
+    fig, ax = plt.subplots(figsize=(16,12))
+    # fig, ax = plt.subplots()
+    x = np.arange(len(toolnames))     # the label locations
+    width = 1.0                       # the width of the bars
+    fig.subplots_adjust(wspace=0.15, hspace=0.6, top=0.90, bottom=0.20)
+
+    ax.set_ylabel("Number of codes")
+
+    offset = 0
+    prev_data = np.zeros(len(toolnames))
+
+    ind = 0
+    for t in res_type_short:
+        id = t
+        data = []
+
+        for toolname in toolnames:
+            data.append(res[toolname][id])
+
+        l = plt.bar(x, data, width, alpha=0.75, label=displayed_name[id],
+                    bottom=prev_data, color=colors[ind], hatch=patterns[ind])
+
+        # if len(toolnames) == 1:
+        #     ax.bar_label(l, padding=-1.5)
+
+        prev_data += data
+        ind += 1
+
+    rotation = -45 if len(toolnames) > 1 else 0
+    plt.xticks(rotation=rotation)
+
+    ax.set_xticks(x)
+    ax.set_xticklabels([displayed_name[t] for t in toolnames])
+
+    min_y, max_y = ax.get_ybound()
+    ax.set_ybound([min_y, max_y*1.05])
+
+    fig.tight_layout()
+
+    plt.legend(prop={'size': 22})
+    plt.rcParams.update({'font.size':22})
+    plt.savefig(f"plots/{name}.{ext}")
+
+def cmd_plots(rootdir, toolnames, ext="pdf"):
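+    # Entry point of '-c plots': gather the cached results of every requested tool from the
+    # logs/ directory, aggregate them per error category, then generate the radar and bar
+    # plots in the plots/ directory.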
+    here = os.getcwd()
+    os.chdir(rootdir)
+    os.makedirs('plots', exist_ok=True)
+    results = {}
+    total_elapsed = {}
+    used_toolnames = []
+
+    # select the tools for which we have some results
+    print("Produce the stats for:", end='')
+    for toolname in toolnames:
+        if toolname not in tools:
+            raise Exception(f"Tool {toolname} does not seem to be a valid name.")
+
+        if os.path.exists(f'logs/{toolname}'):
+            used_toolnames.append(toolname)
+            print(f' {toolname}', end="")
+
+            # To compute timing statistics
+            total_elapsed[toolname] = 0
+    print(".")
+
+    # Initialize the data structure to gather all results
+    results = {'total':{}, 'error':{}}
+    timing = {'total':{}, 'error':{}}
+    for error in error_scope:
+        results[error] = {}
+        timing[error] = {}
+        for toolname in used_toolnames:
+            results[error][toolname] = {'failure':[], 'timeout':[], 'unimplemented':[], 'other':[], 'TRUE_NEG':[], 'TRUE_POS':[], 'FALSE_NEG':[], 'FALSE_POS':[]}
+            results['total'][toolname] = {'failure':[], 'timeout':[], 'unimplemented':[], 'other':[], 'TRUE_NEG':[], 'TRUE_POS':[], 'FALSE_NEG':[], 'FALSE_POS':[],'error':[],'OK':[]}
+            results['error'][toolname] = {'failure':[], 'timeout':[], 'unimplemented':[], 'other':[], 'TRUE_NEG':[], 'TRUE_POS':[], 'FALSE_NEG':[], 'FALSE_POS':[],'error':[],'OK':[]}
+            timing[error][toolname] = []
+            timing['total'][toolname] = []
+            timing['error'][toolname] = []
+
+    # Get all data from the caches
+    for test in todo:
+        binary = re.sub(r'\.c', '', os.path.basename(test['filename']))
+        ID = test['id']
+        test_id = f"{binary}_{ID}"
+        expected = test['expect']
+        detail = test['detail']
+
+        for toolname in used_toolnames:
+            (res_category, elapsed, diagnostic, outcome) = categorize(tool=tools[toolname], toolname=toolname, test_id=test_id, expected=expected)
+
+            # if not tools[toolname].is_correct_diagnostic(test_id, res_category, expected, detail):
+            #     res_category = 'FALSE_NEG'
+
+            error = possible_details[test['detail']]
+            results[error][toolname][res_category].append(test_id)
+            results['total'][toolname][res_category].append(test_id)
+            timing[error][toolname].append(float(elapsed))
+            timing['total'][toolname].append(float(elapsed))
+            if expected == 'OK':
+                results['total'][toolname]['OK'].append(test_id)
+            else:
+                results['total'][toolname]['error'].append(test_id)
+                results['error'][toolname][res_category].append(test_id)
+                timing['error'][toolname].append(float(elapsed))
+
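+    # Error categories used by the radar plots, split between deterministic and
+    # non-deterministic errors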
+    deter = ['AInvalidParam', 'BResLeak', 'DMatch', 'CMatch', 'BReqLifecycle', 'BEpochLifecycle']
+    ndeter = ['DGlobalConcurrency', 'BLocalConcurrency', 'DRace', 'EBufferingHazard', 'InputHazard']
+
+    # Radar plots
+    for tool in used_toolnames:
+        print (f' --- Radar plots {displayed_name[tool]}')
+        make_radar_plot(f'radar_deter_{tool}', deter, tool, results, ext)
+        make_radar_plot(f'radar_ndeter_{tool}', ndeter, tool, results, ext)
+        make_radar_plot(f'radar_all_{tool}', deter + ndeter, tool, results, ext)
+        make_radar_plot_ext(f'radar_all_{tool}', deter + ndeter, tool, results, ext)
+
+    # Bar plots with all tools
+    make_plot("cat_ext_all", used_toolnames, ext)
+    make_plot("cat_ext_all_2", used_toolnames, ext, merge=True)
+
+    # Bar plots with all tools but without determinist errors
+    make_plot("cat_ndeter_ext_all", used_toolnames, ext, black_list=deter+['FOK'])
+    make_plot("cat_ndeter_ext_all_2", used_toolnames, ext, black_list=deter+['FOK'], merge=True)
+
+
+    # Individual plots for each tools
+    # for tool in used_toolnames:
+    #     print (f' --- Bar plots {displayed_name[tool]}')
+    #     make_plot(f"cat_ext_{tool}", [tool], ext)
+    #     make_plot(f"cat_ext_{tool}_2", [tool], ext, merge=True)
+
+    plt.close('all')
+    os.chdir(here)
+
+########################
+# Main script argument parsing
+########################
+
+parser = argparse.ArgumentParser(
+    description='This runner bridges an MPI compiler/executor and the test codes of the MPI bugs collection, and compares the actual results against the expected ones.')
+
+parser.add_argument('-c', metavar='cmd', default='all',
+                    help="The command you want to execute. The default, 'all', runs all commands in sequence. Other choices:\n"
+                    "  generate: regenerate all the test codes.\n"
+                    "  build: build the requested tools.\n"
+                    "  run: run the tests on all codes.\n"
+                    "  latex: produce the LaTeX tables we need for the article, using the cached values from a previous 'run'.\n"
+                    "  html: produce the HTML statistics, using the cached values from a previous 'run'.\n"
+                    "  plots: produce the plot images, using the cached values from a previous 'run'.\n")
+
+parser.add_argument('-x', metavar='tool', default='mpirun',
+                    help='the tool(s) to use at execution: one name or a comma-separated list among [aislinn, civl, isp, mpisv, must, simgrid, parcoach]')
+
+parser.add_argument('-t', '--timeout', metavar='int', default=300, type=int,
+                    help='timeout value at execution time, given in seconds (default: %(default)s)')
+
+parser.add_argument('-b', metavar='batch', default='1/1',
+                    help="Limits the test executions to the batch #N out of M batches (Syntax: 'N/M'). To get 3 runners, use 1/3 2/3 3/3")
+
+parser.add_argument('-f', metavar='format', default='pdf',
+                    help="Format of output images [pdf, svg, png, ...] (only for 'plots' command)")
+
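+# Illustrative invocations (the tool and batch values below are only examples):
+#   ./MBI.py -c generate                      # regenerate the test codes
+#   ./MBI.py -c run -x simgrid -b 1/3         # run the first of three batches with simgrid
+#   ./MBI.py -c plots -x simgrid,must -f svg  # build SVG plots from the cached results
+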
+args = parser.parse_args()
+rootdir = os.path.dirname(os.path.abspath(__file__))
+
+# Parameter checking: Did we get a valid tool to use?
+arg_tools=[]
+if args.c == 'all' or args.c == 'run':
+    if args.x == 'mpirun':
+        raise Exception("No tool was provided, please retry with -x parameter. (see -h for further information on usage)")
+    elif args.x in tools:
+        arg_tools = [args.x]
+    elif ',' in args.x:
+        for x in args.x.split(','):
+            if x not in tools:
+                raise Exception(f"The tool parameter you provided ({x}) is either incorrect or not yet implemented.")
+            arg_tools.append(x)
+    else:
+        raise Exception(f"The tool parameter you provided ({args.x}) is either incorrect or not yet implemented.")
+elif ',' in args.x:
+    for x in args.x.split(','):
+        if x not in tools:
+            raise Exception(f"The tool parameter you provided ({x}) is either incorrect or not yet implemented.")
+    arg_tools = args.x.split(',')
+else:
+    arg_tools = [args.x]
+
+print(f'arg_tools: {arg_tools}')
+
+if args.c == 'all':
+    extract_all_todo(args.b)
+    cmd_run(rootdir=rootdir, toolname=args.x, batchinfo=args.b)
+    cmd_html(rootdir, toolnames=arg_tools)
+elif args.c == 'generate':
+    cmd_gencodes()
+elif args.c == 'build':
+    for t in arg_tools:
+        cmd_build(rootdir=rootdir, toolname=t)
+elif args.c == 'run':
+    extract_all_todo(args.b)
+    for t in arg_tools:
+        cmd_run(rootdir=rootdir, toolname=t, batchinfo=args.b)
+elif args.c == 'latex':
+    extract_all_todo(args.b)
+    # 'smpi','smpivg' are not shown in the paper
+    cmd_latex(rootdir, toolnames=['aislinn', 'civl', 'isp','itac', 'simgrid', 'mpisv', 'must', 'hermes', 'parcoach', 'mpi-checker'])
+elif args.c == 'html':
+    extract_all_todo(args.b)
+    if args.x == 'mpirun':
+        toolnames=['itac', 'simgrid','must', 'smpi', 'smpivg', 'aislinn', 'civl', 'isp', 'mpisv', 'parcoach', 'hermes', 'mpi-checker']
+    else:
+        toolnames=arg_tools
+    # Build SVG plots
+    if plots_loaded:
+        cmd_plots(rootdir, toolnames=toolnames, ext="svg")
+    # Build HTML page
+    cmd_html(rootdir, toolnames=toolnames)
+elif args.c == 'plots':
+    if not plots_loaded:
+        print("[MBI] Error: Dependancies ('numpy' or 'matplotlib') are not available!")
+        sys.exit(1)
+    extract_all_todo(args.b)
+    if args.x == 'mpirun':
+        toolnames=['itac', 'simgrid', 'must', 'aislinn', 'civl', 'isp', 'mpisv', 'parcoach', 'hermes', 'mpi-checker']
+    else:
+        toolnames=arg_tools
+
+    cmd_plots(rootdir, toolnames=toolnames, ext=args.f)
+else:
+    print(f"Invalid command '{args.c}'. Please choose one of 'all', 'generate', 'build', 'run', 'html' 'latex' or 'plots'")
+    sys.exit(1)
diff --git a/Pipfile b/Pipfile
new file mode 100644
index 0000000000000000000000000000000000000000..975e912f87cc52e74793215cce12611d406ed7f1
--- /dev/null
+++ b/Pipfile
@@ -0,0 +1,9 @@
+[[source]]
+url = "https://pypi.org/simple"
+verify_ssl = true
+name = "pypi"
+
+[packages]
+numpy = "==1.24.2"
+matplotlib = "*"
+scan-build = "*"
diff --git a/Pipfile.lock b/Pipfile.lock
new file mode 100644
index 0000000000000000000000000000000000000000..04eb3c32b009d2b008c71b7124e3cf398dad8039
--- /dev/null
+++ b/Pipfile.lock
@@ -0,0 +1,379 @@
+{
+    "_meta": {
+        "hash": {
+            "sha256": "487a31884df0ad536f2549322f30b2ab97a6d7e571d00ae08bc73f4fce067428"
+        },
+        "pipfile-spec": 6,
+        "requires": {},
+        "sources": [
+            {
+                "name": "pypi",
+                "url": "https://pypi.org/simple",
+                "verify_ssl": true
+            }
+        ]
+    },
+    "default": {
+        "contourpy": {
+            "hashes": [
+                "sha256:031154ed61f7328ad7f97662e48660a150ef84ee1bc8876b6472af88bf5a9b98",
+                "sha256:0f9d350b639db6c2c233d92c7f213d94d2e444d8e8fc5ca44c9706cf72193772",
+                "sha256:130230b7e49825c98edf0b428b7aa1125503d91732735ef897786fe5452b1ec2",
+                "sha256:152fd8f730c31fd67fe0ffebe1df38ab6a669403da93df218801a893645c6ccc",
+                "sha256:1c71fdd8f1c0f84ffd58fca37d00ca4ebaa9e502fb49825484da075ac0b0b803",
+                "sha256:24847601071f740837aefb730e01bd169fbcaa610209779a78db7ebb6e6a7051",
+                "sha256:2e9ebb4425fc1b658e13bace354c48a933b842d53c458f02c86f371cecbedecc",
+                "sha256:30676ca45084ee61e9c3da589042c24a57592e375d4b138bd84d8709893a1ba4",
+                "sha256:31a55dccc8426e71817e3fe09b37d6d48ae40aae4ecbc8c7ad59d6893569c436",
+                "sha256:366a0cf0fc079af5204801786ad7a1c007714ee3909e364dbac1729f5b0849e5",
+                "sha256:38e2e577f0f092b8e6774459317c05a69935a1755ecfb621c0a98f0e3c09c9a5",
+                "sha256:3c184ad2433635f216645fdf0493011a4667e8d46b34082f5a3de702b6ec42e3",
+                "sha256:3caea6365b13119626ee996711ab63e0c9d7496f65641f4459c60a009a1f3e80",
+                "sha256:3e927b3868bd1e12acee7cc8f3747d815b4ab3e445a28d2e5373a7f4a6e76ba1",
+                "sha256:4ee3ee247f795a69e53cd91d927146fb16c4e803c7ac86c84104940c7d2cabf0",
+                "sha256:54d43960d809c4c12508a60b66cb936e7ed57d51fb5e30b513934a4a23874fae",
+                "sha256:57119b0116e3f408acbdccf9eb6ef19d7fe7baf0d1e9aaa5381489bc1aa56556",
+                "sha256:58569c491e7f7e874f11519ef46737cea1d6eda1b514e4eb5ac7dab6aa864d02",
+                "sha256:5a011cf354107b47c58ea932d13b04d93c6d1d69b8b6dce885e642531f847566",
+                "sha256:5caeacc68642e5f19d707471890f037a13007feba8427eb7f2a60811a1fc1350",
+                "sha256:5dd34c1ae752515318224cba7fc62b53130c45ac6a1040c8b7c1a223c46e8967",
+                "sha256:60835badb5ed5f4e194a6f21c09283dd6e007664a86101431bf870d9e86266c4",
+                "sha256:62398c80ef57589bdbe1eb8537127321c1abcfdf8c5f14f479dbbe27d0322e66",
+                "sha256:6381fa66866b0ea35e15d197fc06ac3840a9b2643a6475c8fff267db8b9f1e69",
+                "sha256:64757f6460fc55d7e16ed4f1de193f362104285c667c112b50a804d482777edd",
+                "sha256:69f8ff4db108815addd900a74df665e135dbbd6547a8a69333a68e1f6e368ac2",
+                "sha256:6c180d89a28787e4b73b07e9b0e2dac7741261dbdca95f2b489c4f8f887dd810",
+                "sha256:71b0bf0c30d432278793d2141362ac853859e87de0a7dee24a1cea35231f0d50",
+                "sha256:769eef00437edf115e24d87f8926955f00f7704bede656ce605097584f9966dc",
+                "sha256:7f6979d20ee5693a1057ab53e043adffa1e7418d734c1532e2d9e915b08d8ec2",
+                "sha256:87f4d8941a9564cda3f7fa6a6cd9b32ec575830780677932abdec7bcb61717b0",
+                "sha256:89ba9bb365446a22411f0673abf6ee1fea3b2cf47b37533b970904880ceb72f3",
+                "sha256:8acf74b5d383414401926c1598ed77825cd530ac7b463ebc2e4f46638f56cce6",
+                "sha256:9056c5310eb1daa33fc234ef39ebfb8c8e2533f088bbf0bc7350f70a29bde1ac",
+                "sha256:95c3acddf921944f241b6773b767f1cbce71d03307270e2d769fd584d5d1092d",
+                "sha256:9e20e5a1908e18aaa60d9077a6d8753090e3f85ca25da6e25d30dc0a9e84c2c6",
+                "sha256:a1e97b86f73715e8670ef45292d7cc033548266f07d54e2183ecb3c87598888f",
+                "sha256:a877ada905f7d69b2a31796c4b66e31a8068b37aa9b78832d41c82fc3e056ddd",
+                "sha256:a9d7587d2fdc820cc9177139b56795c39fb8560f540bba9ceea215f1f66e1566",
+                "sha256:abf298af1e7ad44eeb93501e40eb5a67abbf93b5d90e468d01fc0c4451971afa",
+                "sha256:ae90d5a8590e5310c32a7630b4b8618cef7563cebf649011da80874d0aa8f414",
+                "sha256:b6d0f9e1d39dbfb3977f9dd79f156c86eb03e57a7face96f199e02b18e58d32a",
+                "sha256:b8d587cc39057d0afd4166083d289bdeff221ac6d3ee5046aef2d480dc4b503c",
+                "sha256:c5210e5d5117e9aec8c47d9156d1d3835570dd909a899171b9535cb4a3f32693",
+                "sha256:cc331c13902d0f50845099434cd936d49d7a2ca76cb654b39691974cb1e4812d",
+                "sha256:ce41676b3d0dd16dbcfabcc1dc46090aaf4688fd6e819ef343dbda5a57ef0161",
+                "sha256:d8165a088d31798b59e91117d1f5fc3df8168d8b48c4acc10fc0df0d0bdbcc5e",
+                "sha256:e7281244c99fd7c6f27c1c6bfafba878517b0b62925a09b586d88ce750a016d2",
+                "sha256:e96a08b62bb8de960d3a6afbc5ed8421bf1a2d9c85cc4ea73f4bc81b4910500f",
+                "sha256:ed33433fc3820263a6368e532f19ddb4c5990855e4886088ad84fd7c4e561c71",
+                "sha256:efb8f6d08ca7998cf59eaf50c9d60717f29a1a0a09caa46460d33b2924839dbd",
+                "sha256:efe99298ba37e37787f6a2ea868265465410822f7bea163edcc1bd3903354ea9",
+                "sha256:f99e9486bf1bb979d95d5cffed40689cb595abb2b841f2991fc894b3452290e8",
+                "sha256:fc1464c97579da9f3ab16763c32e5c5d5bb5fa1ec7ce509a4ca6108b61b84fab",
+                "sha256:fd7dc0e6812b799a34f6d12fcb1000539098c249c8da54f3566c6a6461d0dbad"
+            ],
+            "markers": "python_version >= '3.8'",
+            "version": "==1.0.7"
+        },
+        "cycler": {
+            "hashes": [
+                "sha256:3a27e95f763a428a739d2add979fa7494c912a32c17c4c38c4d5f082cad165a3",
+                "sha256:9c87405839a19696e837b3b818fed3f5f69f16f1eec1a1ad77e043dcea9c772f"
+            ],
+            "markers": "python_version >= '3.6'",
+            "version": "==0.11.0"
+        },
+        "fonttools": {
+            "hashes": [
+                "sha256:106caf6167c4597556b31a8d9175a3fdc0356fdcd70ab19973c3b0d4c893c461",
+                "sha256:dba8d7cdb8e2bac1b3da28c5ed5960de09e59a2fe7e63bb73f5a59e57b0430d2"
+            ],
+            "markers": "python_version >= '3.8'",
+            "version": "==4.39.4"
+        },
+        "importlib-resources": {
+            "hashes": [
+                "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6",
+                "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"
+            ],
+            "markers": "python_version < '3.10'",
+            "version": "==5.12.0"
+        },
+        "kiwisolver": {
+            "hashes": [
+                "sha256:02f79693ec433cb4b5f51694e8477ae83b3205768a6fb48ffba60549080e295b",
+                "sha256:03baab2d6b4a54ddbb43bba1a3a2d1627e82d205c5cf8f4c924dc49284b87166",
+                "sha256:1041feb4cda8708ce73bb4dcb9ce1ccf49d553bf87c3954bdfa46f0c3f77252c",
+                "sha256:10ee06759482c78bdb864f4109886dff7b8a56529bc1609d4f1112b93fe6423c",
+                "sha256:1d1573129aa0fd901076e2bfb4275a35f5b7aa60fbfb984499d661ec950320b0",
+                "sha256:283dffbf061a4ec60391d51e6155e372a1f7a4f5b15d59c8505339454f8989e4",
+                "sha256:28bc5b299f48150b5f822ce68624e445040595a4ac3d59251703779836eceff9",
+                "sha256:2a66fdfb34e05b705620dd567f5a03f239a088d5a3f321e7b6ac3239d22aa286",
+                "sha256:2e307eb9bd99801f82789b44bb45e9f541961831c7311521b13a6c85afc09767",
+                "sha256:2e407cb4bd5a13984a6c2c0fe1845e4e41e96f183e5e5cd4d77a857d9693494c",
+                "sha256:2f5e60fabb7343a836360c4f0919b8cd0d6dbf08ad2ca6b9cf90bf0c76a3c4f6",
+                "sha256:36dafec3d6d6088d34e2de6b85f9d8e2324eb734162fba59d2ba9ed7a2043d5b",
+                "sha256:3fe20f63c9ecee44560d0e7f116b3a747a5d7203376abeea292ab3152334d004",
+                "sha256:41dae968a94b1ef1897cb322b39360a0812661dba7c682aa45098eb8e193dbdf",
+                "sha256:4bd472dbe5e136f96a4b18f295d159d7f26fd399136f5b17b08c4e5f498cd494",
+                "sha256:4ea39b0ccc4f5d803e3337dd46bcce60b702be4d86fd0b3d7531ef10fd99a1ac",
+                "sha256:5853eb494c71e267912275e5586fe281444eb5e722de4e131cddf9d442615626",
+                "sha256:5bce61af018b0cb2055e0e72e7d65290d822d3feee430b7b8203d8a855e78766",
+                "sha256:6295ecd49304dcf3bfbfa45d9a081c96509e95f4b9d0eb7ee4ec0530c4a96514",
+                "sha256:62ac9cc684da4cf1778d07a89bf5f81b35834cb96ca523d3a7fb32509380cbf6",
+                "sha256:70e7c2e7b750585569564e2e5ca9845acfaa5da56ac46df68414f29fea97be9f",
+                "sha256:7577c1987baa3adc4b3c62c33bd1118c3ef5c8ddef36f0f2c950ae0b199e100d",
+                "sha256:75facbe9606748f43428fc91a43edb46c7ff68889b91fa31f53b58894503a191",
+                "sha256:787518a6789009c159453da4d6b683f468ef7a65bbde796bcea803ccf191058d",
+                "sha256:78d6601aed50c74e0ef02f4204da1816147a6d3fbdc8b3872d263338a9052c51",
+                "sha256:7c43e1e1206cd421cd92e6b3280d4385d41d7166b3ed577ac20444b6995a445f",
+                "sha256:81e38381b782cc7e1e46c4e14cd997ee6040768101aefc8fa3c24a4cc58e98f8",
+                "sha256:841293b17ad704d70c578f1f0013c890e219952169ce8a24ebc063eecf775454",
+                "sha256:872b8ca05c40d309ed13eb2e582cab0c5a05e81e987ab9c521bf05ad1d5cf5cb",
+                "sha256:877272cf6b4b7e94c9614f9b10140e198d2186363728ed0f701c6eee1baec1da",
+                "sha256:8c808594c88a025d4e322d5bb549282c93c8e1ba71b790f539567932722d7bd8",
+                "sha256:8ed58b8acf29798b036d347791141767ccf65eee7f26bde03a71c944449e53de",
+                "sha256:91672bacaa030f92fc2f43b620d7b337fd9a5af28b0d6ed3f77afc43c4a64b5a",
+                "sha256:968f44fdbf6dd757d12920d63b566eeb4d5b395fd2d00d29d7ef00a00582aac9",
+                "sha256:9f85003f5dfa867e86d53fac6f7e6f30c045673fa27b603c397753bebadc3008",
+                "sha256:a553dadda40fef6bfa1456dc4be49b113aa92c2a9a9e8711e955618cd69622e3",
+                "sha256:a68b62a02953b9841730db7797422f983935aeefceb1679f0fc85cbfbd311c32",
+                "sha256:abbe9fa13da955feb8202e215c4018f4bb57469b1b78c7a4c5c7b93001699938",
+                "sha256:ad881edc7ccb9d65b0224f4e4d05a1e85cf62d73aab798943df6d48ab0cd79a1",
+                "sha256:b1792d939ec70abe76f5054d3f36ed5656021dcad1322d1cc996d4e54165cef9",
+                "sha256:b428ef021242344340460fa4c9185d0b1f66fbdbfecc6c63eff4b7c29fad429d",
+                "sha256:b533558eae785e33e8c148a8d9921692a9fe5aa516efbdff8606e7d87b9d5824",
+                "sha256:ba59c92039ec0a66103b1d5fe588fa546373587a7d68f5c96f743c3396afc04b",
+                "sha256:bc8d3bd6c72b2dd9decf16ce70e20abcb3274ba01b4e1c96031e0c4067d1e7cd",
+                "sha256:bc9db8a3efb3e403e4ecc6cd9489ea2bac94244f80c78e27c31dcc00d2790ac2",
+                "sha256:bf7d9fce9bcc4752ca4a1b80aabd38f6d19009ea5cbda0e0856983cf6d0023f5",
+                "sha256:c2dbb44c3f7e6c4d3487b31037b1bdbf424d97687c1747ce4ff2895795c9bf69",
+                "sha256:c79ebe8f3676a4c6630fd3f777f3cfecf9289666c84e775a67d1d358578dc2e3",
+                "sha256:c97528e64cb9ebeff9701e7938653a9951922f2a38bd847787d4a8e498cc83ae",
+                "sha256:d0611a0a2a518464c05ddd5a3a1a0e856ccc10e67079bb17f265ad19ab3c7597",
+                "sha256:d06adcfa62a4431d404c31216f0f8ac97397d799cd53800e9d3efc2fbb3cf14e",
+                "sha256:d41997519fcba4a1e46eb4a2fe31bc12f0ff957b2b81bac28db24744f333e955",
+                "sha256:d5b61785a9ce44e5a4b880272baa7cf6c8f48a5180c3e81c59553ba0cb0821ca",
+                "sha256:da152d8cdcab0e56e4f45eb08b9aea6455845ec83172092f09b0e077ece2cf7a",
+                "sha256:da7e547706e69e45d95e116e6939488d62174e033b763ab1496b4c29b76fabea",
+                "sha256:db5283d90da4174865d520e7366801a93777201e91e79bacbac6e6927cbceede",
+                "sha256:db608a6757adabb32f1cfe6066e39b3706d8c3aa69bbc353a5b61edad36a5cb4",
+                "sha256:e0ea21f66820452a3f5d1655f8704a60d66ba1191359b96541eaf457710a5fc6",
+                "sha256:e7da3fec7408813a7cebc9e4ec55afed2d0fd65c4754bc376bf03498d4e92686",
+                "sha256:e92a513161077b53447160b9bd8f522edfbed4bd9759e4c18ab05d7ef7e49408",
+                "sha256:ecb1fa0db7bf4cff9dac752abb19505a233c7f16684c5826d1f11ebd9472b871",
+                "sha256:efda5fc8cc1c61e4f639b8067d118e742b812c930f708e6667a5ce0d13499e29",
+                "sha256:f0a1dbdb5ecbef0d34eb77e56fcb3e95bbd7e50835d9782a45df81cc46949750",
+                "sha256:f0a71d85ecdd570ded8ac3d1c0f480842f49a40beb423bb8014539a9f32a5897",
+                "sha256:f4f270de01dd3e129a72efad823da90cc4d6aafb64c410c9033aba70db9f1ff0",
+                "sha256:f6cb459eea32a4e2cf18ba5fcece2dbdf496384413bc1bae15583f19e567f3b2",
+                "sha256:f8ad8285b01b0d4695102546b342b493b3ccc6781fc28c8c6a1bb63e95d22f09",
+                "sha256:f9f39e2f049db33a908319cf46624a569b36983c7c78318e9726a4cb8923b26c"
+            ],
+            "markers": "python_version >= '3.7'",
+            "version": "==1.4.4"
+        },
+        "matplotlib": {
+            "hashes": [
+                "sha256:08308bae9e91aca1ec6fd6dda66237eef9f6294ddb17f0d0b3c863169bf82353",
+                "sha256:14645aad967684e92fc349493fa10c08a6da514b3d03a5931a1bac26e6792bd1",
+                "sha256:21e9cff1a58d42e74d01153360de92b326708fb205250150018a52c70f43c290",
+                "sha256:28506a03bd7f3fe59cd3cd4ceb2a8d8a2b1db41afede01f66c42561b9be7b4b7",
+                "sha256:2bf092f9210e105f414a043b92af583c98f50050559616930d884387d0772aba",
+                "sha256:3032884084f541163f295db8a6536e0abb0db464008fadca6c98aaf84ccf4717",
+                "sha256:3a2cb34336110e0ed8bb4f650e817eed61fa064acbefeb3591f1b33e3a84fd96",
+                "sha256:3ba2af245e36990facf67fde840a760128ddd71210b2ab6406e640188d69d136",
+                "sha256:3d7bc90727351fb841e4d8ae620d2d86d8ed92b50473cd2b42ce9186104ecbba",
+                "sha256:438196cdf5dc8d39b50a45cb6e3f6274edbcf2254f85fa9b895bf85851c3a613",
+                "sha256:46a561d23b91f30bccfd25429c3c706afe7d73a5cc64ef2dfaf2b2ac47c1a5dc",
+                "sha256:4cf327e98ecf08fcbb82685acaf1939d3338548620ab8dfa02828706402c34de",
+                "sha256:4f99e1b234c30c1e9714610eb0c6d2f11809c9c78c984a613ae539ea2ad2eb4b",
+                "sha256:544764ba51900da4639c0f983b323d288f94f65f4024dc40ecb1542d74dc0500",
+                "sha256:56d94989191de3fcc4e002f93f7f1be5da476385dde410ddafbb70686acf00ea",
+                "sha256:57bfb8c8ea253be947ccb2bc2d1bb3862c2bccc662ad1b4626e1f5e004557042",
+                "sha256:617f14ae9d53292ece33f45cba8503494ee199a75b44de7717964f70637a36aa",
+                "sha256:6eb88d87cb2c49af00d3bbc33a003f89fd9f78d318848da029383bfc08ecfbfb",
+                "sha256:75d4725d70b7c03e082bbb8a34639ede17f333d7247f56caceb3801cb6ff703d",
+                "sha256:770a205966d641627fd5cf9d3cb4b6280a716522cd36b8b284a8eb1581310f61",
+                "sha256:7b73305f25eab4541bd7ee0b96d87e53ae9c9f1823be5659b806cd85786fe882",
+                "sha256:7c9a4b2da6fac77bcc41b1ea95fadb314e92508bf5493ceff058e727e7ecf5b0",
+                "sha256:81a6b377ea444336538638d31fdb39af6be1a043ca5e343fe18d0f17e098770b",
+                "sha256:83111e6388dec67822e2534e13b243cc644c7494a4bb60584edbff91585a83c6",
+                "sha256:8704726d33e9aa8a6d5215044b8d00804561971163563e6e6591f9dcf64340cc",
+                "sha256:89768d84187f31717349c6bfadc0e0d8c321e8eb34522acec8a67b1236a66332",
+                "sha256:8bf26ade3ff0f27668989d98c8435ce9327d24cffb7f07d24ef609e33d582439",
+                "sha256:8c587963b85ce41e0a8af53b9b2de8dddbf5ece4c34553f7bd9d066148dc719c",
+                "sha256:95cbc13c1fc6844ab8812a525bbc237fa1470863ff3dace7352e910519e194b1",
+                "sha256:97cc368a7268141afb5690760921765ed34867ffb9655dd325ed207af85c7529",
+                "sha256:a867bf73a7eb808ef2afbca03bcdb785dae09595fbe550e1bab0cd023eba3de0",
+                "sha256:b867e2f952ed592237a1828f027d332d8ee219ad722345b79a001f49df0936eb",
+                "sha256:c0bd19c72ae53e6ab979f0ac6a3fafceb02d2ecafa023c5cca47acd934d10be7",
+                "sha256:ce463ce590f3825b52e9fe5c19a3c6a69fd7675a39d589e8b5fbe772272b3a24",
+                "sha256:cf0e4f727534b7b1457898c4f4ae838af1ef87c359b76dcd5330fa31893a3ac7",
+                "sha256:def58098f96a05f90af7e92fd127d21a287068202aa43b2a93476170ebd99e87",
+                "sha256:e99bc9e65901bb9a7ce5e7bb24af03675cbd7c70b30ac670aa263240635999a4",
+                "sha256:eb7d248c34a341cd4c31a06fd34d64306624c8cd8d0def7abb08792a5abfd556",
+                "sha256:f67bfdb83a8232cb7a92b869f9355d677bce24485c460b19d01970b64b2ed476",
+                "sha256:f883a22a56a84dba3b588696a2b8a1ab0d2c3d41be53264115c71b0a942d8fdb",
+                "sha256:fbdeeb58c0cf0595efe89c05c224e0a502d1aa6a8696e68a73c3efc6bc354304"
+            ],
+            "index": "pypi",
+            "version": "==3.7.1"
+        },
+        "numpy": {
+            "hashes": [
+                "sha256:003a9f530e880cb2cd177cba1af7220b9aa42def9c4afc2a2fc3ee6be7eb2b22",
+                "sha256:150947adbdfeceec4e5926d956a06865c1c690f2fd902efede4ca6fe2e657c3f",
+                "sha256:2620e8592136e073bd12ee4536149380695fbe9ebeae845b81237f986479ffc9",
+                "sha256:2eabd64ddb96a1239791da78fa5f4e1693ae2dadc82a76bc76a14cbb2b966e96",
+                "sha256:4173bde9fa2a005c2c6e2ea8ac1618e2ed2c1c6ec8a7657237854d42094123a0",
+                "sha256:4199e7cfc307a778f72d293372736223e39ec9ac096ff0a2e64853b866a8e18a",
+                "sha256:4cecaed30dc14123020f77b03601559fff3e6cd0c048f8b5289f4eeabb0eb281",
+                "sha256:557d42778a6869c2162deb40ad82612645e21d79e11c1dc62c6e82a2220ffb04",
+                "sha256:63e45511ee4d9d976637d11e6c9864eae50e12dc9598f531c035265991910468",
+                "sha256:6524630f71631be2dabe0c541e7675db82651eb998496bbe16bc4f77f0772253",
+                "sha256:76807b4063f0002c8532cfeac47a3068a69561e9c8715efdad3c642eb27c0756",
+                "sha256:7de8fdde0003f4294655aa5d5f0a89c26b9f22c0a58790c38fae1ed392d44a5a",
+                "sha256:889b2cc88b837d86eda1b17008ebeb679d82875022200c6e8e4ce6cf549b7acb",
+                "sha256:92011118955724465fb6853def593cf397b4a1367495e0b59a7e69d40c4eb71d",
+                "sha256:97cf27e51fa078078c649a51d7ade3c92d9e709ba2bfb97493007103c741f1d0",
+                "sha256:9a23f8440561a633204a67fb44617ce2a299beecf3295f0d13c495518908e910",
+                "sha256:a51725a815a6188c662fb66fb32077709a9ca38053f0274640293a14fdd22978",
+                "sha256:a77d3e1163a7770164404607b7ba3967fb49b24782a6ef85d9b5f54126cc39e5",
+                "sha256:adbdce121896fd3a17a77ab0b0b5eedf05a9834a18699db6829a64e1dfccca7f",
+                "sha256:c29e6bd0ec49a44d7690ecb623a8eac5ab8a923bce0bea6293953992edf3a76a",
+                "sha256:c72a6b2f4af1adfe193f7beb91ddf708ff867a3f977ef2ec53c0ffb8283ab9f5",
+                "sha256:d0a2db9d20117bf523dde15858398e7c0858aadca7c0f088ac0d6edd360e9ad2",
+                "sha256:e3ab5d32784e843fc0dd3ab6dcafc67ef806e6b6828dc6af2f689be0eb4d781d",
+                "sha256:e428c4fbfa085f947b536706a2fc349245d7baa8334f0c5723c56a10595f9b95",
+                "sha256:e8d2859428712785e8a8b7d2b3ef0a1d1565892367b32f915c4a4df44d0e64f5",
+                "sha256:eef70b4fc1e872ebddc38cddacc87c19a3709c0e3e5d20bf3954c147b1dd941d",
+                "sha256:f64bb98ac59b3ea3bf74b02f13836eb2e24e48e0ab0145bbda646295769bd780",
+                "sha256:f9006288bcf4895917d02583cf3411f98631275bc67cce355a7f39f8c14338fa"
+            ],
+            "index": "pypi",
+            "version": "==1.24.2"
+        },
+        "packaging": {
+            "hashes": [
+                "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61",
+                "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"
+            ],
+            "markers": "python_version >= '3.7'",
+            "version": "==23.1"
+        },
+        "pillow": {
+            "hashes": [
+                "sha256:07999f5834bdc404c442146942a2ecadd1cb6292f5229f4ed3b31e0a108746b1",
+                "sha256:0852ddb76d85f127c135b6dd1f0bb88dbb9ee990d2cd9aa9e28526c93e794fba",
+                "sha256:1781a624c229cb35a2ac31cc4a77e28cafc8900733a864870c49bfeedacd106a",
+                "sha256:1e7723bd90ef94eda669a3c2c19d549874dd5badaeefabefd26053304abe5799",
+                "sha256:229e2c79c00e85989a34b5981a2b67aa079fd08c903f0aaead522a1d68d79e51",
+                "sha256:22baf0c3cf0c7f26e82d6e1adf118027afb325e703922c8dfc1d5d0156bb2eeb",
+                "sha256:252a03f1bdddce077eff2354c3861bf437c892fb1832f75ce813ee94347aa9b5",
+                "sha256:2dfaaf10b6172697b9bceb9a3bd7b951819d1ca339a5ef294d1f1ac6d7f63270",
+                "sha256:322724c0032af6692456cd6ed554bb85f8149214d97398bb80613b04e33769f6",
+                "sha256:35f6e77122a0c0762268216315bf239cf52b88865bba522999dc38f1c52b9b47",
+                "sha256:375f6e5ee9620a271acb6820b3d1e94ffa8e741c0601db4c0c4d3cb0a9c224bf",
+                "sha256:3ded42b9ad70e5f1754fb7c2e2d6465a9c842e41d178f262e08b8c85ed8a1d8e",
+                "sha256:432b975c009cf649420615388561c0ce7cc31ce9b2e374db659ee4f7d57a1f8b",
+                "sha256:482877592e927fd263028c105b36272398e3e1be3269efda09f6ba21fd83ec66",
+                "sha256:489f8389261e5ed43ac8ff7b453162af39c3e8abd730af8363587ba64bb2e865",
+                "sha256:54f7102ad31a3de5666827526e248c3530b3a33539dbda27c6843d19d72644ec",
+                "sha256:560737e70cb9c6255d6dcba3de6578a9e2ec4b573659943a5e7e4af13f298f5c",
+                "sha256:5671583eab84af046a397d6d0ba25343c00cd50bce03787948e0fff01d4fd9b1",
+                "sha256:5ba1b81ee69573fe7124881762bb4cd2e4b6ed9dd28c9c60a632902fe8db8b38",
+                "sha256:5d4ebf8e1db4441a55c509c4baa7a0587a0210f7cd25fcfe74dbbce7a4bd1906",
+                "sha256:60037a8db8750e474af7ffc9faa9b5859e6c6d0a50e55c45576bf28be7419705",
+                "sha256:608488bdcbdb4ba7837461442b90ea6f3079397ddc968c31265c1e056964f1ef",
+                "sha256:6608ff3bf781eee0cd14d0901a2b9cc3d3834516532e3bd673a0a204dc8615fc",
+                "sha256:662da1f3f89a302cc22faa9f14a262c2e3951f9dbc9617609a47521c69dd9f8f",
+                "sha256:7002d0797a3e4193c7cdee3198d7c14f92c0836d6b4a3f3046a64bd1ce8df2bf",
+                "sha256:763782b2e03e45e2c77d7779875f4432e25121ef002a41829d8868700d119392",
+                "sha256:77165c4a5e7d5a284f10a6efaa39a0ae8ba839da344f20b111d62cc932fa4e5d",
+                "sha256:7c9af5a3b406a50e313467e3565fc99929717f780164fe6fbb7704edba0cebbe",
+                "sha256:7ec6f6ce99dab90b52da21cf0dc519e21095e332ff3b399a357c187b1a5eee32",
+                "sha256:833b86a98e0ede388fa29363159c9b1a294b0905b5128baf01db683672f230f5",
+                "sha256:84a6f19ce086c1bf894644b43cd129702f781ba5751ca8572f08aa40ef0ab7b7",
+                "sha256:8507eda3cd0608a1f94f58c64817e83ec12fa93a9436938b191b80d9e4c0fc44",
+                "sha256:85ec677246533e27770b0de5cf0f9d6e4ec0c212a1f89dfc941b64b21226009d",
+                "sha256:8aca1152d93dcc27dc55395604dcfc55bed5f25ef4c98716a928bacba90d33a3",
+                "sha256:8d935f924bbab8f0a9a28404422da8af4904e36d5c33fc6f677e4c4485515625",
+                "sha256:8f36397bf3f7d7c6a3abdea815ecf6fd14e7fcd4418ab24bae01008d8d8ca15e",
+                "sha256:91ec6fe47b5eb5a9968c79ad9ed78c342b1f97a091677ba0e012701add857829",
+                "sha256:965e4a05ef364e7b973dd17fc765f42233415974d773e82144c9bbaaaea5d089",
+                "sha256:96e88745a55b88a7c64fa49bceff363a1a27d9a64e04019c2281049444a571e3",
+                "sha256:99eb6cafb6ba90e436684e08dad8be1637efb71c4f2180ee6b8f940739406e78",
+                "sha256:9adf58f5d64e474bed00d69bcd86ec4bcaa4123bfa70a65ce72e424bfb88ed96",
+                "sha256:9b1af95c3a967bf1da94f253e56b6286b50af23392a886720f563c547e48e964",
+                "sha256:a0aa9417994d91301056f3d0038af1199eb7adc86e646a36b9e050b06f526597",
+                "sha256:a0f9bb6c80e6efcde93ffc51256d5cfb2155ff8f78292f074f60f9e70b942d99",
+                "sha256:a127ae76092974abfbfa38ca2d12cbeddcdeac0fb71f9627cc1135bedaf9d51a",
+                "sha256:aaf305d6d40bd9632198c766fb64f0c1a83ca5b667f16c1e79e1661ab5060140",
+                "sha256:aca1c196f407ec7cf04dcbb15d19a43c507a81f7ffc45b690899d6a76ac9fda7",
+                "sha256:ace6ca218308447b9077c14ea4ef381ba0b67ee78d64046b3f19cf4e1139ad16",
+                "sha256:b416f03d37d27290cb93597335a2f85ed446731200705b22bb927405320de903",
+                "sha256:bf548479d336726d7a0eceb6e767e179fbde37833ae42794602631a070d630f1",
+                "sha256:c1170d6b195555644f0616fd6ed929dfcf6333b8675fcca044ae5ab110ded296",
+                "sha256:c380b27d041209b849ed246b111b7c166ba36d7933ec6e41175fd15ab9eb1572",
+                "sha256:c446d2245ba29820d405315083d55299a796695d747efceb5717a8b450324115",
+                "sha256:c830a02caeb789633863b466b9de10c015bded434deb3ec87c768e53752ad22a",
+                "sha256:cb841572862f629b99725ebaec3287fc6d275be9b14443ea746c1dd325053cbd",
+                "sha256:cfa4561277f677ecf651e2b22dc43e8f5368b74a25a8f7d1d4a3a243e573f2d4",
+                "sha256:cfcc2c53c06f2ccb8976fb5c71d448bdd0a07d26d8e07e321c103416444c7ad1",
+                "sha256:d3c6b54e304c60c4181da1c9dadf83e4a54fd266a99c70ba646a9baa626819eb",
+                "sha256:d3d403753c9d5adc04d4694d35cf0391f0f3d57c8e0030aac09d7678fa8030aa",
+                "sha256:d9c206c29b46cfd343ea7cdfe1232443072bbb270d6a46f59c259460db76779a",
+                "sha256:e49eb4e95ff6fd7c0c402508894b1ef0e01b99a44320ba7d8ecbabefddcc5569",
+                "sha256:f8286396b351785801a976b1e85ea88e937712ee2c3ac653710a4a57a8da5d9c",
+                "sha256:f8fc330c3370a81bbf3f88557097d1ea26cd8b019d6433aa59f71195f5ddebbf",
+                "sha256:fbd359831c1657d69bb81f0db962905ee05e5e9451913b18b831febfe0519082",
+                "sha256:fe7e1c262d3392afcf5071df9afa574544f28eac825284596ac6db56e6d11062",
+                "sha256:fed1e1cf6a42577953abbe8e6cf2fe2f566daebde7c34724ec8803c4c0cda579"
+            ],
+            "markers": "python_version >= '3.7'",
+            "version": "==9.5.0"
+        },
+        "pyparsing": {
+            "hashes": [
+                "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb",
+                "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"
+            ],
+            "markers": "python_full_version >= '3.6.8'",
+            "version": "==3.0.9"
+        },
+        "python-dateutil": {
+            "hashes": [
+                "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86",
+                "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"
+            ],
+            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'",
+            "version": "==2.8.2"
+        },
+        "scan-build": {
+            "hashes": [
+                "sha256:f1f9f1dc3daf906ef106077dbac4d872f5740843173dc74070ef3b39da3d0f07",
+                "sha256:f61cd6e05269819c8c5fb6d61eabb5cbc3c271ca99b89c096e45f82c454f63ef"
+            ],
+            "index": "pypi",
+            "version": "==2.0.20"
+        },
+        "six": {
+            "hashes": [
+                "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
+                "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
+            ],
+            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'",
+            "version": "==1.16.0"
+        },
+        "zipp": {
+            "hashes": [
+                "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b",
+                "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"
+            ],
+            "markers": "python_version < '3.10'",
+            "version": "==3.15.0"
+        }
+    },
+    "develop": {}
+}
diff --git a/gitlab-simgrid.yml b/gitlab-simgrid.yml
new file mode 100755
index 0000000000000000000000000000000000000000..6d40ad739946354feaec8a4d497bb360b1fa0804
--- /dev/null
+++ b/gitlab-simgrid.yml
@@ -0,0 +1,31 @@
+# This file is intended for shared infrastructures: to reduce the computational burden, it only tests simgrid
+
+variables:
+    GIT_SUBMODULE_STRATEGY: none
+
+stages:
+    - build
+
+pages:
+    stage: build
+    needs: []
+    script:
+        - apt update; apt install -y p7zip wget
+        - wget https://gitlab.com/MpiBugsInitiative/MpiBugsInitiative/uploads/7072bf58ed7213d9d909c82023e4f66e/logs-220324.7z
+        - 7zr x -so logs-*.7z | tar xf -
+        - mkdir -p logs/simgrid
+        - touch logs/simgrid/trust_the_installation trust_the_installation # Silence the checks for the right docker image
+        - python3 ./MBI.py -c generate
+#        - python3 ./MBI.py -x simgrid | grep Test
+        - python3 ./MBI.py -c html -x itac,simgrid,must,parcoach,isp,aislinn,mpisv,civl,smpi,smpivg
+        - pwd ; ls ; echo logs:; ls logs
+        - mkdir public
+        - cp -r *.html gencodes img logs public/
+    image:
+        name: simgrid/tuto-mc
+        entrypoint: ['/bin/bash', '-c', 'ln -snf /bin/bash /bin/sh && /bin/bash -c $0' ]
+    artifacts:
+        untracked: false
+        when: always
+        paths:
+            - public
diff --git a/img/CREDITS b/img/CREDITS
new file mode 100644
index 0000000000000000000000000000000000000000..323c822d04441cd17d646597e47d3b7b6bf62957
--- /dev/null
+++ b/img/CREDITS
@@ -0,0 +1,4 @@
+All images were found on OpenClipArt, with a "public domain" licence.
+I changed the colors of two of these images.
+
+Original URL: https://openclipart.org/detail/326703/hmi-icon-bad
\ No newline at end of file
diff --git a/img/FALSE_NEG.svg b/img/FALSE_NEG.svg
new file mode 100644
index 0000000000000000000000000000000000000000..6dd4c29efa7560ab61c3d2d57f05a8e777a1ae93
--- /dev/null
+++ b/img/FALSE_NEG.svg
@@ -0,0 +1,113 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="38.085907mm"
+   height="38.085907mm"
+   viewBox="0 0 38.085907 38.085907"
+   version="1.1"
+   id="svg4282"
+   inkscape:version="1.0.1 (3bc2e813f5, 2020-09-07)"
+   sodipodi:docname="icon_bad.svg">
+  <defs
+     id="defs4276">
+    <filter
+       height="1.148898"
+       y="-0.074448995"
+       width="1.1865224"
+       x="-0.093261182"
+       id="filter14079-1"
+       style="color-interpolation-filters:sRGB"
+       inkscape:collect="always">
+      <feGaussianBlur
+         id="feGaussianBlur14081-7"
+         stdDeviation="1.1047884"
+         inkscape:collect="always" />
+    </filter>
+  </defs>
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="0.35"
+     inkscape:cx="-104.62471"
+     inkscape:cy="592.51817"
+     inkscape:document-units="mm"
+     inkscape:current-layer="layer1"
+     inkscape:document-rotation="0"
+     showgrid="false"
+     fit-margin-top="0"
+     fit-margin-left="0"
+     fit-margin-right="0"
+     fit-margin-bottom="0"
+     inkscape:window-width="1438"
+     inkscape:window-height="1040"
+     inkscape:window-x="258"
+     inkscape:window-y="373"
+     inkscape:window-maximized="0" />
+  <metadata
+     id="metadata4279">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title></dc:title>
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Calque 1"
+     inkscape:groupmode="layer"
+     id="layer1"
+     transform="translate(-133.51529,8.6037653)">
+    <g
+       id="g5130"
+       transform="translate(-96.50493,280.11854)">
+      <circle
+         r="19.042953"
+         cy="-269.67935"
+         cx="249.06317"
+         id="circle4981-8"
+         style="opacity:1;vector-effect:none;fill:#ff0000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.529167;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+      <circle
+         style="opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.447471;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
+         id="circle4983-1"
+         cx="249.06317"
+         cy="-269.67935"
+         r="16.102989" />
+      <circle
+         r="14.633007"
+         cy="-269.67935"
+         cx="249.06317"
+         id="circle4985-1"
+         style="opacity:1;vector-effect:none;fill:#ff8080;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.406623;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+      <path
+         sodipodi:nodetypes="sscccccscccsscccsscccs"
+         inkscape:connector-curvature="0"
+         id="path5087"
+         d="m 238.92388,-281.33904 c -1.16408,-1.3e-4 -2.32786,0.43724 -3.20312,1.3125 l -0.17969,0.17969 c -1.75053,1.75051 -1.75053,4.65574 0,6.40625 l 5.34961,5.34961 -5.34961,5.34766 c -1.75053,1.75051 -1.75053,4.65573 0,6.40625 l 0.17969,0.17968 c 1.75052,1.75052 4.65607,1.75018 6.40625,0 l 5.34766,-5.34765 5.3496,5.34765 c 1.75052,1.75052 4.65574,1.75052 6.40625,0 l 0.17969,-0.17968 c 1.75052,-1.75052 1.75019,-4.65608 0,-6.40625 l -5.34961,-5.34766 5.34961,-5.34961 c 1.75019,-1.75017 1.75052,-4.65573 0,-6.40625 l -0.17969,-0.17969 c -1.75051,-1.75051 -4.65573,-1.75052 -6.40625,0 l -5.3496,5.34766 -5.34766,-5.34766 c -0.87509,-0.87509 -2.03904,-1.31236 -3.20313,-1.3125 z"
+         style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:0.453;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:0.79375;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;filter:url(#filter14079-1);color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+      <path
+         style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:3.96875;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
+         d="m 240.51138,-282.92654 c -1.16408,-1.3e-4 -2.32786,0.43724 -3.20312,1.3125 l -0.17969,0.17969 c -1.75053,1.75051 -1.75053,4.65573 0,6.40625 l 5.34961,5.34961 -5.34961,5.34766 c -1.75053,1.75051 -1.75053,4.65573 0,6.40625 l 0.17969,0.17968 c 1.75052,1.75052 4.65607,1.75018 6.40625,0 l 5.34766,-5.34765 5.3496,5.34765 c 1.75052,1.75052 4.65574,1.75052 6.40626,0 l 0.17968,-0.17968 c 1.75052,-1.75052 1.75019,-4.65608 0,-6.40625 l -5.34961,-5.34766 5.34961,-5.34961 c 1.75019,-1.75018 1.75052,-4.65573 0,-6.40625 l -0.17968,-0.17969 c -1.75052,-1.75052 -4.65574,-1.75052 -6.40626,0 l -5.3496,5.34766 -5.34766,-5.34766 c -0.87509,-0.87509 -2.03904,-1.31236 -3.20313,-1.3125 z"
+         id="path5083"
+         inkscape:connector-curvature="0"
+         sodipodi:nodetypes="sscccccscccsscccsscccs" />
+      <path
+         style="opacity:1;vector-effect:none;fill:#ff0000;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:0.529167;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
+         d="m 259.41526,-280.21076 c -0.99735,-0.99735 -2.60337,-0.99735 -3.60072,0 l -6.75164,6.75164 -6.75127,-6.75127 c -0.99735,-0.99735 -2.60301,-0.99772 -3.60036,-3.7e-4 l -0.17941,0.17942 c -0.99736,0.99735 -0.99736,2.60337 0,3.60072 l 6.75127,6.75127 -6.75127,6.75127 c -0.99736,0.99736 -0.99736,2.60338 0,3.60073 l 0.17941,0.17942 c 0.99735,0.99735 2.60301,0.99698 3.60036,-3.7e-4 l 6.75127,-6.75127 6.75164,6.75163 c 0.99735,0.99736 2.60337,0.99736 3.60072,1e-5 l 0.17942,-0.17942 c 0.99735,-0.99735 0.99699,-2.60301 -3.7e-4,-3.60036 l -6.75163,-6.75164 6.75163,-6.75163 c 0.99736,-0.99735 0.99772,-2.60301 3.7e-4,-3.60036 z"
+         id="rect5023"
+         inkscape:connector-curvature="0" />
+    </g>
+  </g>
+</svg>
diff --git a/img/FALSE_POS.svg b/img/FALSE_POS.svg
new file mode 100644
index 0000000000000000000000000000000000000000..6dd4c29efa7560ab61c3d2d57f05a8e777a1ae93
--- /dev/null
+++ b/img/FALSE_POS.svg
@@ -0,0 +1,113 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="38.085907mm"
+   height="38.085907mm"
+   viewBox="0 0 38.085907 38.085907"
+   version="1.1"
+   id="svg4282"
+   inkscape:version="1.0.1 (3bc2e813f5, 2020-09-07)"
+   sodipodi:docname="icon_bad.svg">
+  <defs
+     id="defs4276">
+    <filter
+       height="1.148898"
+       y="-0.074448995"
+       width="1.1865224"
+       x="-0.093261182"
+       id="filter14079-1"
+       style="color-interpolation-filters:sRGB"
+       inkscape:collect="always">
+      <feGaussianBlur
+         id="feGaussianBlur14081-7"
+         stdDeviation="1.1047884"
+         inkscape:collect="always" />
+    </filter>
+  </defs>
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="0.35"
+     inkscape:cx="-104.62471"
+     inkscape:cy="592.51817"
+     inkscape:document-units="mm"
+     inkscape:current-layer="layer1"
+     inkscape:document-rotation="0"
+     showgrid="false"
+     fit-margin-top="0"
+     fit-margin-left="0"
+     fit-margin-right="0"
+     fit-margin-bottom="0"
+     inkscape:window-width="1438"
+     inkscape:window-height="1040"
+     inkscape:window-x="258"
+     inkscape:window-y="373"
+     inkscape:window-maximized="0" />
+  <metadata
+     id="metadata4279">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title></dc:title>
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Calque 1"
+     inkscape:groupmode="layer"
+     id="layer1"
+     transform="translate(-133.51529,8.6037653)">
+    <g
+       id="g5130"
+       transform="translate(-96.50493,280.11854)">
+      <circle
+         r="19.042953"
+         cy="-269.67935"
+         cx="249.06317"
+         id="circle4981-8"
+         style="opacity:1;vector-effect:none;fill:#ff0000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.529167;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+      <circle
+         style="opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.447471;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
+         id="circle4983-1"
+         cx="249.06317"
+         cy="-269.67935"
+         r="16.102989" />
+      <circle
+         r="14.633007"
+         cy="-269.67935"
+         cx="249.06317"
+         id="circle4985-1"
+         style="opacity:1;vector-effect:none;fill:#ff8080;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.406623;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+      <path
+         sodipodi:nodetypes="sscccccscccsscccsscccs"
+         inkscape:connector-curvature="0"
+         id="path5087"
+         d="m 238.92388,-281.33904 c -1.16408,-1.3e-4 -2.32786,0.43724 -3.20312,1.3125 l -0.17969,0.17969 c -1.75053,1.75051 -1.75053,4.65574 0,6.40625 l 5.34961,5.34961 -5.34961,5.34766 c -1.75053,1.75051 -1.75053,4.65573 0,6.40625 l 0.17969,0.17968 c 1.75052,1.75052 4.65607,1.75018 6.40625,0 l 5.34766,-5.34765 5.3496,5.34765 c 1.75052,1.75052 4.65574,1.75052 6.40625,0 l 0.17969,-0.17968 c 1.75052,-1.75052 1.75019,-4.65608 0,-6.40625 l -5.34961,-5.34766 5.34961,-5.34961 c 1.75019,-1.75017 1.75052,-4.65573 0,-6.40625 l -0.17969,-0.17969 c -1.75051,-1.75051 -4.65573,-1.75052 -6.40625,0 l -5.3496,5.34766 -5.34766,-5.34766 c -0.87509,-0.87509 -2.03904,-1.31236 -3.20313,-1.3125 z"
+         style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:0.453;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:0.79375;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;filter:url(#filter14079-1);color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+      <path
+         style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:3.96875;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
+         d="m 240.51138,-282.92654 c -1.16408,-1.3e-4 -2.32786,0.43724 -3.20312,1.3125 l -0.17969,0.17969 c -1.75053,1.75051 -1.75053,4.65573 0,6.40625 l 5.34961,5.34961 -5.34961,5.34766 c -1.75053,1.75051 -1.75053,4.65573 0,6.40625 l 0.17969,0.17968 c 1.75052,1.75052 4.65607,1.75018 6.40625,0 l 5.34766,-5.34765 5.3496,5.34765 c 1.75052,1.75052 4.65574,1.75052 6.40626,0 l 0.17968,-0.17968 c 1.75052,-1.75052 1.75019,-4.65608 0,-6.40625 l -5.34961,-5.34766 5.34961,-5.34961 c 1.75019,-1.75018 1.75052,-4.65573 0,-6.40625 l -0.17968,-0.17969 c -1.75052,-1.75052 -4.65574,-1.75052 -6.40626,0 l -5.3496,5.34766 -5.34766,-5.34766 c -0.87509,-0.87509 -2.03904,-1.31236 -3.20313,-1.3125 z"
+         id="path5083"
+         inkscape:connector-curvature="0"
+         sodipodi:nodetypes="sscccccscccsscccsscccs" />
+      <path
+         style="opacity:1;vector-effect:none;fill:#ff0000;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:0.529167;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
+         d="m 259.41526,-280.21076 c -0.99735,-0.99735 -2.60337,-0.99735 -3.60072,0 l -6.75164,6.75164 -6.75127,-6.75127 c -0.99735,-0.99735 -2.60301,-0.99772 -3.60036,-3.7e-4 l -0.17941,0.17942 c -0.99736,0.99735 -0.99736,2.60337 0,3.60072 l 6.75127,6.75127 -6.75127,6.75127 c -0.99736,0.99736 -0.99736,2.60338 0,3.60073 l 0.17941,0.17942 c 0.99735,0.99735 2.60301,0.99698 3.60036,-3.7e-4 l 6.75127,-6.75127 6.75164,6.75163 c 0.99735,0.99736 2.60337,0.99736 3.60072,1e-5 l 0.17942,-0.17942 c 0.99735,-0.99735 0.99699,-2.60301 -3.7e-4,-3.60036 l -6.75163,-6.75164 6.75163,-6.75163 c 0.99736,-0.99735 0.99772,-2.60301 3.7e-4,-3.60036 z"
+         id="rect5023"
+         inkscape:connector-curvature="0" />
+    </g>
+  </g>
+</svg>
diff --git a/img/TRUE_NEG.svg b/img/TRUE_NEG.svg
new file mode 100644
index 0000000000000000000000000000000000000000..9c3fa8a2dc4edf5c9cc85d013d625073f208afc8
--- /dev/null
+++ b/img/TRUE_NEG.svg
@@ -0,0 +1,114 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="38.085907mm"
+   height="38.085907mm"
+   viewBox="0 0 38.085907 38.085907"
+   version="1.1"
+   id="svg4282"
+   inkscape:version="1.0.1 (3bc2e813f5, 2020-09-07)"
+   sodipodi:docname="icon_ok.svg">
+  <defs
+     id="defs4276">
+    <filter
+       height="1.148898"
+       y="-0.074448995"
+       width="1.1865224"
+       x="-0.093261182"
+       id="filter14079"
+       style="color-interpolation-filters:sRGB"
+       inkscape:collect="always">
+      <feGaussianBlur
+         id="feGaussianBlur14081"
+         stdDeviation="1.1047884"
+         inkscape:collect="always" />
+    </filter>
+  </defs>
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="0.35"
+     inkscape:cx="114.83049"
+     inkscape:cy="260.54479"
+     inkscape:document-units="mm"
+     inkscape:current-layer="layer1"
+     inkscape:document-rotation="0"
+     showgrid="false"
+     fit-margin-top="0"
+     fit-margin-left="0"
+     fit-margin-right="0"
+     fit-margin-bottom="0"
+     inkscape:window-width="1438"
+     inkscape:window-height="1040"
+     inkscape:window-x="78"
+     inkscape:window-y="0"
+     inkscape:window-maximized="0" />
+  <metadata
+     id="metadata4279">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title></dc:title>
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Calque 1"
+     inkscape:groupmode="layer"
+     id="layer1"
+     transform="translate(-75.451099,-79.230856)">
+    <g
+       id="g5131"
+       transform="translate(67.182531,26.047484)">
+      <circle
+         r="19.042953"
+         cy="72.226326"
+         cx="27.311522"
+         id="circle4911-0"
+         style="opacity:1;vector-effect:none;fill:#008000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.529167;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+      <circle
+         style="opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.447471;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
+         id="circle4913-2"
+         cx="27.311522"
+         cy="72.226326"
+         r="16.102989" />
+      <circle
+         r="14.633007"
+         cy="72.226326"
+         cx="27.311522"
+         id="circle4915-5"
+         style="opacity:1;vector-effect:none;fill:#00ff00;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.406623;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+      <path
+         transform="matrix(0.94620108,0,0,0.94394222,0.96905713,5.0458931)"
+         sodipodi:nodetypes="ccccccccc"
+         inkscape:connector-curvature="0"
+         id="path5001"
+         d="m 42.629426,56.585288 c -0.472205,0.0049 -0.927185,0.178035 -1.283203,0.488281 l -19.009766,16.56836 -8.628906,-2.03711 c -1.850267,-0.437528 -3.19737,1.726042 -1.988281,3.19336 l 10.023437,12.160156 c 0.829231,1.007011 2.387863,0.953799 3.146484,-0.107422 L 44.266144,59.722007 c 0.942364,-1.321478 -0.01374,-3.153819 -1.636718,-3.136719 z"
+         style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:0.453;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:0.79375;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;filter:url(#filter14079);color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+      <path
+         style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:3.96875;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
+         d="m 43.68776,53.939453 c -0.472205,0.0049 -0.927185,0.178035 -1.283203,0.488281 l -19.009766,16.56836 -8.628906,-2.03711 c -1.850267,-0.437528 -3.19737,1.726042 -1.988281,3.19336 L 22.801041,84.3125 c 0.829231,1.007011 2.387863,0.953799 3.146484,-0.107422 L 45.324478,57.076172 c 0.942364,-1.321478 -0.01374,-3.153819 -1.636718,-3.136719 z"
+         id="path4997"
+         inkscape:connector-curvature="0"
+         sodipodi:nodetypes="ccccccccc" />
+      <path
+         style="fill:#00ff00;stroke:#000000;stroke-width:0.529167;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+         d="M 14.309664,70.889978 24.332271,83.050741 43.709313,55.922885 23.931367,73.16177 Z"
+         id="path4995"
+         inkscape:connector-curvature="0" />
+    </g>
+  </g>
+</svg>
diff --git a/img/TRUE_POS.svg b/img/TRUE_POS.svg
new file mode 100644
index 0000000000000000000000000000000000000000..9c3fa8a2dc4edf5c9cc85d013d625073f208afc8
--- /dev/null
+++ b/img/TRUE_POS.svg
@@ -0,0 +1,114 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="38.085907mm"
+   height="38.085907mm"
+   viewBox="0 0 38.085907 38.085907"
+   version="1.1"
+   id="svg4282"
+   inkscape:version="1.0.1 (3bc2e813f5, 2020-09-07)"
+   sodipodi:docname="icon_ok.svg">
+  <defs
+     id="defs4276">
+    <filter
+       height="1.148898"
+       y="-0.074448995"
+       width="1.1865224"
+       x="-0.093261182"
+       id="filter14079"
+       style="color-interpolation-filters:sRGB"
+       inkscape:collect="always">
+      <feGaussianBlur
+         id="feGaussianBlur14081"
+         stdDeviation="1.1047884"
+         inkscape:collect="always" />
+    </filter>
+  </defs>
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="0.35"
+     inkscape:cx="114.83049"
+     inkscape:cy="260.54479"
+     inkscape:document-units="mm"
+     inkscape:current-layer="layer1"
+     inkscape:document-rotation="0"
+     showgrid="false"
+     fit-margin-top="0"
+     fit-margin-left="0"
+     fit-margin-right="0"
+     fit-margin-bottom="0"
+     inkscape:window-width="1438"
+     inkscape:window-height="1040"
+     inkscape:window-x="78"
+     inkscape:window-y="0"
+     inkscape:window-maximized="0" />
+  <metadata
+     id="metadata4279">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title></dc:title>
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Calque 1"
+     inkscape:groupmode="layer"
+     id="layer1"
+     transform="translate(-75.451099,-79.230856)">
+    <g
+       id="g5131"
+       transform="translate(67.182531,26.047484)">
+      <circle
+         r="19.042953"
+         cy="72.226326"
+         cx="27.311522"
+         id="circle4911-0"
+         style="opacity:1;vector-effect:none;fill:#008000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.529167;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+      <circle
+         style="opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.447471;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
+         id="circle4913-2"
+         cx="27.311522"
+         cy="72.226326"
+         r="16.102989" />
+      <circle
+         r="14.633007"
+         cy="72.226326"
+         cx="27.311522"
+         id="circle4915-5"
+         style="opacity:1;vector-effect:none;fill:#00ff00;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.406623;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+      <path
+         transform="matrix(0.94620108,0,0,0.94394222,0.96905713,5.0458931)"
+         sodipodi:nodetypes="ccccccccc"
+         inkscape:connector-curvature="0"
+         id="path5001"
+         d="m 42.629426,56.585288 c -0.472205,0.0049 -0.927185,0.178035 -1.283203,0.488281 l -19.009766,16.56836 -8.628906,-2.03711 c -1.850267,-0.437528 -3.19737,1.726042 -1.988281,3.19336 l 10.023437,12.160156 c 0.829231,1.007011 2.387863,0.953799 3.146484,-0.107422 L 44.266144,59.722007 c 0.942364,-1.321478 -0.01374,-3.153819 -1.636718,-3.136719 z"
+         style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:0.453;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:0.79375;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;filter:url(#filter14079);color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+      <path
+         style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:3.96875;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
+         d="m 43.68776,53.939453 c -0.472205,0.0049 -0.927185,0.178035 -1.283203,0.488281 l -19.009766,16.56836 -8.628906,-2.03711 c -1.850267,-0.437528 -3.19737,1.726042 -1.988281,3.19336 L 22.801041,84.3125 c 0.829231,1.007011 2.387863,0.953799 3.146484,-0.107422 L 45.324478,57.076172 c 0.942364,-1.321478 -0.01374,-3.153819 -1.636718,-3.136719 z"
+         id="path4997"
+         inkscape:connector-curvature="0"
+         sodipodi:nodetypes="ccccccccc" />
+      <path
+         style="fill:#00ff00;stroke:#000000;stroke-width:0.529167;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+         d="M 14.309664,70.889978 24.332271,83.050741 43.709313,55.922885 23.931367,73.16177 Z"
+         id="path4995"
+         inkscape:connector-curvature="0" />
+    </g>
+  </g>
+</svg>
diff --git a/img/failure.svg b/img/failure.svg
new file mode 100644
index 0000000000000000000000000000000000000000..44928f54a58ea3c73315ae321fcb549f6739c16c
--- /dev/null
+++ b/img/failure.svg
@@ -0,0 +1,111 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="38.085907mm"
+   height="38.085907mm"
+   viewBox="0 0 38.085907 38.085907"
+   version="1.1"
+   id="svg4282"
+   inkscape:version="1.0.2 (e86c870879, 2021-01-15)"
+   sodipodi:docname="failure.svg">
+  <defs
+     id="defs4276">
+    <filter
+       height="1.1365677"
+       y="-0.068283834"
+       width="1.3679469"
+       x="-0.18397348"
+       id="filter5033-3"
+       style="color-interpolation-filters:sRGB"
+       inkscape:collect="always">
+      <feGaussianBlur
+         id="feGaussianBlur5035-0"
+         stdDeviation="0.62971388"
+         inkscape:collect="always" />
+    </filter>
+  </defs>
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="3.0216635"
+     inkscape:cx="-11.847447"
+     inkscape:cy="95.29158"
+     inkscape:document-units="mm"
+     inkscape:current-layer="layer1"
+     inkscape:document-rotation="0"
+     showgrid="false"
+     fit-margin-top="0"
+     fit-margin-left="0"
+     fit-margin-right="0"
+     fit-margin-bottom="0"
+     inkscape:window-width="1438"
+     inkscape:window-height="1013"
+     inkscape:window-x="258"
+     inkscape:window-y="33"
+     inkscape:window-maximized="0" />
+  <metadata
+     id="metadata4279">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title></dc:title>
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Calque 1"
+     inkscape:groupmode="layer"
+     id="layer1"
+     transform="translate(-146.22234,-57.775904)">
+    <circle
+       style="vector-effect:none;fill:#d47500;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.529167;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
+       id="circle4974"
+       cx="165.26529"
+       cy="76.818855"
+       r="19.042953" />
+    <circle
+       r="16.102989"
+       cy="76.818855"
+       cx="165.11584"
+       id="circle4976"
+       style="vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.447471;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+    <circle
+       style="vector-effect:none;fill:#ffcb1e;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.406623;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
+       id="circle4978"
+       cx="165.26529"
+       cy="76.818855"
+       r="14.633007" />
+    <path
+       style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:0.45;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:2.201;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:fill markers stroke;filter:url(#filter5033-3);color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
+       d="m 305.71278,-284.95215 c -2.28953,0 -4.10742,1.95693 -4.10742,4.27539 v 8.35608 c 0,1.76425 1.0583,3.30646 2.57031,3.94922 -1.40551,0.60368 -2.40036,2.00211 -2.40039,3.61914 4e-5,2.162 1.77551,3.93747 3.9375,3.9375 2.16214,1.8e-4 3.93943,-1.77536 3.93946,-3.9375 -3e-5,-1.61657 -0.99555,-3.01712 -2.40039,-3.62109 1.51096,-0.64329 2.56836,-2.18375 2.56836,-3.94727 v -8.35608 c 0,-2.31857 -1.81788,-4.27539 -4.10743,-4.27539 z"
+       id="path5019-5"
+       inkscape:connector-curvature="0"
+       sodipodi:nodetypes="ssscccccsss"
+       transform="matrix(1.2962227,0,0,1.2962227,-233.49507,430.46096)" />
+    <path
+       sodipodi:nodetypes="ssscccccsss"
+       inkscape:connector-curvature="0"
+       id="path5015-6"
+       d="m 165.26529,59.06806 c -2.96775,0 -5.32413,2.53661 -5.32413,5.54186 v 10.83134 c 0,2.28686 1.3718,4.28591 3.3317,5.11907 -1.82186,0.78249 -3.11142,2.59517 -3.11144,4.69121 4e-5,2.80243 2.30145,5.10384 5.10387,5.10388 2.80262,2.2e-4 5.10637,-2.30127 5.10641,-5.10388 -3e-5,-2.09544 -1.29044,-3.91087 -3.11144,-4.69374 1.95855,-0.83385 3.32917,-2.83064 3.32917,-5.11654 V 64.60992 c 0,-3.00538 -2.35638,-5.54186 -5.32414,-5.54186 z"
+       style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:2.85299;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:fill markers stroke;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+    <path
+       sodipodi:nodetypes="sssssssccccc"
+       inkscape:connector-curvature="0"
+       id="path5010-2"
+       d="m 165.26513,60.49365 c -2.15916,0 -3.89719,1.83584 -3.89719,4.11629 v 10.83128 c 0,2.28045 1.73803,4.1163 3.89719,4.1163 2.15917,0 3.89752,-1.83585 3.89752,-4.1163 V 64.60994 c 0,-2.28045 -1.73835,-4.11629 -3.89752,-4.11629 z m 0,21.07833 c -2.03151,4e-5 -3.67837,1.64689 -3.6784,3.67841 3e-5,2.0315 1.64689,3.67836 3.6784,3.67839 2.03164,1.6e-4 3.67871,-1.64675 3.67874,-3.67839 -3e-5,-2.03165 -1.6471,-3.67857 -3.67874,-3.67841 z"
+       style="vector-effect:none;fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.520375;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:fill markers stroke" />
+  </g>
+</svg>
diff --git a/img/html.svg b/img/html.svg
new file mode 100644
index 0000000000000000000000000000000000000000..0f69eace609ed6c47703ea52fba50b5061fcd995
--- /dev/null
+++ b/img/html.svg
@@ -0,0 +1,250 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Sodipodi ("http://www.sodipodi.com/") -->
+<svg
+    xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+    xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+    xmlns="http://www.w3.org/2000/svg"
+    xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+    xmlns:cc="http://creativecommons.org/ns#"
+    xmlns:xlink="http://www.w3.org/1999/xlink"
+    xmlns:dc="http://purl.org/dc/elements/1.1/"
+    id="svg1089"
+    space="preserve"
+    viewBox="0 0 31.001 49.091"
+    sodipodi:version="0.34"
+    version="1.1"
+    inkscape:version="0.48.3.1 r9886"
+    sodipodi:docname="_svgclean2.svg"
+  >
+  <sodipodi:namedview
+      id="base"
+      inkscape:window-x="0"
+      inkscape:window-y="0"
+      inkscape:window-height="645"
+      inkscape:window-maximized="0"
+      inkscape:zoom="0.22425739"
+      showgrid="false"
+      inkscape:current-layer="svg1089"
+      inkscape:cx="366.54241"
+      inkscape:cy="-472.44343"
+      inkscape:window-width="674"
+  />
+  <path
+      id="path913"
+      sodipodi:rx="11.026123"
+      sodipodi:ry="10.746269"
+      style="stroke:#90a0ba;fill-rule:evenodd;fill:#90a0ba"
+      sodipodi:type="arc"
+      d="m580.19 35.149a11.026 10.746 0 1 1 -22.052 0 11.026 10.746 0 1 1 22.052 0z"
+      transform="translate(-557.63 -23.23)"
+      sodipodi:cy="35.148811"
+      sodipodi:cx="569.16095"
+  />
+  <a
+      id="a1514"
+      xlink:href="diagram/diagram.php"
+      transform="translate(-538.98 -11.45)"
+    >
+    <path
+        id="path914"
+        sodipodi:rx="11"
+        sodipodi:ry="10.75"
+        style="stroke:#90a0ba;fill-rule:evenodd;fill:#f9fbfd"
+        sodipodi:type="arc"
+        d="m562 22.7c0 5.9371-4.9249 10.75-11 10.75s-11-4.8129-11-10.75 4.9249-10.75 11-10.75 11 4.8129 11 10.75z"
+        sodipodi:cy="22.700001"
+        sodipodi:cx="551"
+    />
+  </a
+  >
+  <path
+      id="path915"
+      sodipodi:rx="0.33581543"
+      sodipodi:ry="1.9589615"
+      style="fill-rule:evenodd;fill:#90a0ba"
+      sodipodi:type="arc"
+      d="m611.98 38.563a0.33582 1.959 0 1 1 -0.67163 0 0.33582 1.959 0 1 1 0.67163 0z"
+      transform="matrix(.4751 .87993 -.87993 .4751 -239.77 -533.63)"
+      sodipodi:cy="38.563004"
+      sodipodi:cx="611.64233"
+  />
+  <path
+      id="path916"
+      sodipodi:rx="2.8495178"
+      sodipodi:ry="3.0474091"
+      style="fill-rule:evenodd;fill:#90a0ba"
+      sodipodi:type="arc"
+      d="m588.75 70.617a2.8495 3.0474 0 1 1 -5.699 0 2.8495 3.0474 0 1 1 5.699 0z"
+      transform="translate(-557.91 -24.574)"
+      sodipodi:cy="70.617256"
+      sodipodi:cx="585.89685"
+  />
+  <rect
+      id="rect917"
+      style="fill-rule:evenodd;fill:#70809a"
+      transform="rotate(-23.22)"
+      height="20.58"
+      width="4.1159"
+      y="33.266"
+      x="4.6948"
+  />
+  <rect
+      id="rect918"
+      style="fill-rule:evenodd;fill:#90a0ba"
+      transform="matrix(.89059 -.45480 .45480 .89059 0 0)"
+      height="20.58"
+      width="4.116"
+      y="33.525"
+      x="2.6985"
+  />
+  <rect
+      id="rect919"
+      style="fill-rule:evenodd;stroke:#90a0ba;stroke-opacity:0.557;fill:#f9fbfd"
+      transform="matrix(.88893 -.45804 .45654 .88970 0 0)"
+      height="20.48"
+      width="1.0903"
+      y="33.75"
+      x="5.3707"
+  />
+  <path
+      id="path920"
+      sodipodi:rx="1.2982178"
+      sodipodi:ry="0.71817398"
+      style="fill-rule:evenodd;fill:#90a0ba"
+      sodipodi:type="arc"
+      d="m599.61 42.396a1.2982 0.71817 0 1 1 -2.5964 0 1.2982 0.71817 0 1 1 2.5964 0z"
+      transform="matrix(.9091 -.41657 .41657 .9091 -545.09 232.89)"
+      sodipodi:cy="42.396488"
+      sodipodi:cx="598.31012"
+  />
+  <path
+      id="path921"
+      sodipodi:rx="1.4086914"
+      sodipodi:ry="1.4086914"
+      style="fill-rule:evenodd;fill:#90a0ba"
+      sodipodi:type="arc"
+      d="m595.63 50.6a1.4087 1.4087 0 1 1 -2.8174 0 1.4087 1.4087 0 1 1 2.8174 0z"
+      transform="translate(-576.23 -25.827)"
+      sodipodi:cy="50.60009"
+      sodipodi:cx="594.22217"
+  />
+  <rect
+      id="rect922"
+      style="fill-rule:evenodd;fill:#90a0ba"
+      transform="rotate(18.781)"
+      height="2.9831"
+      width="2.8727"
+      y="18.518"
+      x="25.852"
+  />
+  <rect
+      id="rect923"
+      style="fill-rule:evenodd;fill:#90a0ba"
+      transform="rotate(.85322)"
+      height="3.2594"
+      width="1.1601"
+      y="25.57"
+      x="18.366"
+  />
+  <rect
+      id="rect924"
+      style="fill-rule:evenodd;fill:#90a0ba"
+      transform="matrix(.73730 -.67556 .77612 .63058 0 0)"
+      height="2.8441"
+      width="1.0034"
+      y="31.761"
+      x="-8.9381"
+  />
+  <path
+      id="path925"
+      sodipodi:rx="0.69055176"
+      sodipodi:ry="0.74577332"
+      style="fill-rule:evenodd;fill:#90a0ba"
+      sodipodi:type="arc"
+      d="m598.95 47.009a0.69055 0.74577 0 1 1 -1.3811 0 0.69055 0.74577 0 1 1 1.3811 0z"
+      transform="translate(-581.26 -23.894)"
+      sodipodi:cy="47.009277"
+      sodipodi:cx="598.25488"
+  />
+  <path
+      id="path926"
+      sodipodi:rx="0.66409302"
+      sodipodi:ry="1.2500191"
+      style="stroke:#90a0ba;fill-rule:evenodd;stroke-opacity:.57590;fill:#f9fbfd"
+      sodipodi:type="arc"
+      d="m587.74 35.071a0.66409 1.25 0 1 1 -1.3282 0 0.66409 1.25 0 1 1 1.3282 0z"
+      transform="matrix(.702 -.12712 .17612 .663 -399.73 75.966)"
+      sodipodi:cy="35.071239"
+      sodipodi:cx="587.07861"
+  />
+  <rect
+      id="rect927"
+      style="fill-rule:evenodd;stroke:#90a0ba;stroke-opacity:.67720;fill:#f9fbfd"
+      transform="matrix(.82252 .56874 -.63661 .77119 0 0)"
+      height=".83515"
+      width="1.7545"
+      y="10.564"
+      x="32.132"
+  />
+  <path
+      id="path1952"
+      style="stroke:#89a1ba;stroke-width:1.25;fill:none"
+      inkscape:connector-curvature="0"
+      d="m12.016 5.1v12.21"
+  />
+  <path
+      id="path1953"
+      style="stroke:#90a1ba;stroke-width:1.25;fill:none"
+      inkscape:connector-curvature="0"
+      d="m6.5162 11.25h11.5"
+  />
+  <metadata
+      id="metadata22"
+    >
+    <rdf:RDF
+      >
+      <cc:Work
+        >
+        <dc:format
+          >image/svg+xml</dc:format
+        >
+        <dc:type
+            rdf:resource="http://purl.org/dc/dcmitype/StillImage"
+        />
+        <cc:license
+            rdf:resource="http://creativecommons.org/licenses/publicdomain/"
+        />
+        <dc:publisher
+          >
+          <cc:Agent
+              rdf:about="http://openclipart.org/"
+            >
+            <dc:title
+              >Openclipart</dc:title
+            >
+          </cc:Agent
+          >
+        </dc:publisher
+        >
+      </cc:Work
+      >
+      <cc:License
+          rdf:about="http://creativecommons.org/licenses/publicdomain/"
+        >
+        <cc:permits
+            rdf:resource="http://creativecommons.org/ns#Reproduction"
+        />
+        <cc:permits
+            rdf:resource="http://creativecommons.org/ns#Distribution"
+        />
+        <cc:permits
+            rdf:resource="http://creativecommons.org/ns#DerivativeWorks"
+        />
+      </cc:License
+      >
+    </rdf:RDF
+    >
+  </metadata
+  >
+</svg
+>
diff --git a/img/other.svg b/img/other.svg
new file mode 100644
index 0000000000000000000000000000000000000000..a15dce0b23a512f5a2622c4139802ca90f92d9c8
--- /dev/null
+++ b/img/other.svg
@@ -0,0 +1,122 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="39.853786mm"
+   height="38.085907mm"
+   viewBox="0 0 39.853787 38.085907"
+   version="1.1"
+   id="svg4282"
+   inkscape:version="1.0.1 (3bc2e813f5, 2020-09-07)"
+   sodipodi:docname="icon_manual.svg">
+  <defs
+     id="defs4276">
+    <filter
+       height="1.1365677"
+       y="-0.068283834"
+       width="1.3679469"
+       x="-0.18397348"
+       id="filter5033-3-7"
+       style="color-interpolation-filters:sRGB"
+       inkscape:collect="always">
+      <feGaussianBlur
+         id="feGaussianBlur5035-0-3"
+         stdDeviation="0.62971388"
+         inkscape:collect="always" />
+    </filter>
+  </defs>
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="1.4"
+     inkscape:cx="231.95863"
+     inkscape:cy="-21.015106"
+     inkscape:document-units="mm"
+     inkscape:current-layer="g5215"
+     inkscape:document-rotation="0"
+     showgrid="false"
+     fit-margin-top="0"
+     fit-margin-left="0"
+     fit-margin-right="0"
+     fit-margin-bottom="0"
+     inkscape:window-width="1438"
+     inkscape:window-height="1040"
+     inkscape:window-x="744"
+     inkscape:window-y="158"
+     inkscape:window-maximized="0" />
+  <metadata
+     id="metadata4279">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title></dc:title>
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Calque 1"
+     inkscape:groupmode="layer"
+     id="layer1"
+     transform="translate(-276.49193,-185.43206)">
+    <g
+       transform="translate(-68.703644,430.08545)"
+       id="g5215">
+      <g
+         transform="translate(113.55131,-75.749551)"
+         id="g5610">
+        <circle
+           r="19.042953"
+           cy="-149.86089"
+           cx="252.45509"
+           id="circle5398"
+           style="opacity:1;vector-effect:none;fill:#d47500;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.529167;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+        <circle
+           style="opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.447471;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
+           id="circle5400"
+           cx="252.30565"
+           cy="-149.86089"
+           r="16.102989" />
+        <circle
+           r="14.633007"
+           cy="-149.86089"
+           cx="252.45509"
+           id="circle5402"
+           style="opacity:1;vector-effect:none;fill:#ffcb1e;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.406623;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+        <g
+           id="g5447"
+           transform="matrix(0.86174936,0,0,0.86174936,33.295491,-18.612709)">
+          <path
+             style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:0.45;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:2.85299;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:fill markers stroke;filter:url(#filter5033-3-7);color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
+             d="m 251.44604,-170.71043 c -1.87969,0 -3.43164,1.55191 -3.43164,3.43164 v 0.0703 c -0.48256,-0.26091 -1.00807,-0.44727 -1.58984,-0.44727 -1.8797,0 -3.42969,1.5519 -3.42969,3.43164 v 16.29102 l -0.75976,-0.75977 c -1.51559,-1.51562 -4.01371,-1.51562 -5.5293,0 -1.51562,1.51559 -1.51562,4.01371 0,5.5293 l 5.83789,5.83789 c -0.14858,-0.14858 -0.0337,-0.0335 0.0625,0.0977 0.0962,0.1312 0.23398,0.31993 0.4043,0.53907 0.34063,0.43827 0.81549,1.00137 1.43945,1.5625 1.24793,1.12224 3.1687,2.26172 5.67578,2.26172 h 7.65821 c 3.91995,0 7.13281,-3.21285 7.13281,-7.13282 v -19.1582 c -6e-5,-2.01693 -1.81105,-3.65062 -3.84571,-3.40625 -0.47698,0.0573 -0.78756,0.40396 -1.17578,0.63281 v -2.9414 c 0,-1.71102 -1.25694,-3.24387 -3.01562,-3.45508 -0.77917,-0.0936 -1.48705,0.15019 -2.11133,0.52344 -0.26456,-1.62575 -1.62752,-2.90821 -3.32227,-2.90821 z"
+             id="path5422"
+             inkscape:connector-curvature="0"
+             sodipodi:nodetypes="cscsscccccsccssscccscccc" />
+          <path
+             sodipodi:nodetypes="cscsscccccsccssscccscccc"
+             inkscape:connector-curvature="0"
+             id="path5418"
+             d="m 253.45056,-172.71495 c -1.87969,0 -3.43164,1.55191 -3.43164,3.43164 v 0.0703 c -0.48256,-0.26091 -1.00807,-0.44727 -1.58984,-0.44727 -1.8797,0 -3.42969,1.5519 -3.42969,3.43164 v 16.29102 l -0.75976,-0.75977 c -1.51559,-1.51562 -4.01371,-1.51562 -5.5293,0 -1.51562,1.51559 -1.51562,4.01371 0,5.5293 l 5.83789,5.83789 c -0.14858,-0.14858 -0.0337,-0.0335 0.0625,0.0977 0.0962,0.1312 0.23398,0.31993 0.4043,0.53907 0.34063,0.43827 0.81549,1.00137 1.43945,1.5625 1.24793,1.12224 3.1687,2.26172 5.67578,2.26172 h 7.65821 c 3.91995,0 7.13281,-3.21285 7.13281,-7.13282 v -19.1582 c -6e-5,-2.01693 -1.81105,-3.65062 -3.84571,-3.40625 -0.47698,0.0573 -0.78756,0.40396 -1.17578,0.63281 v -2.9414 c 0,-1.71102 -1.25694,-3.24387 -3.01562,-3.45508 -0.77917,-0.0936 -1.48705,0.15019 -2.11133,0.52344 -0.26456,-1.62575 -1.62752,-2.90821 -3.32227,-2.90821 z"
+             style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:3.175;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+          <path
+             sodipodi:nodetypes="cssccsscsssssccsssssccsccccsssscc"
+             style="stroke-width:0.0216683"
+             id="path2-5"
+             d="m 263.26486,-162.98965 c -0.93375,0.11213 -1.61849,0.93799 -1.61849,1.87843 v 8.57419 c 0,0.36821 -0.2985,0.66671 -0.66672,0.66671 v 0 c -0.36822,0 -0.66671,-0.2985 -0.66671,-0.66671 v -14.33786 c 0,-0.94045 -0.68474,-1.76631 -1.61848,-1.87845 -1.11304,-0.13366 -2.06807,0.74193 -2.06807,1.82976 v 14.38657 c 0,0.36821 -0.2985,0.66671 -0.66672,0.66671 -0.3682,0 -0.66669,-0.2985 -0.66669,-0.66671 v -16.74637 c 0,-1.0138 -0.82948,-1.84328 -1.84328,-1.84328 v 0 c -1.0138,0 -1.84329,0.82946 -1.84329,1.84328 v 16.74635 c 0,0.36821 -0.2985,0.66671 -0.66671,0.66671 -0.3682,0 -0.66671,-0.2985 -0.66671,-0.66671 v -13.69249 c 0,-1.01379 -0.82946,-1.84328 -1.84328,-1.84328 v 0 c -1.01378,0 -1.84328,0.82947 -1.84328,1.84328 v 20.12263 l -3.46882,-3.46883 c -0.90329,-0.90331 -2.38143,-0.90331 -3.28472,0 -0.90331,0.90329 -0.90331,2.38143 0,3.28472 l 5.83815,5.83815 c 0.38368,0.38368 2.38318,3.99544 6.45966,3.99544 h 7.65799 c 3.062,0 5.54426,-2.48225 5.54426,-5.54427 v -19.15823 c -3e-5,-1.08782 -0.95505,-1.96342 -2.06809,-1.82974 z"
+             inkscape:connector-curvature="0" />
+        </g>
+      </g>
+    </g>
+  </g>
+</svg>
diff --git a/img/timeout.svg b/img/timeout.svg
new file mode 100644
index 0000000000000000000000000000000000000000..198a5d0788736803334e040073a3fbfca272e448
--- /dev/null
+++ b/img/timeout.svg
@@ -0,0 +1,113 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="39.194511mm"
+   height="38.100613mm"
+   viewBox="0 0 39.194512 38.100613"
+   version="1.1"
+   id="svg4282"
+   inkscape:version="1.0.2 (e86c870879, 2021-01-15)"
+   sodipodi:docname="timeout.svg">
+  <defs
+     id="defs4276">
+    <filter
+       height="1.1365677"
+       y="-0.068283834"
+       width="1.3679469"
+       x="-0.18397348"
+       id="filter5033-3"
+       style="color-interpolation-filters:sRGB"
+       inkscape:collect="always">
+      <feGaussianBlur
+         id="feGaussianBlur5035-0"
+         stdDeviation="0.62971388"
+         inkscape:collect="always" />
+    </filter>
+  </defs>
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="1.9616129"
+     inkscape:cx="41.801671"
+     inkscape:cy="42.238708"
+     inkscape:document-units="mm"
+     inkscape:current-layer="g5215"
+     inkscape:document-rotation="0"
+     showgrid="false"
+     fit-margin-top="0"
+     fit-margin-left="0"
+     fit-margin-right="0"
+     fit-margin-bottom="0"
+     inkscape:window-width="1438"
+     inkscape:window-height="1013"
+     inkscape:window-x="472"
+     inkscape:window-y="33"
+     inkscape:window-maximized="0" />
+  <metadata
+     id="metadata4279">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title></dc:title>
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Calque 1"
+     inkscape:groupmode="layer"
+     id="layer1"
+     transform="translate(-315.66997,-175.64217)">
+    <g
+       transform="translate(-68.703644,430.08545)"
+       id="g5215">
+      <circle
+         r="19.042953"
+         cy="-235.40033"
+         cx="404.52518"
+         id="circle5449"
+         style="vector-effect:none;fill:#d47500;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.529167;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+      <circle
+         style="vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.447471;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
+         id="circle5451"
+         cx="404.52518"
+         cy="-235.40033"
+         r="16.102989" />
+      <circle
+         r="14.633007"
+         cy="-235.40033"
+         cx="404.52518"
+         id="circle5453"
+         style="vector-effect:none;fill:#ffcb1e;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.406623;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+      <path
+         style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:0.45;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:2.45856;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:fill markers stroke;filter:url(#filter5033-3);color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
+         d="m 400.9125,-249.47126 c -0.25146,0.0757 -0.48011,0.21281 -0.66556,0.39873 l -8.50138,8.50173 c -0.53677,0.53723 -0.61792,1.37935 -0.19361,2.00919 -0.82659,-0.13236 -1.61395,0.39974 -1.7993,1.21612 -0.43867,1.93909 -0.4009,3.95553 0.11071,5.87667 l 5.2e-4,0.002 c 1.42045,5.30026 6.21081,8.79993 11.58765,8.94029 l 0.77292,2.88457 c 0.31644,1.18003 1.79133,1.57544 2.65561,0.71194 l 8.50138,-8.50173 c 0.86433,-0.86422 0.46889,-2.34002 -0.71175,-2.65629 l -11.61327,-3.11203 c -1.18024,-0.31566 -2.25984,0.76421 -1.94386,1.94436 l 0.36245,1.35269 c -1.37133,-0.65273 -2.46282,-1.82571 -2.87846,-3.36983 -0.1396,-0.52441 -0.19878,-1.06719 -0.17441,-1.6093 0.0266,-0.59197 -0.2787,-1.14948 -0.79184,-1.4459 l -2.37583,-1.37328 10.81562,2.89708 c 1.17939,0.31544 2.25865,-0.76277 1.94436,-1.94247 l -0.49741,-1.85639 c 2.0509,0.29462 3.8066,1.73329 4.37555,3.84825 0.13957,0.52442 0.19888,1.06717 0.17442,1.60931 -0.0266,0.59196 0.2787,1.14946 0.79183,1.44589 l 3.536,2.0431 c 0.92679,0.53431 2.10602,0.0177 2.34164,-1.02578 0.43869,-1.9391 0.4009,-3.95552 -0.1107,-5.87667 -2.6e-4,-9.6e-4 -7.8e-4,-0.003 -0.001,-0.004 -1.55745,-5.80087 -7.09445,-9.35297 -12.93413,-8.85652 l -0.78808,-2.94117 c -0.23132,-0.86527 -1.13216,-1.36806 -1.99005,-1.11067 z"
+         id="path5632"
+         inkscape:connector-curvature="0"
+         sodipodi:nodetypes="ccccccccccccccccccccccccccccccccc" />
+      <path
+         sodipodi:nodetypes="ccccccccccccccccccccccccccccccccc"
+         inkscape:connector-curvature="0"
+         id="path5567"
+         d="m 402.23542,-250.69968 c -0.25146,0.0757 -0.48011,0.21281 -0.66556,0.39873 l -8.50138,8.50173 c -0.53677,0.53723 -0.61792,1.37935 -0.19361,2.00919 -0.82659,-0.13236 -1.61395,0.39974 -1.7993,1.21612 -0.43867,1.93909 -0.4009,3.95553 0.11071,5.87667 l 5.2e-4,0.002 c 1.42045,5.30026 6.21081,8.79993 11.58765,8.94029 l 0.77292,2.88457 c 0.31644,1.18003 1.79133,1.57544 2.65561,0.71194 l 8.50138,-8.50173 c 0.86433,-0.86422 0.46889,-2.34002 -0.71175,-2.65629 l -11.61327,-3.11203 c -1.18024,-0.31566 -2.25984,0.76421 -1.94386,1.94436 l 0.36245,1.35269 c -1.37133,-0.65273 -2.46282,-1.82571 -2.87846,-3.36983 -0.1396,-0.52441 -0.19878,-1.06719 -0.17441,-1.6093 0.0266,-0.59197 -0.2787,-1.14948 -0.79184,-1.4459 l -2.37583,-1.37328 10.81562,2.89708 c 1.17939,0.31544 2.25865,-0.76277 1.94436,-1.94247 l -0.49741,-1.85639 c 2.0509,0.29462 3.8066,1.73329 4.37555,3.84825 0.13957,0.52442 0.19888,1.06717 0.17442,1.60931 -0.0266,0.59196 0.2787,1.14946 0.79183,1.44589 l 3.536,2.0431 c 0.92679,0.53431 2.10602,0.0177 2.34164,-1.02578 0.43869,-1.9391 0.4009,-3.95552 -0.1107,-5.87667 -2.6e-4,-9.6e-4 -7.8e-4,-0.003 -10e-4,-0.004 -1.55745,-5.80087 -7.09445,-9.35297 -12.93413,-8.85652 l -0.78808,-2.94117 c -0.23132,-0.86527 -1.13216,-1.36806 -1.99005,-1.11067 z"
+         style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:3.175;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:fill markers stroke;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+      <path
+         inkscape:connector-curvature="0"
+         id="path5551"
+         d="m 402.69164,-249.1774 -8.50142,8.50146 11.61318,3.11178 -1.02487,-3.82487 a 6.9116381,6.9116381 0 0 1 7.97029,4.99358 6.9116381,6.9116381 0 0 1 0.22673,2.09391 l 3.53648,2.0418 a 10.707668,10.707668 0 0 0 -0.0967,-5.11815 10.707668,10.707668 0 0 0 -12.62122,-7.68503 z m -10.06819,10.95429 a 10.707668,10.707668 0 0 0 0.0967,5.11815 10.707668,10.707668 0 0 0 11.30737,7.89231 l 1.0535,3.93174 8.50147,-8.50128 -11.61317,-3.11177 1.05077,3.92153 a 6.9116382,6.9116382 0 0 1 -6.63324,-5.11502 6.9116382,6.9116382 0 0 1 -0.2269,-2.09387 z"
+         style="vector-effect:none;fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:1.08123;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:fill markers stroke" />
+    </g>
+  </g>
+</svg>
diff --git a/img/unimplemented.svg b/img/unimplemented.svg
new file mode 100644
index 0000000000000000000000000000000000000000..d4ed5262f0aa8b0e4dd4bf8e89924aa0e3b67891
--- /dev/null
+++ b/img/unimplemented.svg
@@ -0,0 +1,124 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg
+   xmlns:dc="http://purl.org/dc/elements/1.1/"
+   xmlns:cc="http://creativecommons.org/ns#"
+   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+   xmlns:svg="http://www.w3.org/2000/svg"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   width="38.085907mm"
+   height="38.085907mm"
+   viewBox="0 0 38.085907 38.085907"
+   version="1.1"
+   id="svg4282"
+   inkscape:version="1.0.2 (e86c870879, 2021-01-15)"
+   sodipodi:docname="unimplemented.svg">
+  <defs
+     id="defs4276">
+    <filter
+       height="1.148898"
+       y="-0.074448995"
+       width="1.1865224"
+       x="-0.093261182"
+       id="filter14079"
+       style="color-interpolation-filters:sRGB"
+       inkscape:collect="always">
+      <feGaussianBlur
+         id="feGaussianBlur14081"
+         stdDeviation="1.1047884"
+         inkscape:collect="always" />
+    </filter>
+  </defs>
+  <sodipodi:namedview
+     id="base"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:pageopacity="0.0"
+     inkscape:pageshadow="2"
+     inkscape:zoom="2.08957"
+     inkscape:cx="83.758231"
+     inkscape:cy="148.44158"
+     inkscape:document-units="mm"
+     inkscape:current-layer="layer1"
+     inkscape:document-rotation="0"
+     showgrid="false"
+     fit-margin-top="0"
+     fit-margin-left="0"
+     fit-margin-right="0"
+     fit-margin-bottom="0"
+     inkscape:window-width="1438"
+     inkscape:window-height="1013"
+     inkscape:window-x="215"
+     inkscape:window-y="33"
+     inkscape:window-maximized="0" />
+  <metadata
+     id="metadata4279">
+    <rdf:RDF>
+      <cc:Work
+         rdf:about="">
+        <dc:format>image/svg+xml</dc:format>
+        <dc:type
+           rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+        <dc:title></dc:title>
+      </cc:Work>
+    </rdf:RDF>
+  </metadata>
+  <g
+     inkscape:label="Calque 1"
+     inkscape:groupmode="layer"
+     id="layer1"
+     transform="translate(-274.06798,-108.5144)">
+    <circle
+       r="19.042953"
+       cy="127.55736"
+       cx="293.11093"
+       id="circle14171"
+       style="vector-effect:none;fill:#d47500;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.529167;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+    <circle
+       style="vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.447471;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
+       id="circle14173"
+       cx="293.11093"
+       cy="127.55736"
+       r="16.102989" />
+    <circle
+       r="14.633007"
+       cy="127.55736"
+       cx="293.11093"
+       id="circle14175"
+       style="vector-effect:none;fill:#ffcb1e;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.406622;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" />
+    <g
+       id="g5001"
+       style="font-variation-settings:normal;opacity:0.453;vector-effect:none;fill:#000000;fill-opacity:1;stroke:#000000;stroke-width:0.75015;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;filter:url(#filter14079);stop-color:#000000"
+       transform="translate(175.76426,108.19035)">
+      <path
+         style="color:#000000;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:36.9821px;line-height:1.25;font-family:'Arial monospaced for SAP';-inkscape-font-specification:'Arial monospaced for SAP Bold';font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-variant-east-asian:normal;font-feature-settings:normal;font-variation-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:0px;word-spacing:0px;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;shape-margin:0;inline-size:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:0.75015;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;filter:url(#filter14079);color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate;stop-color:#000000;stop-opacity:1"
+         d="m 105.9775,15.046109 c 0.18335,-2.276513 1.10006,-4.114322 2.75014,-5.5134286 1.65009,-1.3991066 3.81353,-2.0986599 6.49032,-2.0986599 2.76237,0 5.00526,0.7054817 6.72867,2.1164452 1.72342,1.3991073 2.58513,3.1598473 2.58513,5.2822203 0,1.339823 -0.35446,2.531435 -1.06339,3.574836 -0.70892,1.031545 -1.9312,2.146087 -3.66685,3.343628 -0.99005,0.687696 -1.69897,1.351679 -2.12677,1.991948 -0.4278,0.628413 -0.6417,1.493962 -0.6417,2.596648 0,0.29642 0.0122,0.527628 0.0366,0.693624 h -4.32688 c 0,-1.991948 0.23835,-3.52148 0.71504,-4.588595 0.47669,-1.067115 1.52785,-2.146088 3.15349,-3.236917 1.01449,-0.675839 1.7723,-1.333894 2.27344,-1.974163 0.51336,-0.640269 0.77004,-1.49989 0.77004,-2.578861 0,-1.078972 -0.3728,-1.932664 -1.11839,-2.561077 -0.74559,-0.628413 -1.78453,-0.942619 -3.11682,-0.942619 -1.38118,0 -2.49346,0.385347 -3.33684,1.156042 -0.84337,0.770695 -1.3384,1.879309 -1.48507,3.325842 z m 11.09215,18.727872 h -4.32681 v -4.517454 h 4.32681 z"
+         id="path4996" />
+      <path
+         style="color:#000000;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:36.9821px;line-height:1.25;font-family:'Arial monospaced for SAP';-inkscape-font-specification:'Arial monospaced for SAP Bold';font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-variant-east-asian:normal;font-feature-settings:normal;font-variation-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:0px;word-spacing:0px;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;shape-margin:0;inline-size:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:0.75015;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;filter:url(#filter14079);color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate;stop-color:#000000;stop-opacity:1"
+         d="m 115.21875,5.6347656 c -2.99016,0 -5.63829,0.8160218 -7.6543,2.5253906 -1.99204,1.6890584 -3.16326,4.0646938 -3.3789,6.7421878 l -0.13867,1.710937 1.70312,0.216797 6.45117,0.820313 0.18555,-1.835938 c 0.115,-1.134189 0.44212,-1.751981 0.91016,-2.179687 0.48244,-0.440863 1.07641,-0.685547 2.12304,-0.685547 1.0495,0 1.61512,0.231349 1.95703,0.519531 0.35772,0.301497 0.47852,0.520878 0.47852,1.185547 0,0.793715 -0.16814,1.199514 -0.37305,1.455078 l -0.008,0.0078 -0.006,0.0078 c -0.32738,0.418271 -0.94536,0.979638 -1.85547,1.585938 l -0.002,0.002 -0.002,0.002 c -1.78344,1.196713 -3.09882,2.442169 -3.79297,3.996094 -0.64668,1.447639 -0.87305,3.186838 -0.87305,5.322265 v 0.423828 1.375 6.740235 h 7.92578 v -1.798828 l -0.0195,-7.001954 -0.0176,-0.43164 c -0.0354,-0.869364 0.17768,-1.360114 0.33008,-1.583985 l 0.004,-0.0078 0.004,-0.0059 c 0.23625,-0.353582 0.78216,-0.904263 1.65235,-1.509766 l 0.006,-0.0039 c 1.83696,-1.26803 3.21112,-2.475794 4.12305,-3.802735 l 0.002,-0.0039 0.004,-0.0039 c 0.91006,-1.339417 1.37305,-2.928886 1.37305,-4.585938 0,-2.608285 -1.18049,-4.9962729 -3.24805,-6.675781 -2.08742,-1.7074663 -4.79633,-2.5214844 -7.86328,-2.5214844 z"
+         id="path4999"
+         sodipodi:nodetypes="sscccccssssccccccssccccccsccscccccscss" />
+    </g>
+    <g
+       id="g4990"
+       style="fill:#ffffff"
+       transform="translate(177.8563,105.91291)">
+      <path
+         style="color:#000000;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:36.9821px;line-height:1.25;font-family:'Arial monospaced for SAP';-inkscape-font-specification:'Arial monospaced for SAP Bold';font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-variant-east-asian:normal;font-feature-settings:normal;font-variation-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:0px;word-spacing:0px;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;shape-margin:0;inline-size:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:3.59728;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate;stop-color:#000000;stop-opacity:1"
+         d="m 105.9775,15.046109 c 0.18335,-2.276513 1.10006,-4.114322 2.75014,-5.5134286 1.65009,-1.3991066 3.81353,-2.0986599 6.49032,-2.0986599 2.76237,0 5.00526,0.7054817 6.72867,2.1164452 1.72342,1.3991073 2.58513,3.1598473 2.58513,5.2822203 0,1.339823 -0.35446,2.531435 -1.06339,3.574836 -0.70892,1.031545 -1.9312,2.146087 -3.66685,3.343628 -0.99005,0.687696 -1.69897,1.351679 -2.12677,1.991948 -0.4278,0.628413 -0.6417,1.493962 -0.6417,2.596648 0,0.29642 0.0122,0.527628 0.0366,0.693624 h -4.32688 c 0,-1.991948 0.23835,-3.52148 0.71504,-4.588595 0.47669,-1.067115 1.52785,-2.146088 3.15349,-3.236917 1.01449,-0.675839 1.7723,-1.333894 2.27344,-1.974163 0.51336,-0.640269 0.77004,-1.49989 0.77004,-2.578861 0,-1.078972 -0.3728,-1.932664 -1.11839,-2.561077 -0.74559,-0.628413 -1.78453,-0.942619 -3.11682,-0.942619 -1.38118,0 -2.49346,0.385347 -3.33684,1.156042 -0.84337,0.770695 -1.3384,1.879309 -1.48507,3.325842 z m 11.09215,18.727872 h -4.32681 v -4.517454 h 4.32681 z"
+         id="path4992" />
+      <path
+         style="color:#000000;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:36.9821px;line-height:1.25;font-family:'Arial monospaced for SAP';-inkscape-font-specification:'Arial monospaced for SAP Bold';font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-variant-east-asian:normal;font-feature-settings:normal;font-variation-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:0px;word-spacing:0px;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;shape-margin:0;inline-size:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:3.59728;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate;stop-color:#000000;stop-opacity:1"
+         d="m 115.21875,5.6347656 c -2.99016,0 -5.63829,0.8160218 -7.6543,2.5253906 -1.99204,1.6890584 -3.16326,4.0646938 -3.3789,6.7421878 l -0.13867,1.710937 1.70312,0.216797 6.45117,0.820313 0.18555,-1.835938 c 0.115,-1.134189 0.44212,-1.751981 0.91016,-2.179687 0.48244,-0.440863 1.07641,-0.685547 2.12304,-0.685547 1.0495,0 1.61512,0.231349 1.95703,0.519531 0.35772,0.301497 0.47852,0.520878 0.47852,1.185547 0,0.793715 -0.16814,1.199514 -0.37305,1.455078 l -0.008,0.0078 -0.006,0.0078 c -0.32738,0.418271 -0.94536,0.979638 -1.85547,1.585938 l -0.002,0.002 -0.002,0.002 c -1.78344,1.196713 -3.09882,2.442169 -3.79297,3.996094 -0.64668,1.447639 -0.87305,3.186838 -0.87305,5.322265 v 0.423828 1.375 6.740235 h 7.92578 v -1.798828 l -0.0195,-7.001954 -0.0176,-0.43164 c -0.0354,-0.869364 0.17768,-1.360114 0.33008,-1.583985 l 0.004,-0.0078 0.004,-0.0059 c 0.23625,-0.353582 0.78216,-0.904263 1.65235,-1.509766 l 0.006,-0.0039 c 1.83696,-1.26803 3.21112,-2.475794 4.12305,-3.802735 l 0.002,-0.0039 0.004,-0.0039 c 0.91006,-1.339417 1.37305,-2.928886 1.37305,-4.585938 0,-2.608285 -1.18049,-4.9962729 -3.24805,-6.675781 -2.08742,-1.7074663 -4.79633,-2.5214844 -7.86328,-2.5214844 z"
+         id="path4994"
+         sodipodi:nodetypes="sscccccssssccccccssccccccsccscccccscss" />
+    </g>
+    <path
+       d="m 283.8338,120.95901 q 0.27502,-3.41477 2.75014,-5.51343 2.47513,-2.09865 6.49032,-2.09865 4.14355,0 6.72867,2.11644 2.58513,2.09866 2.58513,5.28222 0,2.00973 -1.06339,3.57484 -1.06338,1.54731 -3.66685,3.34362 -1.48507,1.03155 -2.12677,1.99195 -0.6417,0.94262 -0.6417,2.59665 0,0.44463 0.0366,0.69362 h -4.32688 q 0,-2.98792 0.71504,-4.58859 0.71503,-1.60067 3.15349,-3.23692 1.52173,-1.01376 2.27344,-1.97416 0.77004,-0.9604 0.77004,-2.57886 0,-1.61846 -1.11839,-2.56108 -1.11839,-0.94262 -3.11682,-0.94262 -2.07177,0 -3.33684,1.15604 -1.26506,1.15605 -1.48507,3.32585 z m 11.42224,18.72788 h -4.6569 v -4.51746 h 4.6569 z"
+       style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:36.9821px;line-height:1.25;font-family:'Arial monospaced for SAP';-inkscape-font-specification:'Arial monospaced for SAP Bold';letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.924552"
+       id="path4984" />
+  </g>
+</svg>
diff --git a/scripts/LaTeXutils.py b/scripts/LaTeXutils.py
new file mode 100644
index 0000000000000000000000000000000000000000..1715461ad0e0275a54b9a08bba09d97a72f854a8
--- /dev/null
+++ b/scripts/LaTeXutils.py
@@ -0,0 +1,266 @@
+# Copyright 2022. The MBI project. All rights reserved.
+# This program is free software; you can redistribute it and/or modify it under the terms of the license (GNU GPL).
+
+from MBIutils import *
+
+possible_features=['P2P!basic', 'P2P!nonblocking', 'P2P!persistent', 'COLL!basic', 'COLL!nonblocking', 'COLL!persistent', 'COLL!tools', 'RMA']
+possible_characterization=["Lacking", "Yes"]
+
+feat_to_color = {'P2P!basic':'viridis0', 'P2P!nonblocking':'viridis1', 'P2P!persistent':'viridis3',
+    'RMA':'viridis10',
+    "COLL!basic":'viridis15', "COLL!nonblocking":'viridis16', "COLL!tools":'viridis17'}
+feat_to_bgcolor = {'P2P!basic':'white', 'P2P!nonblocking':'white', 'P2P!persistent':'white',
+    'RMA':'black',
+    "COLL!basic":'black', "COLL!nonblocking":'black', "COLL!tools":'black'}
+
+def parse_file_features(file):
+    """Takes a filename and returns a tuple (correct, lacking) of lists of features"""
+    correct = []
+    lacking = []
+    with open(file, 'r') as f:
+        line = f.readline()
+
+        # Search for the feature block
+        while line != 'BEGIN_MPI_FEATURES\n':
+            if line == '':
+                raise Exception("Could not find the feature block in {}".format(file))
+            line = f.readline()
+
+        while line != 'END_MPI_FEATURES\n':
+            if line == '':
+                raise Exception("Could not find the end of the feature block in {}".format(file))
+
+            line = line.strip()
+            matching = re.match("^ *([!a-zA-Z0-9]*): ([a-zA-Z0-9]*)$", line)
+            if matching is not None:
+                (feat, chara) = (matching.group(1), matching.group(2))
+                if feat not in possible_features:
+                    raise Exception("ERROR: file {} contains an unknown feature: '{}'".format(file, feat))
+                if chara not in possible_characterization:
+                    raise Exception("ERROR: file {} has feature {} with unknown characterization: '{}'".format(file, feat, chara))
+                if chara == 'Yes':
+                    correct.append(feat)
+                elif chara == 'Lacking':
+                    lacking.append(feat)
+                else:
+                    raise Exception("Impossible")
+            line = f.readline()
+    if len(correct) > 4:
+        raise Exception(f"ERROR: file {file} has more than 4 features: {correct}")
+    return (correct, lacking)
+
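+# Example of the MPI feature block that parse_file_features() expects in each
+# generated C file (the generator templates later in this patch show the full
+# header). 'Yes' entries end up in `correct`, 'Lacking' ones in `lacking`:
+#
+#   BEGIN_MPI_FEATURES
+#     P2P!basic: Yes
+#     P2P!nonblocking: Lacking
+#     COLL!basic: Lacking
+#     RMA: Lacking
+#   END_MPI_FEATURES
+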
+def parse_file_expected(file):
+    """Takes a file name, and returns the list of Expect headers (there may be more than one per file)"""
+    res  = list(filter(lambda line: line.startswith("  | ERROR: "), open(file, 'r').readlines()))
+    res += list(filter(lambda line: line.startswith("  | OK"), open(file, 'r').readlines()))
+    if len(res)==0:
+        raise Exception("No 'ERROR' nor 'OK' header in {}".format(file))
+    res = list(map(lambda line: re.sub("[| ]*ERROR: ", "", line.strip()), res))
+    res = list(map(lambda line: re.sub("[| ]*OK *", "OK", line), res))
+    for expected in res:
+        if expected not in possible_details:
+            raise Exception("Unexpected expectation header in {}: '{}'".format(file, expected))
+    res = list(map(lambda line: possible_details[line], res))
+    return res
+
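+# The Expect headers parsed by parse_file_expected() come from the MBI_TESTS
+# block of each C file, one command/outcome pair per test, e.g.:
+#
+#   $ mpirun -np 2 ${EXE}
+#   | ERROR: DatatypeMatching
+#
+# or, for a correct code:
+#
+#   $ mpirun -np 2 ${EXE}
+#   | OK
+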
+def get_C_files_from_dir(dir):
+    files = []
+    if dir[-1] != '/': # dir must end with a '/' so that the path and the basename can be separated later
+        dir = "{}/".format(dir)
+    for filename in os.listdir(dir):
+        if filename.endswith(".c"):
+            files.append("{}/{}".format(dir,filename))
+    return files
+def filename_to_binary(file):
+    return re.sub("_", "\\_", re.sub(".*?//", "", re.sub(r"\.c", "", file)))
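+# For instance (illustrative path), filename_to_binary("gencodes//CollDataMatching_allreduce_nok.c")
+# returns 'CollDataMatching\_allreduce\_nok': the '.c' suffix and the 'dir//' prefix produced by
+# get_C_files_from_dir() are stripped, and underscores are escaped for LaTeX.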
+
+def parse_files_per_expected(list):
+    """
+    Reads all C files from the list and returns a dict [expected_outcome -> list_of_files_having_that_outcome].
+    Each element of these lists is of the form [file, test_number_in_that_file].
+    """
+    result = {}
+    for expected in possible_details:
+        result[ possible_details[expected] ] = []
+    for file in list:
+        test = 0
+        for expected in parse_file_expected(file):
+            result[expected].append([file, test])
+            test += 1
+    return result
+
+def generate_errors(files, outfile):
+    files_per_expected = parse_files_per_expected(files)
+    def get_counts(categories):
+        count = {'total':0}
+        for feat in possible_features:
+            count[feat] = 0
+        seen = []
+        for category in categories:
+            for (file,test) in files_per_expected[category]:
+                if not file in seen:
+                    seen.append(file)
+                    (features,  _) = parse_file_features(file)
+                    count['total'] += 1
+                    for feat in features:
+                        count[feat] += 1
+                else:
+                    print(f"Ignore duplicate {file} while counting files per feature.")
+        return count
+    def show_counts(categories):
+        count = get_counts(categories)
+        output.write(f"{count['P2P!basic']}&{count['P2P!nonblocking']}&{count['P2P!persistent']}&")
+        output.write(f"{count['COLL!basic']}&{count['COLL!nonblocking']}&{count['COLL!tools']} & {count['RMA']} & {count['total']} \\\\")
+
+    with open(outfile, 'w') as output:
+        output.write('\\begin{tabular}{|l|l|c|c|c| c|c|c |c||c|}\\cline{3-10}\n')
+        output.write('\\multicolumn{2}{c|}{}&\\multicolumn{3}{c|}{Point-to-point}&\\multicolumn{3}{c|}{Collective}&\\multirow{6}{*}{RMA}&\\multirow{6}{*}{Unique files}\\\\\\cline{3-8}\n')
+        output.write('\\multicolumn{2}{c|}{}&\\R{base calls}&\\R{~nonblocking~}&\\R{persistent} & \\R{base calls}&\\R{~nonblocking~}& \\R{tools} &&\\\\\\hline\n')
+
+        output.write('\\multirow{1}{*}{{Single call}} &Invalid Parameter & ');   show_counts(['AInvalidParam']); output.write(' \\hline')
+
+        output.write('\\multirow{3}{*}{{Single process}}&Resource Leak    & ');  show_counts(['BResLeak'])     ; output.write('\\cline{2-10}\n')
+        output.write( '                                 &Request lifecycle& ');  show_counts(['BReqLifecycle']); output.write('\\cline{2-10}\n')
+        output.write( '                                 &Epoch lifecycle& ');  show_counts(['BEpochLifecycle']); output.write('\\cline{2-10}\n')
+        output.write( '                                 &Local concurrency& ');  show_counts(['BLocalConcurrency']); output.write('\\hline\n')
+
+        output.write('\\multirow{4}{*}{{Multi-processes}}&Parameter matching& ');  show_counts(['CMatch'])        ; output.write('\\cline{2-10}\n')
+        output.write( '                                  &Message Race      & ');  show_counts(['DRace'])        ; output.write('\\cline{2-10}\n')
+        output.write( '                                  &Call ordering     & ');  show_counts(['DMatch'])       ; output.write('\\cline{2-10}\n')
+        output.write( '                                  &Global concurrency& ');  show_counts(['DGlobalConcurrency']); output.write('\\hline\n')
+
+        output.write( '      System & Buffering Hazard    &') ; show_counts(['EBufferingHazard']);output.write('\\hline\n')
+        output.write( '      Data   & Input Hazard    &') ; show_counts(['InputHazard']);output.write('\\hline\\hline\n')
+        output.write('\\multicolumn{2}{|c|}{Correct codes}&') ; show_counts(['FOK']);output.write('\\hline\\hline\n')
+
+        output.write('\\multicolumn{2}{|c|}{\\textbf{Total}}&')
+        show_counts(['AInvalidParam', 'BResLeak','BReqLifecycle','BEpochLifecycle','BLocalConcurrency', 'CMatch', 'DRace','DMatch','DGlobalConcurrency', 'EBufferingHazard', 'InputHazard', 'FOK'])
+        output.write('\\hline\n')
+
+        output.write('\\end{tabular}\n')
+
+
+def generate_labels(files, outfile):
+    files_per_expected = parse_files_per_expected(files)
+
+    # Get the data
+    OK = {'total':0}
+    Error = {'total':0}
+    for feat in possible_features:
+        OK[feat] = 0
+        Error[feat] = 0
+    seen = []
+    for detail in possible_details:
+        category = possible_details[detail]
+        for (file,test) in files_per_expected[category]:
+            if not file in seen:
+                seen.append(file)
+                (features,  _) = parse_file_features(file)
+                if detail == 'OK':
+                    OK['total'] += 1
+                    for feat in features:
+                        OK[feat] += 1
+                else:
+                    Error['total'] += 1
+                    for feat in features:
+                        Error[feat] += 1
+            else:
+                print(f"Ignore duplicate {file} while counting files per label.")
+
+    # Produce the output
+    with open(outfile, 'w') as output:
+        output.write('\\begin{tabular}{|l| l | l | c | c |}\\hline\n')
+        output.write('\\multicolumn{2}{|c|}{ \\textbf{MPI}} & \\multirow{2}{*}{\\textbf{Description}} & \\multicolumn{2}{c|}{\\textbf{Number of codes using the label}} \\\\')
+        output.write('\\multicolumn{2}{|c|}{ \\textbf{Feature Label}} &  & \\# Incorrect codes & \\# Correct codes \\\\ \\hline\n')
+
+        output.write("\\parbox[t]{4mm}{\\multirow{3}{*}{\\R{P2P}}} & base calls & Use of blocking point-to-point communication")
+        output.write(f" & {Error['P2P!basic']} & {OK['P2P!basic']} \\\\ \n")
+        output.write("& nonblocking & Use of nonblocking point-to-point communication")
+        output.write(f" & {Error['P2P!nonblocking']} & {OK['P2P!nonblocking']} \\\\ \n")
+#        output.write(f" &  116 &  19 \\\\ \n")
+        output.write("& persistent & Use of point-to-point persistent communications")
+        output.write(f" & {Error['P2P!persistent']} & {OK['P2P!persistent']} \\\\ \\hline \n")
+#        output.write(f" &  45 &  8 \\\\ \\hline \n")
+        output.write("\\parbox[t]{2mm}{\\multirow{3}{*}{\\R{COLL}}} & base calls & Use of blocking collective communication")
+        output.write(f" & {Error['COLL!basic']} & {OK['COLL!basic']} \\\\ \n")
+#        output.write(f" &  312 &  202 \\\\ \n")
+        output.write("& nonblocking & Use of nonblocking collective communication")
+        output.write(f" & {Error['COLL!nonblocking']} & {OK['COLL!nonblocking']} \\\\ \n")
+#        output.write(f" &  129 &   114 \\\\ \n")
+        output.write("& tools & Use of resource functions (e.g., communicators, datatypes)")
+        output.write(f" & {Error['COLL!tools']} & {OK['COLL!tools']} \\\\ \\hline \n")
+#        output.write(f" &  94 &  23 \\\\ \\hline \n")
+        output.write("\\multicolumn{2}{|c|}{RMA} & Use of Remote Memory Access")
+        output.write(f" & {Error['RMA']} & {OK['RMA']} \\\\ \\hline \n")
+#        output.write(f" &  30 &  3 \\\\ \\hline \n")
+        output.write("\\end{tabular}\n")
+
+
+    # def show_counts(categories):
+    #     count = get_counts(categories)
+    #     output.write(f"{count['P2P!basic']}&{count['P2P!nonblocking']}&{count['P2P!persistent']}&")
+    #     output.write(f"{count['COLL!basic']}&{count['COLL!nonblocking']}&{count['COLL!tools']} & {count['RMA']} & {count['total']} \\\\")
+
+def generate_features(files, outfile):
+    lineheight = 0.4
+    feat_width = 0.7
+    cell_width = feat_width * 3
+    cell_per_line = 10
+    files_per_expected = parse_files_per_expected(files)
+
+    line = 800
+    with open(outfile, 'w') as output:
+        output.write("\\resizebox{\\linewidth}{!}{\\begin{tikzpicture}\n")
+        categories = []
+        for expected in possible_details:
+            if not possible_details[expected] in categories:
+                categories.append(possible_details[expected])
+        for expected in sorted(categories):
+            output.write(f" \\draw({cell_width*cell_per_line/2},{line*lineheight}) node {{\\large{{{displayed_name[expected]}}}}};\n")
+            line -= 1
+            cell = 0 # Position of this file on the line
+            # Draw the boxes
+            initial_line = line
+            for (file,test) in files_per_expected[expected]:
+                (features, _) = parse_file_features(file)
+                file = f'{filename_to_binary(file)}\\#{test}'
+                output.write(f" \\draw ({cell*cell_width-(0.4*feat_width)}, {(line+0.4)*lineheight}) rectangle ({cell*cell_width+(3.45*feat_width)}, {(line-0.4)*lineheight});\n")
+                xpos = 0
+#                for feat in incorrect_feat:
+#                    output.write(f"  \\draw [fill={feat_to_color[feat]}] ({cell*cell_width + xpos-(0.4*feat_width)}, {(line-0.4)*lineheight}) rectangle ({cell*cell_width + xpos + (0.45*feat_width)}, {(line+0.4)*lineheight});\n")
+#                    xpos += feat_width
+                for feat in features:
+                    output.write(f"  \\draw [fill={feat_to_color[feat]}] ({cell*cell_width + xpos-(0.4*feat_width)}, {(line-0.4)*lineheight}) rectangle ({cell*cell_width + xpos + (0.45*feat_width)}, {(line+0.4)*lineheight});\n")
+                    xpos += feat_width
+                if cell+1 == cell_per_line:
+                    cell = 0
+                    line -= 1
+                    if line < 0:
+                        raise Exception("Too many lines. Please increase the initial value of line")
+                else:
+                    cell += 1
+
+            # Put the texts (must come after all boxes for the tooltip to not be hidden behind)
+            cell = 0
+            line = initial_line
+            for (file,test) in files_per_expected[expected]:
+                (features,  _) = parse_file_features(file)
+                file = f'{filename_to_binary(file)}\\#{test}'
+                xpos = 0
+#                for feat in incorrect_feat:
+#                    output.write(f"  \\draw ({cell*cell_width + xpos}, {line*lineheight}) node {{\\scriptsize{{\\tooltip****[{feat_to_bgcolor[feat]}]{{\\sout{{{feat}}}}}{{{file} -- incorrect: {feat}}}}}}};\n")
+#                    xpos += feat_width
+                for feat in features:
+#                    output.write(f"  \\draw ({cell*cell_width + xpos}, {line*lineheight}) node {{\\scriptsize{{\\tooltip****[{feat_to_bgcolor[feat]}]{{{feat}}}{{{file} -- correct: {feat}}}}}}};\n")
+                    output.write(f"  \\draw ({cell*cell_width + xpos}, {line*lineheight}) node {{\\scriptsize{{\\color{{{feat_to_bgcolor[feat]}}}{{{displayed_name[feat]}}}}}}};\n")
+                    xpos += feat_width
+                if cell+1 == cell_per_line:
+                    cell = 0
+                    line -= 1
+                    if line < 0:
+                        raise Exception("Too many lines. Please increase the initial value of line")
+                else:
+                    cell += 1
+            if cell != 0: # the current line is partially filled: move to a fresh line before the next category
+                line -= 1
+        output.write("\\end{tikzpicture}}\n")
diff --git a/scripts/MBIutils.py b/scripts/MBIutils.py
new file mode 100644
index 0000000000000000000000000000000000000000..99fbacba55039f253597c6ef4af064ad33623048
--- /dev/null
+++ b/scripts/MBIutils.py
@@ -0,0 +1,502 @@
+# Copyright 2021-2022. The MBI project. All rights reserved.
+# This program is free software; you can redistribute it and/or modify it under the terms of the license (GNU GPL).
+
+import os
+import time
+import subprocess
+import sys
+import re
+import shlex
+import select
+import signal
+import hashlib
+
+class AbstractTool:
+    def ensure_image(self, params="", dockerparams=""):
+        """Verify that this is executed from the right docker image, and complain if not."""
+        if os.path.exists("/MBI") or os.path.exists("trust_the_installation"):
+            print("This seems to be an MBI docker image. Good.")
+        else:
+            print("Please run this script in a MBI docker image. Run these commands:")
+            print("  docker build -f Dockerfile -t mpi-bugs-initiative:latest . # Only the first time")
+            print(f"  docker run -it --rm --name MIB --volume $(pwd):/MBI {dockerparams}mpi-bugs-initiative /MBI/MBI.py {params}")
+            sys.exit(1)
+
+    def set_rootdir(self, rootdir):
+        self.rootdir = rootdir
+
+    def build(self, rootdir, cached=True):
+        """Rebuilds the tool binaries. By default, we try to reuse the existing build."""
+        print ("Nothing to do to rebuild the tool binaries.")
+
+    def setup(self):
+        """
+        Ensure that this tool (previously built) is usable in this environment: setup the PATH, etc.
+        It is called from the logs directory, once per test (tests are run in separate processes),
+        so it must set the environment variables each time; any filesystem setup (installing
+        software, etc.) should be done only once if possible.
+        """
+        # pass
+
+    def run(self, execcmd, filename, binary, num_id, timeout):
+        """Compile the test code and analyse it with the tool if needed (a caching system should be used)."""
+        # pass
+
+    def run_cmd(self, buildcmd, execcmd, cachefile, filename, binary, timeout, batchinfo, cwd=None, read_line_lambda=None):
+        """
+        Runs the test if needed. Returns True if the test was actually run, and False if the cached result was reused. This method SHOULD NOT be overloaded; change things in run() instead.
+
+        The result is cached if possible, and the test is rerun only if the `test.txt` (containing the tool output) or the `test.elapsed` (containing the timing info) do not exist, or if `test.md5sum` (containing the md5sum of the code to compile) does not match.
+
+        Parameters:
+         - buildcmd and execcmd are shell commands to run. buildcmd can be any shell line (including && groups), but execcmd must be a single binary to run.
+         - cachefile: the basename used for this test's cache files (.txt, .elapsed, .md5sum)
+         - filename: the source file containing the code
+         - binary: the file name in which to compile the code
+         - batchinfo: something like "1/1" to say that this run is the only batch (see the -b parameter of MBI.py)
+         - cwd: directory in which the command must be run (or None to keep the current working directory)
+         - read_line_lambda: a lambda to which each line of the tool output is fed as soon as it is produced. It allows MUST to interrupt the execution when a deadlock is reported.
+        """
+
+        if os.path.exists(f'{cachefile}.txt') and os.path.exists(f'{cachefile}.elapsed') and os.path.exists(f'{cachefile}.md5sum'):
+            hash_md5 = hashlib.md5()
+            with open(filename, 'rb') as sourcefile :
+                for chunk in iter(lambda: sourcefile.read(4096), b""):
+                    hash_md5.update(chunk)
+            newdigest = hash_md5.hexdigest()
+            with open(f'{cachefile}.md5sum', 'r') as md5file:
+                olddigest = md5file.read()
+            #print(f'Old digest: {olddigest}; New digest: {newdigest}')
+            if olddigest == newdigest:
+                print(f" (result cached -- digest: {olddigest})")
+                return False
+            os.remove(f'{cachefile}.txt')
+
+        self.setup()
+
+        print(f"Wait up to {timeout} seconds")
+
+        start_time = time.time()
+        if buildcmd is None:
+            output = f"No need to compile {binary}.c (batchinfo:{batchinfo})\n\n"
+        else:
+            output = f"Compiling {binary}.c (batchinfo:{batchinfo})\n\n"
+            output += f"$ {buildcmd}\n"
+
+            compil = subprocess.run(buildcmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+            if compil.stdout is not None:
+                output += str(compil.stdout, errors='replace')
+            if compil.returncode != 0:
+                output += f"Compilation of {binary}.c raised an error (retcode: {compil.returncode})"
+                for line in (output.split('\n')):
+                    print(f"| {line}", file=sys.stderr)
+                with open(f'{cachefile}.elapsed', 'w') as outfile:
+                    outfile.write(str(time.time() - start_time))
+                with open(f'{cachefile}.txt', 'w') as outfile:
+                    outfile.write(output)
+                return True
+
+        output += f"\n\nExecuting the command (cwd: {cwd})\n $ {execcmd}\n"
+        for line in (output.split('\n')):
+            print(f"| {line}", file=sys.stderr)
+
+        # We run the subprocess and parse its output line by line, so that we can kill it as soon as it detects a timeout
+        process = subprocess.Popen(shlex.split(execcmd), cwd=cwd, stdout=subprocess.PIPE,
+                                   stderr=subprocess.STDOUT, preexec_fn=os.setsid)
+        poll_obj = select.poll()
+        poll_obj.register(process.stdout, select.POLLIN)
+
+        pid = process.pid
+        pgid = os.getpgid(pid)  # We need that to forcefully kill subprocesses when leaving
+        outcome = None
+        while True:
+            if poll_obj.poll(5):  # Something to read? (the poll timeout is 5 ms; if nothing arrives, loop to check the timeout status)
+                line = process.stdout.readline()
+                # From byte array to string, replacing non-representable strings with question marks
+                line = str(line, errors='replace')
+                output = output + line
+                print(f"| {line}", end='', file=sys.stderr)
+                if read_line_lambda != None:
+                    read_line_lambda(line, process)
+            if time.time() - start_time > timeout:
+                outcome = 'timeout'
+                with open(f'{cachefile}.timeout', 'w') as outfile:
+                    outfile.write(f'{time.time() - start_time} seconds')
+                break
+            if process.poll() is not None:  # The subprocess ended. Grab all existing output, and return
+                line = 'more'
+                while line != None and line != '':
+                    line = process.stdout.readline()
+                    if line is not None:
+                        # From byte array to string, replacing non-representable strings with question marks
+                        line = str(line, errors='replace')
+                        output = output + line
+                        print(f"| {line}", end='', file=sys.stderr)
+
+                break
+
+        # We want to clean all forked processes in all cases, no matter whether they are still running (timeout) or supposed to be off. The runners easily get clogged with zombies :(
+        try:
+            os.killpg(pgid, signal.SIGTERM)  # Terminate all forked processes, to make sure it's clean whatever the tool does
+            process.terminate()  # No op if it's already stopped but useful on timeouts
+            time.sleep(0.2)  # allow some time for the tool to finish its children
+            os.killpg(pgid, signal.SIGKILL)  # Finish 'em all, manually
+            os.kill(pid, signal.SIGKILL)  # die! die! die!
+        except ProcessLookupError:
+            pass  # OK, it's gone now
+
+        elapsed = time.time() - start_time
+
+        rc = process.poll()
+        if rc < 0:
+            status = f"Command killed by signal {-rc}, elapsed time: {elapsed}\n"
+        else:
+            status = f"Command return code: {rc}, elapsed time: {elapsed}\n"
+        print(status)
+        output += status
+
+        with open(f'{cachefile}.elapsed', 'w') as outfile:
+            outfile.write(str(elapsed))
+
+        with open(f'{cachefile}.txt', 'w') as outfile:
+            outfile.write(output)
+        with open(f'{cachefile}.md5sum', 'w') as outfile:
+            hashed = hashlib.md5()
+            with open(filename, 'rb') as sourcefile :
+                for chunk in iter(lambda: sourcefile.read(4096), b""):
+                    hashed.update(chunk)
+            outfile.write(hashed.hexdigest())
+
+        return True
+
+    def teardown(self):
+        """
+        Clean the results of all test runs: remove temp files and binaries.
+        This is called only once for all tests, from the logs directory.
+        """
+        # pass
+
+    def parse(self, cachefile):
+        """Read the result of a previous run from the cache, and compute the test outcome"""
+        return 'failure'
+
+    def is_correct_diagnostic(self, test_id, res_category, expected, detail):
+        """
+        Return True if the tool diagnostic corresponds to the expected
+        error details.
+        """
+        return True
+
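+# Minimal sketch (hypothetical, for illustration only) of how a concrete tool
+# module is expected to specialize AbstractTool; the per-tool wrappers shipped
+# with MBI implement run() and parse() along these lines:
+#
+# class MyTool(AbstractTool):
+#     def run(self, execcmd, filename, binary, num_id, timeout):
+#         # Compile the code with a hypothetical build command, then execute it,
+#         # reusing the cached result when run_cmd() finds a matching md5sum
+#         self.run_cmd(buildcmd=f"mpicc {filename} -o {binary}",
+#                      execcmd=execcmd,
+#                      cachefile=f"{binary}_{num_id}", filename=filename,
+#                      binary=binary, timeout=timeout, batchinfo="1/1")
+#     def parse(self, cachefile):
+#         # Read f'{cachefile}.txt' and return 'OK', 'timeout', 'failure',
+#         # 'UNIMPLEMENTED', 'other', or a tool-specific error outcome
+#         ...
+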
+# Associate all possible detailed outcome to a given error scope. Scopes must be sorted alphabetically.
+possible_details = {
+    # scope limited to one call
+    'InvalidBuffer':'AInvalidParam', 'InvalidCommunicator':'AInvalidParam', 'InvalidDatatype':'AInvalidParam', 'InvalidRoot':'AInvalidParam', 'InvalidTag':'AInvalidParam', 'InvalidWindow':'AInvalidParam', 'InvalidOperator':'AInvalidParam', 'InvalidOtherArg':'AInvalidParam', 'ActualDatatype':'AInvalidParam',
+    'InvalidSrcDest':'AInvalidParam',
+    # scope: Process-wide
+#    'OutOfInitFini':'BInitFini',
+    'CommunicatorLeak':'BResLeak', 'DatatypeLeak':'BResLeak', 'GroupLeak':'BResLeak', 'OperatorLeak':'BResLeak', 'TypeLeak':'BResLeak', 'RequestLeak':'BResLeak',
+    'MissingStart':'BReqLifecycle', 'MissingWait':'BReqLifecycle',
+    'MissingEpoch':'BEpochLifecycle','DoubleEpoch':'BEpochLifecycle',
+    'LocalConcurrency':'BLocalConcurrency',
+    # scope: communicator
+    'CallMatching':'DMatch',
+    'CommunicatorMatching':'CMatch', 'DatatypeMatching':'CMatch', 'OperatorMatching':'CMatch', 'RootMatching':'CMatch', 'TagMatching':'CMatch',
+    'MessageRace':'DRace',
+
+    'GlobalConcurrency':'DGlobalConcurrency',
+    # larger scope
+    'BufferingHazard':'EBufferingHazard',
+    # Input Hazard
+    'IHCallMatching':'InputHazard',
+    'OK':'FOK'}
+
+error_scope = {
+    'AInvalidParam':'single call',
+    'BResLeak':'single process',
+#    'BInitFini':'single process',
+    'BReqLifecycle':'single process',
+    'BEpochLifecycle':'single process',
+    'BLocalConcurrency':'single process',
+    'CMatch':'multi-processes',
+    'DRace':'multi-processes',
+    'DMatch':'multi-processes',
+    'DGlobalConcurrency':'multi-processes',
+    'EBufferingHazard':'system',
+    'InputHazard':'user input',
+    'FOK':'correct executions'
+}
+
+displayed_name = {
+    'AInvalidParam':'Invalid parameter',
+    'BResLeak':'Resource leak',
+#    'BInitFini':'MPI call before initialization/after finalization',
+    'BReqLifecycle':'Request lifecycle',
+    'BEpochLifecycle':'Epoch lifecycle',
+    'BLocalConcurrency':'Local concurrency',
+    'CMatch':'Parameter matching',
+    'DMatch':"Call ordering",
+    'DRace':'Message race',
+    'DGlobalConcurrency':'Global concurrency',
+    'EBufferingHazard':'Buffering hazard',
+    'InputHazard':'Input Hazard',
+    'FOK':"Correct execution",
+
+    'aislinn':'Aislinn','civl':'CIVL','hermes':'Hermes', 'isp':'ISP','itac':'ITAC', 'simgrid':'Mc SimGrid', 'smpi':'SMPI','smpivg':'SMPI+VG', 'mpisv':'MPI-SV', 'must':'MUST', 'parcoach':'PARCOACH', 'mpi-checker':'MPI-Checker',
+    'simgrid-3.27':'Mc SimGrid v3.27',
+    'simgrid-3.28':'Mc SimGrid v3.28',
+    'simgrid-3.29':'Mc SimGrid v3.29',
+    'simgrid-3.30':'Mc SimGrid v3.30',
+    'simgrid-3.31':'Mc SimGrid v3.31',
+
+    "STP":"Systematic True Positive",
+    "CTP":"Can be True Positive",
+    "SFN":"Systematic False Negative",
+    "SFP":"Systematic False Positive",
+    "CFP":"Can be False Positive",
+    "STN":"Systematic True Negative",
+    "SE":"Systematic Error",
+    "CE":"Compilation Error",
+    "RE":"Runtime Error",
+    "TO":"Timeout",
+    "O":"Other",
+
+    "OK":"Correct diagnostic",
+    "COK":"Can be correct diagnostic",
+    "NOK":"Incorrect diagnostic",
+    "SE":"Errors",
+
+    'P2P!basic':'P2P', 'P2P!nonblocking':'iP2P', 'P2P!persistent':'pP2P',
+    'COLL!basic':'Coll', 'COLL!nonblocking':'iColl', 'COLL!tools':'Coll+',
+    'RMA':'RMA',
+}
+
+def parse_one_code(filename):
+    """
+    Reads the header of the provided filename and extracts a list of todo items, each of them a dict giving the command, the expected outcome, the error detail and the test number.
+    The test number is used to build a log file name containing both the binary and the test number when there is more than one test in the same binary.
+    """
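+    # Example of the header block parsed below, as written by the generators:
+    # each '$' line gives a command to run, and the following '|' line gives
+    # the expected outcome, either 'OK' or 'ERROR: <detail>'.
+    #
+    #   BEGIN_MBI_TESTS
+    #     $ mpirun -np 2 ${EXE}
+    #     | ERROR: DatatypeMatching
+    #   END_MBI_TESTS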
+    res = []
+    test_num = 0
+    with open(filename, "r") as input_file:
+        state = 0  # 0: before header; 1: in header; 2: after header
+        line_num = 1
+        for line in input_file:
+            if re.match(".*BEGIN_MBI_TESTS.*", line):
+                if state == 0:
+                    state = 1
+                else:
+                    raise ValueError(f"MBI_TESTS header appears a second time at line {line_num}: \n{line}")
+            elif re.match(".*END_MBI_TESTS.*", line):
+                if state == 1:
+                    state = 2
+                else:
+                    raise ValueError(f"Unexpected end of MBI_TESTS header at line {line_num}: \n{line}")
+            if state == 1 and re.match(r"\s+\$ ?.*", line):
+                m = re.match(r'\s+\$ ?(.*)', line)
+                cmd = m.group(1)
+                nextline = next(input_file)
+                detail = 'OK'
+                if re.match('[ |]*OK *', nextline):
+                    expect = 'OK'
+                else:
+                    m = re.match('[ |]*ERROR: *(.*)', nextline)
+                    if not m:
+                        raise ValueError(
+                            f"\n{filename}:{line_num}: MBI parse error: Test not followed by a proper 'ERROR' line:\n{line}{nextline}")
+                    expect = 'ERROR'
+                    detail = m.group(1)
+                    if detail not in possible_details:
+                        raise ValueError(
+                            f"\n{filename}:{line_num}: MBI parse error: Detailed outcome {detail} is not one of the allowed ones.")
+
+                if possible_details[detail] in ['BLocalConcurrency', 'DRace', 'DGlobalConcurrency']:
+                    #for i in [0,1,2,3,4]: # to uncomment for multiple tests
+                    test = {'filename': filename, 'id': test_num, 'cmd': cmd, 'expect': expect, 'detail': detail}
+                    if test_num < 1: # to comment for multiple tests
+                        res.append(test.copy())
+                        test_num += 1
+                        #print(f'{filename} has {test_num} tests')
+                else:
+                   test = {'filename': filename, 'id': test_num, 'cmd': cmd, 'expect': expect, 'detail': detail}
+                   if test_num < 1: # to comment for multiple tests
+                       res.append(test.copy())
+                       test_num += 1
+                       #print(f'{filename} has {test_num} tests')
+
+            line_num += 1
+
+    if state == 0:
+        raise ValueError(f"MBI_TESTS header not found in file '{filename}'.")
+    if state == 1:
+        raise ValueError(f"MBI_TESTS header not properly ended in file '{filename}'.")
+
+    if len(res) == 0:
+        raise ValueError(f"No test found in {filename}. Please fix it.")
+    return res
+
+cache_categorize = {}
+
+def categorize(tool, toolname, test_id, expected, autoclean=False):
+    cache_id = f'{toolname}_{test_id}'
+    if cache_id in cache_categorize:
+        return cache_categorize[cache_id]
+
+    outcome = tool.parse(test_id)
+
+    if not os.path.exists(f'{test_id}.elapsed') and not os.path.exists(f'logs/{toolname}/{test_id}.elapsed'):
+        if outcome == 'failure':
+            elapsed = 0
+        else:
+            raise ValueError(f"Invalid test result: {test_id}.txt exists but not {test_id}.elapsed")
+    else:
+        with open(f'{test_id}.elapsed' if os.path.exists(f'{test_id}.elapsed') else f'logs/{toolname}/{test_id}.elapsed', 'r') as infile:
+            elapsed = infile.read()
+
+    # Properly categorize this run
+    if outcome == 'timeout':
+        res_category = 'timeout'
+        if elapsed is None:
+            diagnostic = f'hard timeout'
+        else:
+            diagnostic = f'timeout after {elapsed} sec'
+    elif outcome == 'failure' or outcome == 'segfault':
+        res_category = 'failure'
+        diagnostic = f'tool error, or test not run'
+        if autoclean:
+            if os.path.exists(f'{test_id}.md5sum'):
+                os.unlink(f'{test_id}.md5sum')
+            elif os.path.exists(f'logs/{toolname}/{test_id}.md5sum'):
+                os.unlink(f'logs/{toolname}/{test_id}.md5sum')
+    elif outcome == 'UNIMPLEMENTED':
+        res_category = 'unimplemented'
+        diagnostic = f'coverage issue'
+    elif outcome == 'other':
+        res_category = 'other'
+        diagnostic = f'inconclusive run'
+        if autoclean:
+            if os.path.exists(f'{test_id}.md5sum'):
+                os.unlink(f'{test_id}.md5sum')
+            elif os.path.exists(f'logs/{toolname}/{test_id}.md5sum'):
+                os.unlink(f'logs/{toolname}/{test_id}.md5sum')
+    elif expected == 'OK':
+        if outcome == 'OK':
+            res_category = 'TRUE_NEG'
+            diagnostic = f'correctly reported no error'
+        else:
+            res_category = 'FALSE_POS'
+            diagnostic = f'reported an error in a correct code'
+    elif expected == 'ERROR':
+        if outcome == 'OK':
+            res_category = 'FALSE_NEG'
+            diagnostic = f'failed to detect an error'
+        else:
+            res_category = 'TRUE_POS'
+            diagnostic =  f'correctly detected an error'
+    else:
+        raise ValueError(f"Unexpected expectation: {expected} (must be OK or ERROR)")
+
+    cache_categorize[cache_id] = (res_category, elapsed, diagnostic, outcome)
+
+    return (res_category, elapsed, diagnostic, outcome)
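+# Example: for a test whose header expects 'ERROR', any outcome reported by
+# tool.parse() other than 'OK' (and the special cases handled above) is counted
+# as ('TRUE_POS', elapsed, 'correctly detected an error', outcome), while an
+# 'OK' outcome yields FALSE_NEG.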
+
+# Extended categorization
+
+def categorize_extended(results, expected, detail):
+    res = 'O'
+
+    result = {"result": res, 'expected': expected, 'detail': detail,
+              "FALSE_POS":0, "TRUE_NEG":0, "TRUE_POS":0, "FALSE_NEG":0,
+              "unimplemented":0, "timeout":0, "failure":0, "other":0}
+
+    for res in results:
+        result[res] += 1
+
+    TN = result['TRUE_NEG']
+    TP = result['TRUE_POS']
+    FN = result['FALSE_NEG']
+    FP = result['FALSE_POS']
+    CE = result['unimplemented']
+    TO = result['timeout']
+    RE = result['failure']
+    O  = result['other']
+
+    if expected == 'OK':
+        if TN != 0 and FP+O+CE+TO+RE == 0: # All executions are true negatives
+            result['result'] = 'STN'
+        elif FP != 0 and TN+O+CE+TO+RE == 0: # All executions are false positives
+            result['result'] = 'SFP'
+        elif O+CE+TO+RE != 0 and TN+FP == 0: # All executions are inconclusive (other)
+            if CE != 0 and O+TO+RE == 0:
+                result['result'] = 'CE'
+            elif TO != 0 and O+CE+RE == 0:
+                result['result'] = 'TO'
+            elif RE != 0 and O+TO+CE == 0:
+                result['result'] = 'RE'
+            else:
+                result['result'] = 'SE'
+        else:                   # Some executions are false positives
+            result['result'] = 'CFP'
+    else:
+        if TP != 0 and FN+O+CE+TO+RE == 0: # All executions are true positives
+            result['result'] = 'STP'
+        elif FN != 0 and TP+O+CE+TO+RE == 0: # All executions are false negatives
+            result['result'] = 'SFN'
+        elif O+CE+TO+RE != 0 and TP+FN == 0: # All executions are inconclusive (other)
+            if CE != 0 and O+TO+RE == 0:
+                result['result'] = 'CE'
+            elif TO != 0 and O+CE+RE == 0:
+                result['result'] = 'TO'
+            elif RE != 0 and O+TO+CE == 0:
+                result['result'] = 'RE'
+            else:
+                result['result'] = 'SE'
+        else:                   # Some executions are true positives
+            result['result'] = 'CTP'
+
+    return result
+
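+# Example (hypothetical per-test results): a correct code (expected 'OK') on
+# which a tool produced two FALSE_POS and one TRUE_NEG across its tests is
+# classified as 'CFP' (Can be False Positive):
+#   categorize_extended(['FALSE_POS', 'FALSE_POS', 'TRUE_NEG'], 'OK', 'OK')['result'] == 'CFP'
+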
+def categorize_all_files(tool, toolname, tests, check_diagnostic=True):
+    res = {}
+    results = {}
+    # elapsed = {}
+    expected = {}
+    detail = {}
+
+    bad_diagnostic = 0
+
+    for test in tests:
+        if test['filename'] not in results:
+            results[test['filename']] = []
+            # elapsed[test['filename']] = []
+            expected[test['filename']] = []
+            detail[test['filename']] = []
+
+    for test in tests:
+        binary = re.sub(r'\.c', '', os.path.basename(test['filename']))
+        ID = test['id']
+        test_id = f"{binary}_{ID}"
+
+        (res_category, elapsed, diagnostic, outcome) = categorize(tool=tool, toolname=toolname, test_id=test_id, expected=test['expect'], autoclean=False)
+
+        # if check_diagnostic:
+        #     if not tool.is_correct_diagnostic(test_id, res_category, test['expect'], test['detail']):
+        #         bad_diagnostic += 1
+        #         res_category = 'FALSE_NEG'
+
+        results[test['filename']].append(res_category)
+        # elapsed[test['filename']].append(str(elapsed))
+        expected[test['filename']].append(test['expect'])
+        detail[test['filename']].append(test['detail'])
+
+    print(f'{displayed_name[toolname]} : {bad_diagnostic} bad diagnostic(s)')
+
+    # Check data consistence
+    for f in expected:
+        if len(set(expected[f])) != 1:
+            raise Exception(f"Inconsistent expected results for file '{f}': {expected[f]}")
+
+    for f in results:
+        res[f] = categorize_extended(results[f], expected[f][0], detail[f][0])
+
+    return res
diff --git a/scripts/deprecated/CollDataGenerator.py b/scripts/deprecated/CollDataGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..f531739443f8daa28168acecb9223343279cab16
--- /dev/null
+++ b/scripts/deprecated/CollDataGenerator.py
@@ -0,0 +1,99 @@
+#! /usr/bin/python3
+import sys
+from generator_utils import *
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: @{collfeature}@
+  COLL!nonblocking: @{icollfeature}@
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define buff_size 128
+
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int root = 0;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Op op = MPI_SUM;
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+  @{change_type}@
+
+  int dbs = sizeof(int)*nprocs; /* Size of the dynamic buffers for alltoall and friends */
+  @{init}@
+  @{start}@
+  @{operation}@ /* MBIERROR */
+  @{fini}@
+  @{free}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
+for c in coll + icoll + ibarrier:
+    patterns = {}
+    patterns = {'c': c}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {sys.argv[0]}. DO NOT EDIT.'
+    patterns['collfeature'] = 'Yes' if c in coll else 'Lacking'
+    patterns['icollfeature'] = 'Yes' if c in icoll + ibarrier else 'Lacking'
+    patterns['c'] = c
+    patterns['init'] = init[c]("1")
+    patterns['start'] = start[c]("1")
+    patterns['fini'] = fini[c]("1")
+    patterns['operation'] = operation[c]("1")
+    patterns['free'] = free[c]("1")
+
+    # Generate the incorrect matching
+    replace = patterns
+    replace['shortdesc'] = 'Collective @{c}@ with a datatype mismatch'
+    replace['longdesc'] = 'Odd ranks use MPI_FLOAT as the datatype while even ranks use MPI_INT'
+    replace['outcome'] = 'ERROR: DatatypeMatching'
+    replace['errormsg'] = 'Collective datatype mismatch. @{c}@ at @{filename}@:@{line:MBIERROR}@ has MPI_INT or MPI_FLOAT as a datatype.'
+    replace['change_type'] = 'if (rank % 2)\n    type = MPI_FLOAT;'
+    make_file(template, f'CollDataMatching_{c}_nok.c', replace)
+
+    # Generate the call with null type
+    replace = patterns
+    replace['shortdesc'] = 'Collective @{c}@ with an invalid datatype '
+    replace['longdesc'] = 'Collective @{c}@ with an invalid datatype '
+    replace['outcome'] = 'ERROR: InvalidDatatype'
+    replace['errormsg'] = 'Invalid Datatype. @{c}@ at @{filename}@:@{line:MBIERROR}@ has an invalid datatype.'
+    replace['change_type'] = 'type=MPI_DATATYPE_NULL;'
+    make_file(template, f'CollDataNull_{c}_nok.c', replace)
diff --git a/scripts/deprecated/CollOpGenerator.py b/scripts/deprecated/CollOpGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..316b81116b0905e1bb35a70a70e5b34fde41e188
--- /dev/null
+++ b/scripts/deprecated/CollOpGenerator.py
@@ -0,0 +1,93 @@
+#! /usr/bin/python3
+import sys
+from generator_utils import *
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: @{collfeature}@
+  COLL!nonblocking: @{icollfeature}@
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+
+#define buff_size 128
+
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int root = 0;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+  MPI_Op op = MPI_SUM;
+  @{change_op}@
+
+  @{init}@
+  @{operation}@ /* MBIERROR */
+  @{fini}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
+for c in coll4op + icoll4op:
+    patterns = {}
+    patterns = {'c': c}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {sys.argv[0]}. DO NOT EDIT.'
+    patterns['collfeature'] = 'Yes' if c in  coll4op else 'Lacking'
+    patterns['icollfeature'] = 'Yes' if c in icoll4op else 'Lacking'
+    patterns['c'] = c
+    patterns['init'] = init[c]("1")
+    patterns['fini'] = fini[c]("1")
+    patterns['operation'] = operation[c]("1")
+
+    # Generate the incorrect matching
+    replace = patterns
+    replace['shortdesc'] = 'Collective @{c}@ with an operator mismatch'
+    replace['longdesc'] = f'Odd ranks use MPI_SUM as the operator while even ranks use MPI_MAX'
+    replace['outcome'] = 'ERROR: OperatorMatching'
+    replace['errormsg'] = 'Collective operator mismatch. @{c}@ at @{filename}@:@{line:MBIERROR}@ has MPI_MAX or MPI_SUM as an operator.'
+    replace['change_op'] = 'if (rank % 2)\n    op = MPI_MAX;'
+    make_file(template, f'CollOpMatching_{c}_nok.c', replace)
+
+    # Generate the call with Op=MPI_OP_NULL
+    replace = patterns
+    replace['shortdesc'] = 'Collective @{c}@ with an invalid operator '
+    replace['longdesc'] = 'Collective @{c}@ with an invalid operator '
+    replace['outcome'] = 'ERROR: InvalidOperator'
+    replace['errormsg'] = 'Invalid Operator. @{c}@ at @{filename}@:@{line:MBIERROR}@ has MPI_OP_NULL as an operator.'
+    replace['change_op'] = 'op = MPI_OP_NULL;'
+    make_file(template, f'CollOpNull_{c}_nok.c', replace)
diff --git a/scripts/deprecated/CollOtherArgGenerator.py b/scripts/deprecated/CollOtherArgGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..77cc1687f9ffde9587f756a091b1b5ceb45d2078
--- /dev/null
+++ b/scripts/deprecated/CollOtherArgGenerator.py
@@ -0,0 +1,87 @@
+#! /usr/bin/python3
+import sys
+from generator_utils import *
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: @{toolfeature}@
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define buff_size 128
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int size = 1, j = 0;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  @{init1}@
+
+  if(rank == 1) {
+    @{operation1}@
+  }else{
+    @{change_color}@ /* MBIERROR */
+    @{operation2}@
+  }
+
+  if(com != MPI_COMM_NULL)
+    MPI_Comm_free(&com);
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+# This generator only targets MPI_Comm_split (the tcoll4color list); the injected bug is an invalid color argument.
+
+for c in tcoll4color:
+    patterns = {}
+    patterns = {'c': c}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {sys.argv[0]}. DO NOT EDIT.'
+    patterns['toolfeature'] = 'Yes' if c in tcoll4color else 'Lacking'
+    patterns['c'] = c
+    patterns['init1'] = init[c]("1")
+    patterns['operation1'] = operation[c]("1")
+    patterns['operation2'] = operation[c]("2")
+
+    # Generate the code with invalid color
+    replace = patterns
+    replace['shortdesc'] = 'Invalid color in @{c}@'
+    replace['longdesc'] = f'invalid color in @{c}@'
+    replace['outcome'] = 'ERROR: InvalidOtherArg'
+    replace['errormsg'] = 'Invalid Argument in collective. @{c}@ has an invalid color (see line @{line:MBIERROR}@)'
+    replace['change_color'] = 'color=-10;'
+    make_file(template, f'CollInvalidOtherArg_{c}_nok.c', replace)
diff --git a/scripts/deprecated/CollRootGenerator.py b/scripts/deprecated/CollRootGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..193fe4e4e551b23d9de8fd3ca9409a97b0fd5c3e
--- /dev/null
+++ b/scripts/deprecated/CollRootGenerator.py
@@ -0,0 +1,115 @@
+#! /usr/bin/python3
+import sys
+from generator_utils import *
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: @{collfeature}@
+  COLL!nonblocking: @{icollfeature}@
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+
+#define buff_size 128
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Op op = MPI_SUM;
+  MPI_Datatype type = MPI_INT;
+
+  int dbs = sizeof(int)*nprocs; /* Size of the dynamic buffers for alltoall and friends */
+  int root = 0;
+
+  @{init}@
+  @{change_root}@
+
+  @{operation}@
+  @{fini}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
+# Generate code with one collective
+for c in coll4root + icoll4root:
+    patterns = {}
+    patterns = {'c': c}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {sys.argv[0]}. DO NOT EDIT.'
+    patterns['collfeature'] = 'Yes' if c in coll4root else 'Lacking'
+    patterns['icollfeature'] = 'Yes' if c in icoll4root else 'Lacking'
+    patterns['c'] = c
+    patterns['init'] = init[c]("1")
+    patterns['fini'] = fini[c]("1")
+    patterns['operation'] = operation[c]("1")
+    patterns['change_root'] = ''
+
+    # Generate the correct code ==> to remove?
+    #replace = patterns
+    #replace['shortdesc'] = 'Collective @{c}@ with correct arguments'
+    #replace['longdesc'] = f'All ranks in MPI_COMM_WORLD call {c} with correct arguments'
+    #replace['outcome'] = 'OK'
+    #replace['errormsg'] = ''
+    #replace['change_root'] = '/* No error injected here */'
+    #make_file(template, f'CollCorrect_{c}.c', replace)
+
+    # Generate an incorrect root matching
+    replace = patterns
+    replace['shortdesc'] = 'Collective @{c}@ with a root mismatch'
+    replace['longdesc'] = f'Odd ranks use 0 as a root while even ranks use 1 as a root'
+    replace['outcome'] = 'ERROR: RootMatching'
+    replace['errormsg'] = 'Collective root mismatch. @{c}@ at @{filename}@:@{line:MBIERROR}@ has 0 or 1 as a root.'
+    replace['change_root'] = 'if (rank % 2)\n    root = 1; /* MBIERROR */'
+    make_file(template, f'CollRootMatching_{c}_nok.c', replace)
+
+    # Generate the call with root=-1
+    replace = patterns
+    replace['shortdesc'] = f'Collective {c} with root = -1'
+    replace['longdesc'] = f'Collective {c} with root = -1'
+    replace['outcome'] = 'ERROR: InvalidRoot'
+    replace['errormsg'] = 'Invalid collective root.  @{c}@ at @{filename}@:@{line:MBIERROR}@ has -1 as a root while communicator MPI_COMM_WORLD requires ranks in range 0 to 1.'
+    replace['change_root'] = 'root = -1; /* MBIERROR */'
+    make_file(template, f'CollRootNeg_{c}_nok.c', replace)
+
+    # Generate the call with root=2
+    replace = patterns
+    replace['shortdesc'] = f'Collective {c} with root out of the communicator'
+    replace['longdesc'] = f'Collective {c} with root = 2 (there is only 2 ranks)'
+    replace['outcome'] = 'ERROR: InvalidRoot'
+    replace['errormsg'] = 'Invalid collective root.  @{c}@ at @{filename}@:@{line:MBIERROR}@ has 2 as a root while communicator MPI_COMM_WORLD requires ranks in range 0 to 1.'
+    replace['change_root'] = 'root = nprocs; /* MBIERROR */'
+    make_file(template, f'CollRootTooLarge_{c}_nok.c', replace)
diff --git a/scripts/deprecated/OutOfInitFiniGenerator.py b/scripts/deprecated/OutOfInitFiniGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..e072159e670eb569c65ac508f37d80ae4d5889db
--- /dev/null
+++ b/scripts/deprecated/OutOfInitFiniGenerator.py
@@ -0,0 +1,153 @@
+#! /usr/bin/python3
+import sys
+from generator_utils import make_file
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: @{collfeature}@
+  COLL!nonblocking: @{icollfeature}@
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define buff_size 128
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  @{init1}@
+  @{operation1}@
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug.\\n");
+
+  int dbs = sizeof(int)*nprocs; /* Size of the dynamic buffers for alltoall and friends */
+  @{init2}@
+  @{fini1}@
+
+  MPI_Finalize();
+
+  @{operation2}@ /* MBIERROR */
+  @{fini2}@
+
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+collectives = ['MPI_Allgather', 'MPI_Allgatherv', 'MPI_Allreduce', 'MPI_Alltoall', 'MPI_Alltoallv', 'MPI_Barrier', 'MPI_Bcast', 'MPI_Gather', 'MPI_Reduce', 'MPI_Scatter']
+icollectives = ['MPI_Ibarrier', 'MPI_Ireduce']
+
+init = {}
+fini = {}
+operation = {}
+
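+# Each collective maps to three snippet factories (init, operation, fini) taking
+# a suffix n, so the same collective can be instantiated twice in one file: once
+# inside the MPI_Init/MPI_Finalize region and once outside of it (the error).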
+init['MPI_Allgather'] = lambda n: f"int *rbuf{n} = malloc(dbs);"
+operation['MPI_Allgather'] = lambda n: f"MPI_Allgather(&rank, 1, MPI_INT, rbuf{n}, 1, MPI_INT, MPI_COMM_WORLD);"
+fini['MPI_Allgather'] = lambda n: f"free(rbuf{n});"
+
+init['MPI_Allgatherv'] = lambda n: (f"int *rbuf{n} = malloc(dbs*2), *rcounts{n}=malloc(dbs),  *displs{n}=malloc(dbs);\n"
+  +  "  for (int i = 0; i < nprocs; i++) {\n"
+  + f"    rcounts{n}[i] = 1;\n"
+  + f"    displs{n}[i] = 2 * (nprocs - (i + 1));\n"
+  +  "  }")
+operation['MPI_Allgatherv'] = lambda n: f"MPI_Allgatherv(&rank, 1, MPI_INT, rbuf{n}, rcounts{n}, displs{n}, MPI_INT, MPI_COMM_WORLD);"
+fini['MPI_Allgatherv'] = lambda n: f"free(rbuf{n});free(rcounts{n});free(displs{n});"
+
+init['MPI_Allreduce'] = lambda n: f"int sum{n}, val{n} = 1;"
+operation['MPI_Allreduce'] = lambda n: f"MPI_Allreduce(&sum{n}, &val{n}, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);"
+fini['MPI_Allreduce'] = lambda n: ""
+
+init['MPI_Alltoall'] = lambda n: f"int *sbuf{n} = malloc(dbs), *rbuf{n} = malloc(dbs);"
+operation['MPI_Alltoall'] = lambda n: f"MPI_Alltoall(sbuf{n}, 1, MPI_INT, rbuf{n}, 1, MPI_INT, MPI_COMM_WORLD);"
+fini['MPI_Alltoall'] = lambda n: f"free(sbuf{n});free(rbuf{n});"
+
+init['MPI_Alltoallv'] = lambda n: (f"int *sbuf{n}=malloc(dbs*2), *rbuf{n}=malloc(dbs*2), *scounts{n}=malloc(dbs), *rcounts{n}=malloc(dbs), *sdispls{n}=malloc(dbs), *rdispls{n}=malloc(dbs);\n"
+  +  "  for (int i = 0; i < nprocs; i++) {\n"
+  + f"    scounts{n}[i] = 2;\n"
+  + f"    rcounts{n}[i] = 2;\n"
+  + f"    sdispls{n}[i] = (nprocs - (i + 1)) * 2;\n"
+  + f"    rdispls{n}[i] = i * 2;\n"
+  +  "  }")
+operation['MPI_Alltoallv'] = lambda n: f"MPI_Alltoallv(sbuf{n}, scounts{n}, sdispls{n}, MPI_INT, rbuf{n}, rcounts{n}, rdispls{n}, MPI_INT, MPI_COMM_WORLD);"
+fini['MPI_Alltoallv'] = lambda n: f"free(sbuf{n});free(rbuf{n});free(scounts{n});free(rcounts{n});free(sdispls{n});free(rdispls{n});"
+
+init['MPI_Barrier'] = lambda n: ""
+operation['MPI_Barrier'] = lambda n: 'MPI_Barrier(MPI_COMM_WORLD);'
+fini['MPI_Barrier'] = lambda n: ""
+
+init['MPI_Ibarrier'] = lambda n: f"MPI_Request req{n};MPI_Status sta{n};"
+operation['MPI_Ibarrier'] = lambda n: f"MPI_Ibarrier(MPI_COMM_WORLD,&req{n});MPI_Wait(&req{n},&sta{n});"
+fini['MPI_Ibarrier'] = lambda n: ""
+
+init['MPI_Bcast'] = lambda n: f'int buf{n}[buff_size];'
+operation['MPI_Bcast'] = lambda n: f'MPI_Bcast(buf{n}, buff_size, MPI_INT, 0, MPI_COMM_WORLD);'
+fini['MPI_Bcast'] = lambda n: ""
+
+init['MPI_Reduce'] = lambda n: f"int sum{n}, val{n} = 1;"
+operation['MPI_Reduce'] = lambda n: f"MPI_Reduce(&sum{n}, &val{n}, 1, MPI_INT, MPI_SUM, 0, MPI_COMM_WORLD);"
+fini['MPI_Reduce'] = lambda n: ""
+
+init['MPI_Ireduce'] = lambda n: f"MPI_Request req{n}; MPI_Status sta{n}; int sum{n}, val{n} = 1;"
+operation['MPI_Ireduce'] = lambda n: f"MPI_Ireduce(&sum{n}, &val{n}, 1, MPI_INT, MPI_SUM, 0, MPI_COMM_WORLD, &req{n}); MPI_Wait(&req{n},&sta{n});"
+fini['MPI_Ireduce'] = lambda n: ""
+
+init['MPI_Scatter'] = lambda n: f"int val{n}, buf{n}[buff_size];"
+operation['MPI_Scatter'] = lambda n: f"MPI_Scatter(&buf{n}, 1, MPI_INT, &val{n}, 1, MPI_INT, 0, MPI_COMM_WORLD);"
+fini['MPI_Scatter'] = lambda n: ""
+
+init['MPI_Gather'] = lambda n: f"int val{n}, buf{n}[buff_size];"
+operation['MPI_Gather'] = lambda n: f"MPI_Gather(&val{n}, 1, MPI_INT, buf{n},1, MPI_INT, 0, MPI_COMM_WORLD);"
+fini['MPI_Gather'] = lambda n: ""
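+# Hypothetical rendering: with coll='MPI_Allreduce' and n="2", init2 and
+# operation2 expand in the template to roughly
+#   int sum2, val2 = 1;
+#   MPI_Allreduce(&sum2, &val2, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD); /* MBIERROR */
+# i.e. the call that ends up after MPI_Finalize in the generated test.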
+
+for coll in collectives + icollectives:
+    patterns = {}
+    patterns = {'coll': coll}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {sys.argv[0]}. DO NOT EDIT.'
+    patterns['collfeature'] = 'Yes' if coll in collectives else 'Lacking'
+    patterns['icollfeature'] = 'Yes' if coll in icollectives else 'Lacking'
+    patterns['coll'] = coll
+    patterns['init1'] = init[coll]("1")
+    patterns['init2'] = init[coll]("2")
+    patterns['fini1'] = fini[coll]("1")
+    patterns['fini2'] = fini[coll]("2")
+    patterns['operation1'] = operation[coll]("1")
+    patterns['operation2'] = operation[coll]("2")
+
+    # Generate the incorrect code
+    replace = patterns
+    replace['shortdesc'] = 'Collective outside of parallel region'
+    replace['longdesc'] = f'Collective {coll} called outside of the MPI parallel region'
+    replace['outcome'] = 'ERROR: OutOfInitFini'
+    replace['errormsg'] = 'Collective @{coll}@ at @{filename}@:@{line:MBIERROR}@ is called after MPI_Finalize'
+    replace['init1'] = ""
+    replace['fini1'] = ""
+    replace['operation1'] = ""
+    make_file(template, f'CallAfterFinalize_{coll}_nok.c', replace)
diff --git a/scripts/deprecated/P2PDataGenerator.py b/scripts/deprecated/P2PDataGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..1185f4f1b2838266936c39b39308993ba6d6d4f9
--- /dev/null
+++ b/scripts/deprecated/P2PDataGenerator.py
@@ -0,0 +1,105 @@
+#! /usr/bin/python3
+import sys
+from generator_utils import *
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: @{persfeature}@
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int src=0, dest=1;
+  int stag = 0, rtag = 0;
+  int buff_size = 1;
+  MPI_Comm newcom = MPI_COMM_WORLD;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Datatype type = MPI_INT;
+  @{change_type}@
+
+  @{init1}@
+  @{init2}@
+  if (rank == 0) {
+    @{operation1}@ /* MBIERROR1 */
+    @{start1}@
+    @{fini1}@
+  }else if (rank == 1) {
+    @{operation2}@ /* MBIERROR2 */
+    @{start2}@
+    @{fini2}@
+  }
+  @{free1}@
+  @{free2}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
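+# Cross every send flavor (blocking, nonblocking, persistent) with every receive
+# flavor; the only injected fault is @{change_type}@, which switches rank 0 to
+# MPI_FLOAT while rank 1 keeps the MPI_INT declared in the template.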
+for p1 in send + isend + psend:
+    for p2 in recv + irecv + precv:
+        patterns = {}
+        patterns = {'p1': p1, 'p2': p2}
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {sys.argv[0]}. DO NOT EDIT.'
+        patterns['p2pfeature'] = 'Yes' if p1 in send or p2 in recv  else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if p1 in isend or p2 in irecv  else 'Lacking'
+        patterns['persfeature'] = 'Yes' if p1 in psend or p2 in precv  else 'Lacking'
+        patterns['p1'] = p1
+        patterns['p2'] = p2
+        patterns['init1'] = init[p1]("1")
+        patterns['init2'] = init[p2]("2")
+        patterns['start1'] = start[p1]("1")
+        patterns['start2'] = start[p2]("2")
+        patterns['fini1'] = fini[p1]("1")
+        patterns['fini2'] = fini[p2]("2")
+        patterns['operation1'] = operation[p1]("1") #send
+        patterns['operation2'] = operation[p2]("2") #recv
+        patterns['free1'] = free[p1]("1")
+        patterns['free2'] = free[p2]("2")
+
+        # Generate the incorrect matching
+        replace = patterns
+        replace['shortdesc'] = 'Point to point @{p1}@ and @{p2}@ have a datatype mismatch'
+        replace['longdesc'] = 'Process 0 uses MPI_FLOAT as the datatype while process 1 uses MPI_INT.'
+        replace['outcome'] = 'ERROR: DatatypeMatching'
+        replace['errormsg'] = 'P2P Datatype mismatch. @{p1}@ at @{filename}@:@{line:MBIERROR1}@ and @{p2}@ at @{filename}@:@{line:MBIERROR2}@ have MPI_INT and MPI_FLOAT as a datatype'
+        replace['change_type'] = 'if (rank == 0)\n    type = MPI_FLOAT;'
+        make_file(template, f'P2PDataMatching_{p1}_{p2}_nok.c', replace)
+
diff --git a/scripts/deprecated/P2PTagGenerator.py b/scripts/deprecated/P2PTagGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..8142eb5790e23f8fd5b575abf5b6cc909779293a
--- /dev/null
+++ b/scripts/deprecated/P2PTagGenerator.py
@@ -0,0 +1,123 @@
+#! /usr/bin/python3
+import sys
+from generator_utils import *
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: @{persfeature}@
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int src=0, dest=1;
+  int stag=0, rtag=1;
+  int buff_size = 1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+
+  @{change_tag}@
+
+  @{init1}@
+  @{init2}@
+  if (rank == 0) {
+    @{operation1}@ /* MBIERROR1 */
+    @{start1}@
+    @{fini1}@
+    @{free1}@
+  }else if (rank == 1) {
+    @{operation2}@ /* MBIERROR2 */
+    @{start2}@
+    @{fini2}@
+    @{free2}@
+  }
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
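+# The template already declares mismatched tags (stag=0, rtag=1), so the tag
+# mismatch case needs no extra injection; @{change_tag}@ either stays empty,
+# makes both tags negative (invalid tag), or sets rtag=MPI_ANY_TAG (correct code).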
+for p1 in send + isend + psend:
+    for p2 in recv + irecv + precv:
+        patterns = {}
+        patterns = {'p1': p1, 'p2': p2}
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {sys.argv[0]}. DO NOT EDIT.'
+        patterns['p2pfeature'] = 'Yes' if p1 in send or p2 in recv  else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if p1 in isend or p2 in irecv  else 'Lacking'
+        patterns['persfeature'] = 'Yes' if p1 in psend or p2 in precv  else 'Lacking'
+        patterns['p1'] = p1
+        patterns['p2'] = p2
+        patterns['init1'] = init[p1]("1")
+        patterns['init2'] = init[p2]("2")
+        patterns['start1'] = start[p1]("1")
+        patterns['start2'] = start[p2]("2")
+        patterns['fini1'] = fini[p1]("1")
+        patterns['fini2'] = fini[p2]("2")
+        patterns['operation1'] = operation[p1]("1") #send
+        patterns['operation2'] = operation[p2]("2") #recv
+        patterns['free1'] = free[p1]("1")
+        patterns['free2'] = free[p2]("2")
+        patterns['change_tag'] = ""
+
+        # Generate the incorrect matching
+        replace = patterns
+        replace['shortdesc'] = 'Point to point @{p1}@ and @{p2}@ have a tag mismatch'
+        replace['longdesc'] = 'Point to point @{p1}@ and @{p2}@ have a tag mismatch.'
+        replace['outcome'] = 'ERROR: TagMatching'
+        replace['errormsg'] = 'P2P tag mismatch. @{p1}@ at @{filename}@:@{line:MBIERROR1}@ and @{p2}@ at @{filename}@:@{line:MBIERROR2}@ use different tags.'
+        make_file(template, f'P2PTagMatching_{p1}_{p2}_nok.c', replace)
+
+        # Generate the code with an invalid communicator
+        replace = patterns
+        replace['shortdesc'] = 'Point to point @{p1}@ and @{p2}@ have an invalid tag'
+        replace['longdesc'] = 'Point to point @{p1}@ and @{p2}@ have an invalid tag.'
+        replace['outcome'] = 'ERROR: InvalidTag'
+        replace['errormsg'] = 'Invalid Tag. @{p1}@ at @{filename}@:@{line:MBIERROR1}@ and @{p2}@ at @{filename}@:@{line:MBIERROR2}@ use an invalid tag.'
+        replace['change_tag'] = 'stag=-1; rtag=-2;/* MBIERROR */'
+        make_file(template, f'P2PInvalidTag_{p1}_{p2}_nok.c', replace)
+
+        # Generate a correct code using MPI_ANY_TAG
+        replace = patterns
+        replace['shortdesc'] = 'Correct code'
+        replace['longdesc'] = 'Correct code'
+        replace['outcome'] = 'OK'
+        replace['errormsg'] = 'OK'
+        replace['change_tag'] = 'rtag=MPI_ANY_TAG;'
+        make_file(template, f'P2PTagMatching_{p1}_{p2}_ok.c', replace)
diff --git a/scripts/deprecated/RMALocalConcurrencyGenerator.py b/scripts/deprecated/RMALocalConcurrencyGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..f37e572824aca791730566005f31806323afb677
--- /dev/null
+++ b/scripts/deprecated/RMALocalConcurrencyGenerator.py
@@ -0,0 +1,164 @@
+#! /usr/bin/python3
+import sys
+from generator_utils import *
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 2, requires MPI 3 implementation (for lock_all/unlock_all epochs)
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: @{rmafeature}@
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 10
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  MPI_Win win;
+  int *winbuf = malloc(N * sizeof(int)); // Window buffer
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Datatype type = MPI_INT;
+  int target = (rank + 1) % nprocs;
+
+  MPI_Win_create(&winbuf, N * sizeof(int), sizeof(int), MPI_INFO_NULL, MPI_COMM_WORLD, &win);
+
+  @{epoch}@
+
+  @{init1}@
+
+  if (rank == 0) {
+    @{operation1}@ /* MBIERROR1 */
+    @{operation2}@ /* MBIERROR2 */
+  }
+
+  @{finEpoch}@
+
+  MPI_Win_free(&win);
+  free(winbuf);
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
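+# Pair a Get with every other access to the same window (Put, Get, plain
+# load/store): both operations are issued by rank 0 within one epoch, which is
+# the local concurrency race marked MBIERROR1/MBIERROR2 in the template.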
+for e in epoch:
+    for p1 in get:
+        for p2 in put + store + load + get + loadstore:
+            patterns = {}
+            patterns = {'e': e, 'p1': p1, 'p2': p2}
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {sys.argv[0]}. DO NOT EDIT.'
+            patterns['rmafeature'] = 'Yes'
+            patterns['p1'] = p1
+            patterns['p2'] = p2
+            patterns['e'] = e
+            patterns['epoch'] = epoch[e]("1")
+            patterns['finEpoch'] = finEpoch[e]("1")
+            patterns['init1'] = init[p1]("1")
+            patterns['operation1'] = operation[p1]("1")
+            patterns['operation2'] = operation[p2]("1")
+
+            # Generate a data race (Get + Get/load/store/Put)
+            replace = patterns
+            replace['shortdesc'] = 'Local Concurrency error.'
+            replace['longdesc'] = 'Local Concurrency error. @{p2}@ conflicts with @{p1}@'
+            replace['outcome'] = 'ERROR: LocalConcurrency'
+            replace['errormsg'] = 'Local Concurrency error. @{p2}@ at @{filename}@:@{line:MBIERROR2}@ conflicts with @{p1}@ line @{line:MBIERROR1}@'
+            make_file(template, f'LocalConcurrency_{e}_{p1}_{p2}_nok.c', replace)
+            # Generate a correct code by switching operation1 and  operation2
+            if p2 in store + load + loadstore:
+                replace = patterns
+                replace['shortdesc'] = 'Correct code using RMA operations'
+                replace['longdesc'] = 'Correct code using RMA operations'
+                replace['outcome'] = 'OK'
+                replace['errormsg'] = 'OK'
+                replace['operation1'] = operation[p2]("1")
+                replace['operation2'] = operation[p1]("1")
+                make_file(template, f'LocalConcurrency_{e}_{p2}_{p1}_ok.c', replace)
+        # Generate a correct code by removing operation2
+        replace = patterns
+        replace['shortdesc'] = 'Correct code using RMA operations'
+        replace['longdesc'] = 'Correct code using RMA operations'
+        replace['outcome'] = 'OK'
+        replace['errormsg'] = 'OK'
+        replace['operation1'] = operation[p1]("1")
+        replace['operation2'] = ''
+        make_file(template, f'LocalConcurrency_{e}_{p1}_ok.c', replace)
+
+
+for e in epoch:
+    for p1 in put:
+        for p2 in store:
+            patterns = {}
+            patterns = {'e': e, 'p1': p1, 'p2': p2}
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {sys.argv[0]}. DO NOT EDIT.'
+            patterns['rmafeature'] = 'Yes'
+            patterns['p1'] = p1
+            patterns['p2'] = p2
+            patterns['e'] = e
+            patterns['epoch'] = epoch[e]("1")
+            patterns['finEpoch'] = finEpoch[e]("1")
+            patterns['init1'] = init[p1]("1")
+            patterns['operation1'] = operation[p1]("1")
+            patterns['operation2'] = operation[p2]("1")
+
+            # Generate a data race (Put + store)
+            replace = patterns
+            replace['shortdesc'] = 'Local Concurrency error.'
+            replace['longdesc'] = 'Local Concurrency error. @{p2}@ conflicts with @{p1}@'
+            replace['outcome'] = 'ERROR: LocalConcurrency'
+            replace['errormsg'] = 'Local Concurrency error. @{p2}@ at @{filename}@:@{line:MBIERROR2}@ conflicts with @{p1}@ line @{line:MBIERROR1}@'
+            make_file(template, f'LocalConcurrency_{e}_{p1}_{p2}_nok.c', replace)
+            # Generate a correct code by switching operation1 and operation2
+            replace = patterns
+            replace['shortdesc'] = 'Correct code using RMA operations'
+            replace['longdesc'] = 'Correct code using RMA operations'
+            replace['outcome'] = 'OK'
+            replace['errormsg'] = 'OK'
+            replace['operation1'] = operation[p2]("1")
+            replace['operation2'] = operation[p1]("1")
+            make_file(template, f'LocalConcurrency_{e}_{p2}_{p1}_ok.c', replace)
+
+            # Generate a correct code by removing operation2
+            replace = patterns
+            replace['shortdesc'] = 'Correct code using RMA operations'
+            replace['longdesc'] = 'Correct code using RMA operations'
+            replace['outcome'] = 'OK'
+            replace['errormsg'] = 'OK'
+            replace['operation1'] = operation[p1]("1")
+            replace['operation2'] = ''
+            make_file(template, f'LocalConcurrency_{e}_{p1}_ok.c', replace)
diff --git a/scripts/deprecated/RMARemoteRemoteConcurrencyGenerator2.py b/scripts/deprecated/RMARemoteRemoteConcurrencyGenerator2.py
new file mode 100755
index 0000000000000000000000000000000000000000..259c5cd7d5f3c9d8ca5cda50ee6dfee37530146c
--- /dev/null
+++ b/scripts/deprecated/RMARemoteRemoteConcurrencyGenerator2.py
@@ -0,0 +1,107 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 2, requires MPI 3 implementation (for lock_all/unlock_all epochs)
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: @{rmafeature}@
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 3 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 10
+
+int main(int argc, char **argv) {
+  int nprocs = -1, rank = -1;
+  MPI_Win win;
+  int *winbuf = (int *)malloc(N * sizeof(int)); // Window buffer
+  int target=1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Datatype type = MPI_INT;
+
+  MPI_Win_create(&winbuf, N * sizeof(int), 1, MPI_INFO_NULL, MPI_COMM_WORLD, &win);
+
+  @{epoch}@
+
+  @{init1}@
+  @{init2}@
+
+  if (rank == 0) {
+     @{operation1}@ /* MBIERROR1 */
+  }else if (rank == 2){
+     @{operation2}@ /* MBIERROR2 */
+  }
+
+  @{finEpoch}@
+
+
+  MPI_Win_free(&win);
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
+# TODO: replace with Celia's codes in rr
+
+
+for e in gen.epoch:
+    for p1 in gen.put:
+        for p2 in gen.get:
+            patterns = {}
+            patterns = {'e': e, 'p1': p1, 'p2': p2}
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+            patterns['rmafeature'] = 'Yes'
+            patterns['p1'] = p1
+            patterns['p2'] = p2
+            patterns['e'] = e
+            patterns['epoch'] = gen.epoch[e]("1")
+            patterns['finEpoch'] = gen.finEpoch[e]("1")
+            patterns['init1'] = gen.init[p1]("1") #put
+            patterns['init2'] = gen.init[p2]("2") #get
+            patterns['operation1'] = gen.operation[p1]("1") #put
+            patterns['operation2'] = gen.operation[p2]("2") #get
+
+            # Generate a data race (Put + Get)
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Global Concurrency error.'
+            replace['longdesc'] = 'Global Concurrency error. @{p2}@ conflicts with @{p1}@ on the target side'
+            replace['outcome'] = 'ERROR: GlobalConcurrency'
+            replace['errormsg'] = 'Global Concurrency error. @{p2}@ at @{filename}@:@{line:MBIERROR2}@ conflicts with @{p1}@ line @{line:MBIERROR1}@ on the target side'
+            gen.make_file(template, f'GlobalConcurrency_{e}_{p1}_{p2}_nok.c', replace)
diff --git a/scripts/ensure_python3 b/scripts/ensure_python3
new file mode 100755
index 0000000000000000000000000000000000000000..962fe1ea7fd5a9b65f5f8b7bc925fd23f85da066
--- /dev/null
+++ b/scripts/ensure_python3
@@ -0,0 +1,56 @@
+#! /bin/bash
+
+# Try to use one of the python binaries existing on this machine
+for candidate in `which python3.8` `which python3.7` `which python3.6` `which python3` `which python`
+do
+    if $candidate <<EOF
+import sys
+if sys.version_info < (3, 7):
+    sys.exit(1)
+sys.exit(0)
+EOF
+    then
+        exec "$candidate" "$@"
+    fi
+done
+
+# Try to guess the used docker image, and install the missing packages from it
+if [ -e /etc/os-release ] 
+then
+    source /etc/os-release
+    if [ "$VERSION_ID" == "18.04" ] && [ "$ID" == "ubuntu" ] 
+    then 
+        echo "This is an Ubuntu 18.04 OS. Assuming an Aislinn run."
+        apt-get update
+        apt-get install -y python3.8
+        exec python3.8 "$@"
+    fi
+fi
+
+if [ -e /root/mpi-sv/mpisv ]
+then
+    echo "MPI-SV image detected. Good."
+    apt-get update
+    apt-get install -y software-properties-common # First install add-apt-repository
+    echo|add-apt-repository ppa:fkrull/deadsnakes # Add a source for the python we need
+    apt-get update
+    apt-get install -y python3.6
+    exec python3.6 "$@"
+fi
+
+echo "Please run this script from the relevant MBI image."
+if echo $@ | grep -qi aislinn 
+then
+    get="docker image pull ubuntu:18.04"
+    img=ubuntu:18.04
+elif echo $@ | grep -qi mpisv
+then
+    get="docker image pull mpisv/mpi-sv"
+    img=mpisv/mpi-sv
+else
+    get="docker build -f Dockerfile -t mpi-bugs-initiative:latest . # Only the first time"
+    img=mpi-bugs-initiative
+fi
+echo "  $get"
+echo '  docker run -it --rm --name MIB --volume $(pwd):/MBI '"$img /MBI/MBI.py $@"
+exit 1
diff --git a/scripts/generators/CollArgGenerator.py b/scripts/generators/CollArgGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..e269309bdf578387b24c009b61b4ec9e7ff07219
--- /dev/null
+++ b/scripts/generators/CollArgGenerator.py
@@ -0,0 +1,225 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: @{collfeature}@
+  COLL!nonblocking: @{icollfeature}@
+  COLL!persistent: Lacking
+  COLL!tools: @{toolfeature}@
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define buff_size 128
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int root = 0;
+  int size = 1;
+  int j=0;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Op op = MPI_SUM;
+  MPI_Datatype type = MPI_INT;
+
+  int dbs = sizeof(int)*nprocs; /* Size of the dynamic buffers for alltoall and friends */
+
+  @{init}@
+  @{start}@
+
+  @{change_arg}@
+  @{operation}@ /* MBIERROR2 */
+  @{fini}@
+  @{free}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
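+# @{change_arg}@ is the single injection point of this template: each section
+# below uses it to corrupt the color, the root, the datatype or the reduction
+# operator right before the collective call marked MBIERROR2.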
+#####################################################
+# Generate code with color mismatch in MPI_Comm_split
+#####################################################
+
+for c in gen.tcoll4color:
+    patterns = {}
+    patterns = {'c': c}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+    patterns['collfeature'] = 'Lacking'
+    patterns['icollfeature'] = 'Lacking'
+    patterns['toolfeature'] = 'Yes' if c in gen.tcoll4color else 'Lacking'
+    patterns['c'] = c
+    patterns['init'] = gen.init[c]("1")
+    patterns['start'] = gen.start[c]("1")
+    patterns['operation'] = gen.operation[c]("1")
+    patterns['fini'] = gen.fini[c]("1")
+    patterns['free'] = gen.free[c]("1")
+    patterns['change_arg'] = ''
+
+    # Generate the code with invalid color
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Invalid color in @{c}@'
+    replace['longdesc'] = 'invalid color in @{c}@'
+    replace['outcome'] = 'ERROR: InvalidOtherArg'
+    replace['errormsg'] = 'Invalid Argument in collective. @{c}@ at line @{line:MBIERROR2}@ has an invalid color'
+    replace['change_arg'] = 'color=-10; /* MBIERROR1*/'
+    gen.make_file(template, f'InvalidParam_OtherArg_{c}_nok.c', replace)
+
+
+##################################
+# Generate code with root mismatch
+##################################
+
+for c in gen.coll4root + gen.icoll4root:
+    patterns = {}
+    patterns = {'c': c}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+    patterns['collfeature'] = 'Yes' if c in gen.coll4root else 'Lacking'
+    patterns['icollfeature'] = 'Yes' if c in gen.icoll4root else 'Lacking'
+    patterns['toolfeature'] = 'Lacking'
+    patterns['c'] = c
+    patterns['init'] = gen.init[c]("1")
+    patterns['start'] = gen.start[c]("1")
+    patterns['fini'] = gen.fini[c]("1")
+    patterns['free'] = gen.free[c]("1")
+    patterns['operation'] = gen.operation[c]("1")
+    patterns['change_arg'] = ''
+
+    # Generate an incorrect root matching (root mismatch)
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Collective @{c}@ with a root mismatch'
+    replace['longdesc'] = 'Odd ranks use 0 as a root while even ranks use 1 as a root'
+    replace['outcome'] = 'ERROR: RootMatching'
+    replace['errormsg'] = 'Collective root mismatch. @{c}@ at @{filename}@:@{line:MBIERROR2}@ has 0 or 1 as a root.'
+    replace['change_arg'] = 'if (rank % 2)\n    root = 1; /* MBIERROR1 */'
+    gen.make_file(template, f'ParamMatching_Root_{c}_nok.c', replace)
+
+    # Generate the call with root=-1 (invalid root)
+    replace = patterns.copy()
+    replace['shortdesc'] = f'Collective {c} with root = -1'
+    replace['longdesc'] = f'Collective {c} with root = -1'
+    replace['outcome'] = 'ERROR: InvalidRoot'
+    replace['errormsg'] = 'Invalid collective root.  @{c}@ at @{filename}@:@{line:MBIERROR2}@ has -1 as a root while communicator MPI_COMM_WORLD requires ranks in range 0 to 1.'
+    replace['change_arg'] = 'root = -1; /* MBIERROR1 */'
+    gen.make_file(template, f'InvalidParam_RootNeg_{c}_nok.c', replace)
+
+    # Generate the call with root=2 (root not in communicator)
+    replace = patterns.copy()
+    replace['shortdesc'] = f'Collective {c} with root out of the communicator'
+    replace['longdesc'] = f'Collective {c} with root = 2 (there is only 2 ranks)'
+    replace['outcome'] = 'ERROR: InvalidRoot'
+    replace['errormsg'] = 'Invalid collective root.  @{c}@ at @{filename}@:@{line:MBIERROR2}@ has 2 as a root while communicator MPI_COMM_WORLD requires ranks in range 0 to 1.'
+    replace['change_arg'] = 'root = nprocs; /* MBIERROR1 */'
+    gen.make_file(template, f'InvalidParam_RootTooLarge_{c}_nok.c', replace)
+
+
+##################################
+# Generate code with type mismatch
+##################################
+
+for c in gen.coll + gen.icoll:
+    if c != 'MPI_Barrier': # Barrier has no Data to mismatch or to nullify
+        patterns = {}
+        patterns = {'c': c}
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['collfeature'] = 'Yes' if c in gen.coll else 'Lacking'
+        patterns['icollfeature'] = 'Yes' if c in gen.icoll + gen.ibarrier else 'Lacking'
+        patterns['toolfeature'] = 'Lacking'
+        patterns['c'] = c
+        patterns['init'] = gen.init[c]("1")
+        patterns['start'] = gen.start[c]("1")
+        patterns['fini'] = gen.fini[c]("1")
+        patterns['operation'] = gen.operation[c]("1")
+        patterns['free'] = gen.free[c]("1")
+        patterns['change_arg'] = ''
+
+        # Generate the incorrect matching (datatype mismatch)
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Collective @{c}@ with a datatype mismatch'
+        replace['longdesc'] = 'Odd ranks use MPI_INT as the datatype while even ranks use MPI_FLOAT'
+        replace['outcome'] = 'ERROR: DatatypeMatching'
+        replace['errormsg'] = 'Collective datatype mismatch. @{c}@ at @{filename}@:@{line:MBIERROR2}@ has MPI_INT or MPI_FLOAT as a datatype.'
+        replace['change_arg'] = 'if (rank % 2)\n    type = MPI_FLOAT; /* MBIERROR1 */'
+        gen.make_file(template, f'ParamMatching_Data_{c}_nok.c', replace)
+
+        # Generate the call with null type (invalid datatype)
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Collective @{c}@ with an invalid datatype '
+        replace['longdesc'] = 'Collective @{c}@ with an invalid datatype '
+        replace['outcome'] = 'ERROR: InvalidDatatype'
+        replace['errormsg'] = 'Invalid Datatype. @{c}@ at @{filename}@:@{line:MBIERROR2}@ has an invalid datatype.'
+        replace['change_arg'] = 'type=MPI_DATATYPE_NULL; /* MBIERROR1 */'
+        gen.make_file(template, f'InvalidParam_DataNull_{c}_nok.c', replace)
+
+
+##################################
+# Generate code with Op  mismatch
+##################################
+
+for c in gen.coll4op + gen.icoll4op:
+    patterns = {}
+    patterns = {'c': c}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+    patterns['collfeature'] = 'Yes' if c in gen.coll4op else 'Lacking'
+    patterns['icollfeature'] = 'Yes' if c in gen.icoll4op else 'Lacking'
+    patterns['toolfeature'] = 'Lacking'
+    patterns['c'] = c
+    patterns['init'] = gen.init[c]("1")
+    patterns['start'] = gen.start[c]("1")
+    patterns['fini'] = gen.fini[c]("1")
+    patterns['operation'] = gen.operation[c]("1")
+    patterns['free'] = gen.free[c]("1")
+    patterns['change_arg'] = ''
+
+    # Generate the incorrect matching (op mismatch)
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Collective @{c}@ with an operator mismatch'
+    replace['longdesc'] = 'Odd ranks use MPI_SUM as the operator while even ranks use MPI_MAX'
+    replace['outcome'] = 'ERROR: OperatorMatching'
+    replace['errormsg'] = 'Collective operator mismatch. @{c}@ at @{filename}@:@{line:MBIERROR2}@ has MPI_MAX or MPI_SUM as an operator.'
+    replace['change_arg'] = 'if (rank % 2)\n    op = MPI_MAX; /* MBIERROR1 */'
+    gen.make_file(template, f'ParamMatching_Op_{c}_nok.c', replace)
+
+    # Generate the call with Op=MPI_OP_NULL (invalid op)
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Collective @{c}@ with an invalid operator '
+    replace['longdesc'] = 'Collective @{c}@ with an invalid operator '
+    replace['outcome'] = 'ERROR: InvalidOperator'
+    replace['errormsg'] = 'Invalid Operator. @{c}@ at @{filename}@:@{line:MBIERROR2}@ has MPI_OP_NULL as an operator.'
+    replace['change_arg'] = 'op = MPI_OP_NULL; /* MBIERROR1 */'
+    gen.make_file(template, f'InvalidParam_OpNull_{c}_nok.c', replace)
diff --git a/scripts/generators/CollComGenerator.py b/scripts/generators/CollComGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..f8431b807d83f5a16879891d5fcadd833798409e
--- /dev/null
+++ b/scripts/generators/CollComGenerator.py
@@ -0,0 +1,117 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+   Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: @{collfeature}@
+  COLL!nonblocking: @{icollfeature}@
+  COLL!persistent: Lacking
+  COLL!tools: Yes
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#define buff_size 128
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int root = 0;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Op op = MPI_SUM;
+  MPI_Datatype type = MPI_INT;
+  MPI_Comm newcom;
+  MPI_Comm_split(MPI_COMM_WORLD, 0, nprocs - rank, &newcom);
+
+  @{change_com}@
+
+  int dbs = sizeof(int)*nprocs; /* Size of the dynamic buffers for alltoall and friends */
+  @{init}@
+  @{start}@
+  @{operation}@ /* MBIERROR */
+  @{fini}@
+  @{free}@
+
+  if(newcom != MPI_COMM_NULL && newcom != MPI_COMM_WORLD)
+    MPI_Comm_free(&newcom);
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
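+# newcom is built with MPI_Comm_split (key nprocs - rank, i.e. reversed ranks);
+# @{change_com}@ then either leaves it untouched (correct code), mixes it with
+# MPI_COMM_WORLD on odd ranks (communicator mismatch), or sets MPI_COMM_NULL.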
+# Generate code with one collective
+for c in gen.coll + gen.icoll + gen.ibarrier:
+    patterns = {}
+    patterns = {'c': c}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+    patterns['collfeature'] = 'Yes' if c in gen.coll else 'Lacking'
+    patterns['icollfeature'] = 'Yes' if c in gen.icoll + gen.ibarrier else 'Lacking'
+    patterns['c'] = c
+    patterns['init'] = gen.init[c]("1")
+    patterns['start'] = gen.start[c]("1")
+    patterns['fini'] = gen.fini[c]("1")
+    patterns['free'] = gen.free[c]("1")
+    patterns['operation'] = gen.operation[c]("1")
+
+    # Generate the correct code => to remove?
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Collective @{c}@ with correct arguments'
+    replace['longdesc'] = f'All ranks in newcom call {c} with correct arguments'
+    replace['outcome'] = 'OK'
+    replace['errormsg'] = ''
+    replace['change_com'] = '/* No error injected here */'
+    gen.make_file(template, f'ParamMatching_Com_{c}_ok.c', replace)
+
+    # Generate the incorrect communicator matching
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Collective @{c}@ with a communicator mismatch'
+    replace['longdesc'] = 'Odd ranks call the collective on newcom while even ranks call the collective on MPI_COMM_WORLD'
+    replace['outcome'] = 'ERROR: CommunicatorMatching'
+    replace['errormsg'] = 'Communicator mismatch in collectives. @{c}@ at @{filename}@:@{line:MBIERROR}@ has newcom or MPI_COMM_WORLD as a communicator.'
+    replace['change_com'] = 'if (rank % 2)\n    newcom = MPI_COMM_WORLD; /* MBIERROR */'
+    gen.make_file(template, f'ParamMatching_Com_{c}_nok.c', replace)
+
+    # Generate the coll with newcom=MPI_COMM_NULL
+    replace = patterns.copy()
+    replace['shortdesc'] = f'Collective @{c}@ with newcom=MPI_COMM_NULL'
+    replace['longdesc'] = f'Collective @{c}@ with newcom=MPI_COMM_NULL'
+    replace['outcome'] = 'ERROR: InvalidCommunicator'
+    replace['errormsg'] = 'Invalid communicator. @{c}@ at @{filename}@:@{line:MBIERROR}@ has MPI_COMM_NULL as a communicator.'
+    replace['change_com'] = 'newcom = MPI_COMM_NULL; /* MBIERROR */'
+    gen.make_file(template, f'InvalidParam_ComNull_{c}_nok.c', replace)
diff --git a/scripts/generators/CollLocalConcurrencyGenerator.py b/scripts/generators/CollLocalConcurrencyGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..dd9ed0810263722c079865712f69b04f55a85eae
--- /dev/null
+++ b/scripts/generators/CollLocalConcurrencyGenerator.py
@@ -0,0 +1,91 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+   Version of MPI: Conforms to MPI 3, requires MPI 3 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: @{icollfeature}@
+  COLL!persistent: @{pcollfeature}@
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#define buff_size 128
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int root = 0;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  int dbs = sizeof(int)*nprocs; /* Size of the dynamic buffers for alltoall and friends */
+  MPI_Op op = MPI_SUM;
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+
+  @{init}@
+  @{start}@
+  @{operation}@
+  @{write}@ /* MBIERROR */
+  @{fini}@
+  @{free}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
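+# Only nonblocking and persistent collectives are generated here: @{write}@
+# modifies the communication buffer between @{operation}@ and its completion in
+# @{fini}@, which is the local concurrency error reported in the header.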
+for c in gen.icoll + gen.pcoll:
+    patterns = {}
+    patterns = {'c': c}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+    patterns['icollfeature'] = 'Yes' if c in gen.icoll else 'Lacking'
+    patterns['pcollfeature'] = 'Yes' if c in gen.pcoll else 'Lacking'
+    patterns['c'] = c
+    patterns['init'] = gen.init[c]("1")
+    patterns['start'] = gen.start[c]("1")
+    patterns['fini'] = gen.fini[c]("1")
+    patterns['operation'] = gen.operation[c]("1")
+    patterns['write'] = gen.write[c]("1")
+    patterns['free'] = gen.free[c]("1")
+
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Local concurrency with a collective'
+    replace['longdesc'] = f'The buffer in {c} is modified before the call has been completed.'
+    replace['outcome'] = 'ERROR: LocalConcurrency'
+    replace['errormsg'] = 'Local Concurrency with a collective. The buffer in @{c}@ is modified at @{filename}@:@{line:MBIERROR}@ whereas there is no guarantee the call has been completed.'
+    gen.make_file(template, f'LocalConcurrency_{c}_nok.c', replace)
diff --git a/scripts/generators/CollMatchingGenerator.py b/scripts/generators/CollMatchingGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..6de1bd8b8728dce4a7fb08e6af12491e34587b45
--- /dev/null
+++ b/scripts/generators/CollMatchingGenerator.py
@@ -0,0 +1,202 @@
+#! /usr/bin/python3
+
+# Copyright 2021-2022. The MBI project. All rights reserved.
+# This program is free software; you can redistribute it and/or modify it under the terms of the license (GNU GPL).
+
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+   Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: @{collfeature}@
+  COLL!nonblocking: @{icollfeature}@
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#define buff_size 128
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int root = 0;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug.\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+  MPI_Op op = MPI_SUM;
+
+  int dbs = sizeof(int)*nprocs; /* Size of the dynamic buffers for alltoall and friends */
+  @{init1}@
+  @{init2}@
+
+  if (@{change_cond}@) {
+    @{operation1a}@ /* MBIERROR1 */
+    @{fini1a}@
+    @{operation2a}@
+    @{fini2a}@
+  } else {
+    @{operation1b}@ /* MBIERROR2 */
+    @{fini1b}@
+    @{operation2b}@
+    @{fini2b}@
+  }
+
+  @{free1}@
+  @{free2}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+for c1 in gen.coll + gen.icoll + gen.ibarrier:
+    for c2 in gen.coll + gen.icoll + gen.ibarrier:
+        patterns = {}
+        patterns = {'c1': c1, 'c2': c2}
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['collfeature'] = 'Yes' if c1 in gen.coll or c2 in gen.coll else 'Lacking'
+        patterns['icollfeature'] = 'Yes' if c1 in gen.icoll + gen.ibarrier or c2 in gen.icoll + gen.ibarrier else 'Lacking'
+        patterns['c1'] = c1
+        patterns['c2'] = c2
+        patterns['init1'] = gen.init[c1]("1")
+        patterns['init2'] = gen.init[c2]("2")
+        patterns['fini1a'] = gen.fini[c1]("1")
+        patterns['fini2a'] = gen.fini[c2]("2")
+        patterns['fini1b'] = gen.fini[c1]("1")
+        patterns['fini2b'] = gen.fini[c2]("2")
+        patterns['free1'] = gen.free[c1]("1")
+        patterns['free2'] = gen.free[c2]("2")
+        patterns['operation1a'] = gen.operation[c1]("1")
+        patterns['operation1b'] = gen.operation[c1]("1")
+        patterns['operation2a'] = gen.operation[c2]("2")
+        patterns['operation2b'] = gen.operation[c2]("2")
+        patterns['change_cond'] = 'rank % 2'
+        shortdesc = ' collective ordering'
+
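+        # change_cond drives the bug: 'rank % 2' sends odd and even ranks into
+        # different branches of the generated if/else, while the correct variants
+        # override it with 'rank < nprocs' so that every rank takes the same branch.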
+        if c1 == c2:
+            # Generate the correct code using the same collective twice
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Correct' + shortdesc
+            replace['longdesc'] = f'All ranks call {c1} twice'
+            replace['outcome'] = 'OK'
+            replace['errormsg'] = ''
+            replace['change_cond'] = 'rank < nprocs'
+            replace['operation1b'] = ''
+            replace['operation2b'] = ''
+            replace['fini1b'] = ''
+            replace['fini2b'] = ''
+            gen.make_file(template, f'CallOrdering_{c1}_{c2}_ok.c', replace)
+            # Generate the correct code using the collective once
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Correct' + shortdesc
+            replace['longdesc'] = f'All ranks call {c1} once'
+            replace['outcome'] = 'OK'
+            replace['errormsg'] = ''
+            replace['init2'] = ''
+            replace['change_cond'] = 'rank < nprocs'
+            replace['operation2a'] = ''
+            replace['operation1b'] = ''
+            replace['operation2b'] = ''
+            replace['fini2a'] = ''
+            replace['fini1b'] = ''
+            replace['fini2b'] = ''
+            replace['free2'] = ''
+            gen.make_file(template, f'CallOrdering_{c1}_ok.c', replace)
+        else:
+            # Generate the correct ordering with two different collectives
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Correct' + shortdesc
+            replace['longdesc'] = f'All ranks call {c1} and then {c2}'
+            replace['outcome'] = 'OK'
+            replace['errormsg'] = ''
+            replace['change_cond'] = 'rank < nprocs'
+            replace['operation1b'] = ''
+            replace['operation2b'] = ''
+            replace['fini1b'] = ''
+            replace['fini2b'] = ''
+            gen.make_file(template, f'CallOrdering_{c1}_{c2}_ok.c', replace)
+            # Generate the incorrect ordering with two different collectives
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Incorrect' + shortdesc
+            replace['longdesc'] = f'Odd ranks call {c1} and then {c2} while even ranks call these collectives in the other order'
+            replace['outcome'] = 'ERROR: CallMatching'
+            replace['errormsg'] = 'Collective mismatch. @{c1}@ at @{filename}@:@{line:MBIERROR1}@ is matched with @{c2}@ at @{filename}@:@{line:MBIERROR2}@.'
+            replace['operation1b'] = gen.operation[c2]("2")  # Inversion
+            replace['operation2b'] = gen.operation[c1]("1")
+            replace['fini1a'] = gen.fini[c1]("1") # Inversion
+            replace['fini2a'] = gen.fini[c2]("2")
+            replace['fini1b'] = gen.fini[c2]("2") # Inversion
+            replace['fini2b'] = gen.fini[c1]("1")
+            replace['free1'] = gen.free[c2]("2")
+            replace['free2'] = gen.free[c1]("1")
+
+            gen.make_file(template, f'CallOrdering_{c1}_{c2}_nok.c', replace)
+
+    # Generate the incorrect ordering with one collective
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Incorrect' + shortdesc
+    replace['longdesc'] = f'Odd ranks call {c1} while even ranks do not call any collective'
+    replace['outcome'] = 'ERROR: CallMatching'
+    replace['errormsg'] = 'Collective mismatch. @{c1}@ at @{filename}@:@{line:MBIERROR1}@ is not matched.'
+    replace['operation1b'] = ''  # Remove functions
+    replace['operation2b'] = ''
+    replace['operation2a'] = ''
+    replace['init2'] = ''
+    replace['fini1b'] = ''
+    replace['fini2a'] = ''
+    replace['fini2b'] = ''
+    replace['free1'] = gen.free[c1]("1")
+    replace['free2'] = ''
+    gen.make_file(template, f'CallOrdering_{c1}_none_nok.c', replace)
+    # Generate a correct ordering with a conditional not depending on ranks
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Correct' + shortdesc
+    replace['longdesc'] = f'All ranks call {c1}'
+    replace['outcome'] = 'OK'
+    replace['errormsg'] = ''
+    replace['change_cond'] = 'rank < nprocs'
+    replace['operation1b'] = '' # Remove functions
+    replace['operation2b'] = ''
+    replace['operation2a'] = ''
+    replace['init2'] = ''
+    replace['fini1b'] = ''
+    replace['fini2a'] = ''
+    replace['fini2b'] = ''
+    replace['free1'] = gen.free[c1]("1")
+    replace['free2'] = ''
+    gen.make_file(template, f'CallOrdering_{c1}_none_ok.c', replace)
diff --git a/scripts/generators/CollP2PMatchingGenerator.py b/scripts/generators/CollP2PMatchingGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..ed1f44fd6e3ac94c2792cfaf1fe36e192b70532a
--- /dev/null
+++ b/scripts/generators/CollP2PMatchingGenerator.py
@@ -0,0 +1,133 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+   Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: Lacking
+  COLL!basic: @{collfeature}@
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int dest, src;
+  int root = 0;
+  int stag = 0, rtag = 0;
+  int buff_size = 1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  int dbs = sizeof(int)*nprocs; /* Size of the dynamic buffers for alltoall and friends */
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+  MPI_Op op = MPI_SUM;
+
+  @{init1}@
+  @{init2}@
+  @{init3}@
+  if (rank == 0) {
+    dest=1;src=1;
+    @{operation3}@ /* MBIERROR1 */
+    @{fini3}@
+    @{operation1}@
+    @{fini1}@
+  }else if (rank==1) {
+    dest=0;src=0;
+    @{operation2}@ /* MBIERROR2 */
+    @{fini2}@
+    @{operation3}@
+    @{fini3}@
+  }
+
+  @{free1}@
+  @{free2}@
+  @{free3}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
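+# Hedged illustration of the deadlock encoded above, assuming @{c}@ expands to a
+# synchronizing collective such as MPI_Allreduce and @{r}@ to a receive that is
+# completed before the collective is reached:
+#   rank 0:  MPI_Allreduce(...);   MPI_Send(to rank 1);
+#   rank 1:  MPI_Recv(from 0);     MPI_Allreduce(...);
+# Rank 1 blocks in the receive until rank 0 sends, while rank 0 cannot leave the
+# collective until rank 1 joins it, so the execution deadlocks.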
+for s in gen.send + gen.isend:
+    for r in gen.recv + gen.irecv:
+        for c in gen.coll:
+            patterns = {}
+            patterns = {'s': s, 'r': r, 'c': c}
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+            patterns['p2pfeature'] = 'Yes' if s in gen.send or r in gen.recv else 'Lacking'
+            patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv else 'Lacking'
+            patterns['collfeature'] = 'Yes' if c in gen.coll else 'Lacking'
+            patterns['s'] = s
+            patterns['r'] = r
+            patterns['c'] = c
+            patterns['init1'] = gen.init[s]("1")
+            patterns['init2'] = gen.init[r]("2")
+            patterns['init3'] = gen.init[c]("3")
+            patterns['fini1'] = gen.fini[s]("1")
+            patterns['fini2'] = gen.fini[r]("2")
+            patterns['fini3'] = gen.fini[c]("3")
+            patterns['free1'] = gen.free[s]("1")
+            patterns['free2'] = gen.free[r]("2")
+            patterns['free3'] = gen.free[c]("3")
+            patterns['operation1'] = gen.operation[s]("1")
+            patterns['operation2'] = gen.operation[r]("2")
+            patterns['operation3'] = gen.operation[c]("3")
+
+            # Generate the incorrect matching because of the conditional
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Point to point & collective mismatch'
+            replace['longdesc'] = 'The @{s}@ that matches the @{r}@ is issued after @{c}@, while the @{r}@ is issued before @{c}@. This is a deadlock.'
+            replace['outcome'] = 'ERROR: CallMatching'
+            replace['errormsg'] = 'P2P & Collective mismatch. @{r}@ at @{filename}@:@{line:MBIERROR2}@ is matched with @{c}@ at @{filename}@:@{line:MBIERROR1}@, which causes a deadlock.'
+            gen.make_file(template, f'CallOrdering_{r}_{s}_{c}_nok.c', replace)
+
+            # Generate the incorrect code depending on buffering
+            #  replace = patterns.copy()
+            #  replace['shortdesc'] = 'Point to point & collective mismatch'
+            #  replace['longdesc'] = 'Point to point @{s}@ is matched with @{c}@ which causes a deadlock depending on the buffering mode.'
+            #  replace['outcome'] = 'ERROR: BufferingHazard'
+            #  replace['errormsg'] = 'P2P & Collective mismatch. @{s}@ at @{filename}@:@{line:MBIERROR2}@ is matched with @{c}@ at @{filename}@:@{line:MBIERROR1}@, which causes a deadlock.'
+            #  replace['init1'] = gen.init[s]("1")
+            #  replace['init2'] = gen.init[r]("2")
+            #  replace['operation1'] = gen.operation[r]("2")
+            #  replace['operation2'] = gen.operation[s]("1")
+            #  replace['fini1'] = gen.fini[r]("2")
+            #  replace['fini2'] = gen.fini[s]("1")
+            #  gen.make_file(template, f'CollP2PBuffering_{r}_{s}_{c}_nok.c', replace)
diff --git a/scripts/generators/CollP2PMessageRaceGenerator.py b/scripts/generators/CollP2PMessageRaceGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..ff23dfab8f0f35e3224ab0adbdfbbe8e3eb3e5db
--- /dev/null
+++ b/scripts/generators/CollP2PMessageRaceGenerator.py
@@ -0,0 +1,152 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: Lacking
+  COLL!basic: @{collfeature}@
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 4 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int dest, src;
+  int i=0;
+  int root = 0;
+  int stag = 0, rtag = 0;
+  int buff_size = 1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs != 4)
+    printf("MBI ERROR: This test needs 4 processes to produce a bug!\\n");
+
+  int dbs = sizeof(int)*nprocs; /* Size of the dynamic buffers for alltoall and friends */
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+  MPI_Op op = MPI_SUM;
+
+
+  @{init1}@
+  @{init2}@
+  @{init3}@
+  @{init4}@
+  if (rank == 0) {
+    dest=1;
+    @{operation1}@
+    @{fini1}@
+    @{operation2}@
+    @{fini2}@
+  }else if (rank==2) {
+    dest=1;
+    @{operation1}@
+    @{fini1}@
+    @{operation2}@
+    @{fini2}@
+  }else if (rank==1) {
+    src = MPI_ANY_SOURCE;
+    rtag = MPI_ANY_TAG;
+    @{operation3}@ /* MBIERROR1 */
+    @{operation1}@
+    @{fini1}@
+    @{changesrc}@
+    @{operation4}@ /* MBIERROR2 */
+    @{fini3}@
+    @{fini4}@
+  }else if (rank==3) {
+    @{operation1}@
+    @{fini1}@
+  }
+
+  @{free1}@
+  @{free2}@
+  @{free3}@
+  @{free4}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
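+# How the race materializes (descriptive note): ranks 0 and 2 each send one
+# message to rank 1, which first posts a wildcard receive (MPI_ANY_SOURCE and
+# MPI_ANY_TAG). In the 'nok' variant, @{changesrc}@ then pins the second receive
+# to src = 0. If the wildcard receive already consumed the message from rank 0,
+# no further message from rank 0 exists and the second receive never completes;
+# if it consumed rank 2's message, the run finishes normally. The outcome depends
+# on message arrival order, hence the MessageRace classification.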
+for s in gen.send + gen.isend:
+    for r in gen.irecv:
+        for c in gen.coll:
+            patterns = {}
+            patterns = {'s': s, 'r': r, 'c': c}
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+            patterns['p2pfeature'] = 'Yes' if s in gen.send or r in gen.recv else 'Lacking'
+            patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv else 'Lacking'
+            patterns['collfeature'] = 'Yes' if c in gen.coll else 'Lacking'
+            patterns['s'] = s
+            patterns['r'] = r
+            patterns['c'] = c
+            patterns['init1'] = gen.init[c]("1")
+            patterns['init2'] = gen.init[s]("2")
+            patterns['init3'] = gen.init[r]("3")
+            patterns['init4'] = gen.init[r]("4")
+            patterns['fini1'] = gen.fini[c]("1")
+            patterns['fini2'] = gen.fini[s]("2")
+            patterns['fini3'] = gen.fini[r]("3")
+            patterns['fini4'] = gen.fini[r]("4")
+            patterns['free1'] = gen.free[c]("1")
+            patterns['free2'] = gen.free[s]("2")
+            patterns['free3'] = gen.free[r]("3")
+            patterns['free4'] = gen.free[r]("4")
+            patterns['operation1'] = gen.operation[c]("1")
+            patterns['operation2'] = gen.operation[s]("2")
+            patterns['operation3'] = gen.operation[r]("3")
+            patterns['operation4'] = gen.operation[r]("4")
+            patterns['changesrc'] = ''
+
+            # Generate the correct matching because of the conditional
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Message race'
+            replace['longdesc'] = 'Harmless message race in @{r}@ with @{c}@: both receives use wildcards, so any matching order completes correctly.'
+            replace['outcome'] = 'OK'
+            replace['errormsg'] = 'OK'
+            replace['changesrc'] = ''
+            gen.make_file(template, f'MessageRace_{c}_{s}_{r}_ok.c', replace)
+
+            # Generate the incorrect matching because of the conditional
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Message race'
+            replace['longdesc'] = 'Message race in @{r}@ with @{c}@.'
+            replace['outcome'] = 'ERROR: MessageRace'
+            replace['errormsg'] = 'Message race. The use of wildcard receive calls (@{r}@ at @{filename}@:@{line:MBIERROR1}@ and @{r}@ at @{filename}@:@{line:MBIERROR2}@) leads to nondeterministic matching.'
+            replace['changesrc'] = 'src = 0;'
+            gen.make_file(template, f'MessageRace_{c}_{s}_{r}_nok.c', replace)
diff --git a/scripts/generators/CollTopoGenerator.py b/scripts/generators/CollTopoGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..7a137bd2aaef951ca63b1398a6530419db8d7293
--- /dev/null
+++ b/scripts/generators/CollTopoGenerator.py
@@ -0,0 +1,126 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+   Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: @{toolfeature}@
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define buff_size 128
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+   MPI_Comm newcom;
+   int dims[2], periods[2], coords[2];
+   int source, dest;
+   dims[0] = 2;
+   dims[1] = 1;
+   periods[0] = 1;
+   periods[1] = 1;
+
+   @{change_dims}@
+
+   MPI_Cart_create(MPI_COMM_WORLD, 2, dims, periods, 0, &newcom); /* create a cartesian communicator */
+
+   @{change_com}@
+
+   @{init}@
+   @{operation}@ /* MBIERROR2 */
+   @{fini}@
+
+   if (newcom != MPI_COMM_NULL)
+     MPI_Comm_free(&newcom);
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
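+# Background for the injected errors (descriptive note): only the communicator
+# returned by MPI_Cart_create carries a Cartesian topology, so the topology call
+# expanded from @{operation}@ is invalid on MPI_COMM_WORLD and on MPI_COMM_NULL.
+# Similarly, MPI_Cart_create requires positive entries in dims, which the last
+# variant below violates.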
+for c in gen.tcoll4topo:
+    patterns = {}
+    patterns = {'c': c}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+    patterns['toolfeature'] = 'Yes' if c in gen.tcoll4topo else 'Lacking'
+    patterns['c'] = c
+    patterns['init'] = gen.init[c]("1")
+    patterns['fini'] = gen.fini[c]("1")
+    patterns['operation'] = gen.operation[c]("1")
+    patterns['change_dims'] = '/* No error injected here */'
+
+    # Generate the correct code
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Function @{c}@ with correct arguments'
+    replace['longdesc'] = f'All ranks in comm call {c} with correct arguments'
+    replace['outcome'] = 'OK'
+    replace['errormsg'] = ''
+    replace['change_com'] = '/* No error injected here */'
+    replace['change_dims'] = '/* No error injected here */'
+    gen.make_file(template, f'InvalidParam_{c}_ok.c', replace)
+
+    # Generate the incorrect code
+    replace = patterns.copy()
+    replace['shortdesc'] = 'The code tries to get Cartesian information about MPI_COMM_WORLD.'
+    replace['longdesc'] = 'The code creates a Cartesian communicator and then tries to get Cartesian information about MPI_COMM_WORLD.'
+    replace['outcome'] = 'ERROR: InvalidCommunicator'
+    replace['errormsg'] = 'Invalid Communicator in a collective. @{c}@ at @{filename}@:@{line:MBIERROR2}@ tries to get Cartesian information about MPI_COMM_WORLD.'
+    replace['change_com'] = 'newcom = MPI_COMM_WORLD; /* MBIERROR1 */'
+    gen.make_file(template, f'InvalidParam_Com_{c}_nok.c', replace)
+
+    # Generate the code with newcom=MPI_COMM_NULL
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Function @{c}@ called with comm=MPI_COMM_NULL'
+    replace['longdesc'] = 'Function @{c}@ called with comm=MPI_COMM_NULL'
+    replace['outcome'] = 'ERROR: InvalidCommunicator'
+    replace['errormsg'] = 'Invalid communicator. @{c}@ at @{filename}@:@{line:MBIERROR2}@ has MPI_COMM_NULL as a communicator.'
+    replace['change_com'] = 'newcom = MPI_COMM_NULL; /* MBIERROR1 */'
+    gen.make_file(template, f'InvalidParam_ComNull_{c}_nok.c', replace)
+
+    # Generate the code with invalid dimension
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Creates a Cartesian communicator with a negative entry in the dims argument'
+    replace['longdesc'] = 'Creates a Cartesian communicator with a negative entry in the dims argument, which is a usage error'
+    replace['outcome'] = 'ERROR: InvalidOtherArg'
+    replace['errormsg'] = 'Invalid Argument. MPI_Cart_create has invalid dimensions.'
+    replace['change_com'] = ""
+    replace['change_dims'] = 'dims[0] = -2; dims[1] = -1; /* MBIERROR1 */'
+    gen.make_file(template, 'InvalidParam_Dim_MPI_Cart_create_nok.c', replace)
diff --git a/scripts/generators/InputHazardGenerator.py b/scripts/generators/InputHazardGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..899e810f969afdd72ed1d4cf57825fb6aa46853b
--- /dev/null
+++ b/scripts/generators/InputHazardGenerator.py
@@ -0,0 +1,188 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: Lacking
+  COLL!basic: @{collfeature}@
+  COLL!nonblocking: @{icollfeature}@
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE} 1
+  | @{outcome}@
+  | @{errormsg}@
+  $ mpirun -np 2 ${EXE} 2
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 10
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  MPI_Status sta;
+  int i=0;
+  int root = 0;
+  int stag=0;
+  int rtag=0;
+  int buff_size = N;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  if (argc < 2)
+    printf("MBI ERROR: This test needs at least 1 argument to produce a bug!\\n");
+
+  int dbs = sizeof(int)*nprocs; /* Size of the dynamic buffers for alltoall and friends */
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+  MPI_Op op = MPI_SUM;
+
+  int n = atoi(argv[1]);
+  int buffer[N] = {42};
+
+  @{init1}@
+  @{init2}@
+
+  if (rank == 0) {
+    if ((n % 2) == 0) { @{errorcond}@
+      @{operation1b}@
+      @{fini1b}@
+    } else {
+      @{operation1a}@
+      @{fini1a}@
+    }
+  } else @{addcond}@ {
+    @{operation2}@
+    @{fini2}@
+  }
+
+  @{free1}@
+  @{free2}@
+
+  MPI_Finalize();
+
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
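+# Why this is an input hazard (descriptive note): the branch taken by rank 0
+# depends on the parity of argv[1], which is why the MBI header above runs each
+# binary twice, with arguments 1 and 2. In the 'nok' variants the communication
+# on the even path is removed, so only one of the two inputs deadlocks.
+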
+# P2P
+for s in gen.send + gen.isend:
+    for r in gen.recv + gen.irecv:
+        patterns = {}
+        patterns = {'s': s, 'r': r}
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['p2pfeature'] = 'Yes' if s in gen.send or r in gen.recv else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv else 'Lacking'
+        patterns['collfeature'] = 'Lacking'
+        patterns['icollfeature'] = 'Lacking'
+        patterns['s'] = s
+        patterns['r'] = r
+
+        patterns['init1'] = gen.init[s]("1")
+        patterns['operation1a'] = gen.operation[s]("1").replace("buf1", "buffer").replace("dest", "1")
+        patterns['operation1b'] = gen.operation[s]("1").replace("buf1", "buffer").replace("dest", "1")
+        patterns['fini1a'] = gen.fini[s]("1")
+        patterns['fini1b'] = gen.fini[s]("1")
+        patterns['free1'] = gen.free[s]("1")
+
+        patterns['init2'] = gen.init[r]("2")
+        patterns['operation2'] = gen.operation[r]("2").replace("buf2", "buffer").replace("src", "0")
+        patterns['fini2'] = gen.fini[r]("2")
+        patterns['free2'] = gen.free[r]("2")
+
+        patterns['errorcond'] = ''
+        patterns['addcond'] = 'if (rank == 1)'
+
+        # Generate a correct matching
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Correct call ordering.'
+        replace['longdesc'] = 'Correct call ordering.'
+        replace['outcome'] = 'OK'
+        replace['errormsg'] = 'OK'
+        gen.make_file(template, f'InputHazardCallOrdering_{r}_{s}_ok.c', replace)
+
+        # Generate the incorrect matching
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Missing Send function.'
+        replace['longdesc'] = 'Missing Send function call on a path that depends on the input, which creates a deadlock.'
+        replace['outcome'] = 'ERROR: IHCallMatching'
+        replace['errormsg'] = 'P2P mismatch. Missing @{s}@ at @{filename}@:@{line:MBIERROR}@.'
+        replace['errorcond'] = '/* MBIERROR */'
+        replace['operation1b'] = ''
+        replace['fini1b'] = ''
+        gen.make_file(template, f'InputHazardCallOrdering_{r}_{s}_nok.c', replace)
+
+# COLLECTIVE
+for c in gen.coll:
+    patterns = {}
+    patterns = {'c': c}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+    patterns['p2pfeature'] = 'Lacking'
+    patterns['ip2pfeature'] = 'Lacking'
+    patterns['collfeature'] = 'Yes' if c in gen.coll else 'Lacking'
+    patterns['icollfeature'] = 'Yes' if c in gen.icoll else 'Lacking'
+    patterns['c'] = c
+
+    patterns['init1'] = gen.init[c]("1")
+    patterns['operation1a'] = gen.operation[c]("1")
+    patterns['operation1b'] = gen.operation[c]("1")
+    patterns['fini1a'] = gen.fini[c]("1")
+    patterns['fini1b'] = gen.fini[c]("1")
+    patterns['free1'] = gen.free[c]("1")
+
+    patterns['init2'] = gen.init[c]("2")
+    patterns['operation2'] = gen.operation[c]("2")
+    patterns['fini2'] = gen.fini[c]("2")
+    patterns['free2'] = gen.free[c]("2")
+
+    patterns['errorcond'] = ''
+    patterns['addcond'] = ''
+
+    # Generate a correct matching
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Correct call ordering.'
+    replace['longdesc'] = 'Correct call ordering.'
+    replace['outcome'] = 'OK'
+    replace['errormsg'] = 'OK'
+    gen.make_file(template, f'InputHazardCallOrdering_{c}_ok.c', replace)
+
+    # Generate the incorrect matching
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Missing collective function call.'
+    replace['longdesc'] = 'Missing collective function call on a path that depends on the input, which creates a deadlock.'
+    replace['outcome'] = 'ERROR: IHCallMatching'
+    replace['errormsg'] = 'Collective mismatch. Missing @{c}@ at @{filename}@:@{line:MBIERROR}@.'
+    replace['errorcond'] = '/* MBIERROR */'
+    replace['operation1b'] = ''
+    replace['fini1b'] = ''
+    gen.make_file(template, f'InputHazardCallOrdering_{c}_nok.c', replace)
diff --git a/scripts/generators/MissingWaitandStartGenerator.py b/scripts/generators/MissingWaitandStartGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..2a12cab8992fe7816b12acca23a41eed67d0369b
--- /dev/null
+++ b/scripts/generators/MissingWaitandStartGenerator.py
@@ -0,0 +1,201 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+   Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: @{persfeature}@
+  COLL!basic: Lacking
+  COLL!nonblocking: @{icollfeature}@
+  COLL!persistent: @{cpersfeature}@
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int root = 0;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+  MPI_Op op = MPI_SUM;
+  int stag = 0, rtag = 0;
+  int buff_size = 1;
+
+  int dbs = sizeof(int)*nprocs; /* Size of the dynamic buffers for alltoall and friends */
+
+  int dest = (rank == nprocs - 1) ? (0) : (rank + 1);
+  int src = (rank == 0) ? (nprocs - 1) : (rank - 1);
+
+  @{init1}@
+  @{init2}@
+
+  @{operation1}@ /* MBIERROR */
+  @{start1}@
+  @{operation2}@
+  @{start2}@
+
+  @{fini1}@
+  @{fini2}@
+
+  @{free1}@
+  @{free2}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
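+# Request lifecycle recap (descriptive note): a persistent request created by
+# MPI_Send_init/MPI_Recv_init only becomes active after MPI_Start, is completed
+# by a wait/test call, and must eventually be released with MPI_Request_free;
+# a nonblocking operation likewise needs a completion call. The variants below
+# drop exactly one of these steps to produce the MissingStart, MissingWait and
+# RequestLeak errors.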
+for s in gen.isend + gen.psend:
+    for r in gen.irecv + gen.precv:
+        patterns = {}
+        patterns = {'s': s, 'r': r}
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['persfeature'] = 'Yes' if s in gen.psend or r in gen.precv  else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv  else 'Lacking'
+        patterns['icollfeature'] = 'Lacking'
+        patterns['cpersfeature'] = 'Lacking'
+        patterns['s'] = s
+        patterns['r'] = r
+        patterns['init1'] = gen.init[s]("1")
+        patterns['init2'] = gen.init[r]("2")
+        patterns['start1'] = gen.start[s]("1")
+        startPers = patterns['start1']
+        patterns['start2'] = gen.start[r]("2")
+        patterns['operation1'] = gen.operation[s]("1")
+        patterns['operation2'] = gen.operation[r]("2")
+        patterns['fini1'] = gen.fini[s]("1")
+        wait = patterns['fini1']
+        patterns['fini2'] = gen.fini[r]("2")
+        patterns['free1'] = gen.free[s]("1")
+        Reqfree = patterns['free1']
+        patterns['free2'] = gen.free[r]("2")
+
+        # Generate the correct code
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Correct matching'
+        replace['longdesc'] = 'No error'
+        replace['outcome'] = 'OK'
+        replace['errormsg'] = 'OK'
+        gen.make_file(template, f'ReqLifecycle_{s}_{r}_ok.c', replace)
+
+        # Generate the code with a missing wait
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Missing wait'
+        replace['longdesc'] = 'Missing Wait. @{s}@ at @{filename}@:@{line:MBIERROR}@ has no completion.'
+        replace['outcome'] = 'ERROR: MissingWait'
+        replace['errormsg'] = 'ERROR: MissingWait'
+        replace['fini1'] = ' /* MBIERROR MISSING: ' + wait + ' */'
+        gen.make_file(template, f'ReqLifecycle_MissingWait_{s}_{r}_nok.c', replace)
+
+        if s in gen.psend:
+            # Generate the code with a missing start - persistent only
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Missing start'
+            replace['longdesc'] = 'Missing start. @{s}@ at @{filename}@:@{line:MBIERROR}@ has no start'
+            replace['outcome'] = 'ERROR: MissingStart'
+            replace['errormsg'] = 'ERROR: MissingStart'
+            replace['fini1'] = gen.fini[s]("1")
+            replace['start1'] = ' /* MBIERROR MISSING: ' + startPers + ' */'
+            gen.make_file(template, f'ReqLifecycle_MissingStart_{s}_{r}_nok.c', replace)
+            # Generate the code with a missing free - persistent only
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Missing free'
+            replace['longdesc'] = 'Missing free. @{s}@ at @{filename}@:@{line:MBIERROR}@ has no free'
+            replace['outcome'] = 'ERROR: RequestLeak'
+            replace['errormsg'] = 'ERROR: RequestLeak'
+            replace['start1'] = gen.start[s]("1")
+            replace['free1'] = ' /* MBIERROR MISSING: ' + Reqfree + ' */'
+            gen.make_file(template, f'ResLeak_nofree_{s}_{r}_nok.c', replace)
+
+
+# Collectives only
+for c in gen.pcoll + gen.icoll + gen.ibarrier:
+    patterns = {}
+    patterns = {'c': c}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+    patterns['persfeature'] = 'Lacking'
+    patterns['ip2pfeature'] = 'Lacking'
+    patterns['cpersfeature'] = 'Yes' if c in gen.pcoll else 'Lacking'
+    patterns['icollfeature'] = 'Yes' if c in gen.icoll + gen.ibarrier else 'Lacking'
+    patterns['c'] = c
+    patterns['init1'] = gen.init[c]("1")
+    patterns['operation1'] = gen.operation[c]("1")
+    patterns['start1'] = gen.start[c]("1")
+    patterns['fini1'] = gen.fini[c]("1")
+    patterns['free1'] = gen.free[c]("1")
+    opstart = patterns['start1']
+    opwait = patterns['fini1']
+    opfree = patterns['free1']
+    patterns['init2'] = ""
+    patterns['operation2'] = ""
+    patterns['start2'] = ""
+    patterns['fini2'] = ""
+    patterns['free2'] = ""
+
+    # Generate the code with a missing wait
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Missing wait'
+    replace['longdesc'] = 'Missing Wait. @{c}@ at @{filename}@:@{line:MBIERROR}@ has no completion'
+    replace['outcome'] = 'ERROR: MissingWait'
+    replace['errormsg'] = 'ERROR: MissingWait'
+    replace['fini1'] = ' /* MBIERROR MISSING: ' + opwait + ' */'
+    replace['free1'] = ' /* MISSING: ' + replace['free1'] + ' (so as not to free the buffer before an internal wait) */'
+    gen.make_file(template, f'ReqLifecycle_MissingWait_{c}_nok.c', replace)
+
+    if c in gen.pcoll:
+        # Generate the code with a missing start - persistent only
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Missing start'
+        replace['longdesc'] = 'Missing Start. @{c}@ at @{filename}@:@{line:MBIERROR}@ has no start'
+        replace['outcome'] = 'ERROR: MissingStart'
+        replace['errormsg'] = 'ERROR: MissingStart'
+        replace['fini1'] = gen.fini[c]("1")
+        replace['start1'] = ' /* MBIERROR MISSING: ' + opstart + ' */'
+        gen.make_file(template, f'ReqLifecycle_MissingStart_{c}_nok.c', replace)
+
+        # Generate the code with a resleak (no free) - persistent only
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Missing free'
+        replace['longdesc'] = 'Missing free. @{c}@ at @{filename}@:@{line:MBIERROR}@ has no free'
+        replace['outcome'] = 'ERROR: RequestLeak'
+        replace['errormsg'] = 'ERROR: RequestLeak'
+        replace['start1'] = gen.start[c]("1")
+        replace['free1'] = ' /* MBIERROR MISSING: ' + opfree + ' */'
+        gen.make_file(template, f'ResLeak_nofree_{c}_nok.c', replace)
diff --git a/scripts/generators/P2PArgGenerator.py b/scripts/generators/P2PArgGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..ae0f2d0783bae3fb2114671c6f057beefb80c1f3
--- /dev/null
+++ b/scripts/generators/P2PArgGenerator.py
@@ -0,0 +1,182 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+   Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: @{persfeature}@
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int src=0, dest=1;
+  int stag=0, rtag=0;
+  int buff_size = 1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+
+  @{change_arg}@
+
+  @{init1}@
+  @{init2}@
+  if (rank == 0) {
+    @{operation1}@ /* MBIERROR1 */
+    @{start1}@
+    @{fini1}@
+  }else if (rank == 1) {
+    @{operation2}@ /* MBIERROR2 */
+    @{start2}@
+    @{fini2}@
+  }
+  @{free1}@
+  @{free2}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
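+# Note on the injected argument errors (descriptive): MPI matches messages on
+# their type signature, so pairing MPI_FLOAT on one side with MPI_INT on the
+# other is erroneous even though both types are typically 4 bytes wide. Valid
+# tags lie in the range 0..MPI_TAG_UB (receives may also use MPI_ANY_TAG), so
+# the negative stag/rtag values used below are invalid as well.
+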
+##################################
+# Generate code with type mismatch
+##################################
+
+for p1 in gen.allsend:
+    for p2 in gen.allrecv:
+        patterns = {}
+        patterns = {'p1': p1, 'p2': p2}
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['p2pfeature'] = 'Yes' if p1 in gen.send + gen.ssend + gen.bsend or p2 in gen.recv  else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if p1 in gen.isend or p2 in gen.irecv  else 'Lacking'
+        patterns['persfeature'] = 'Yes' if p1 in gen.psend or p2 in gen.precv  else 'Lacking'
+        patterns['p1'] = p1
+        patterns['p2'] = p2
+        patterns['init1'] = gen.init[p1]("1")
+        patterns['init2'] = gen.init[p2]("2")
+        patterns['start1'] = gen.start[p1]("1")
+        patterns['start2'] = gen.start[p2]("2")
+        patterns['fini1'] = gen.fini[p1]("1")
+        patterns['fini2'] = gen.fini[p2]("2")
+        patterns['operation1'] = gen.operation[p1]("1") #send
+        patterns['operation2'] = gen.operation[p2]("2") #recv
+        patterns['free1'] = gen.free[p1]("1")
+        patterns['free2'] = gen.free[p2]("2")
+
+        # Generate the incorrect matching
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Point to point @{p1}@ and @{p2}@ have a datatype mismatch'
+        replace['longdesc'] = 'Process 0 uses MPI_FLOAT as the datatype while process 1 uses MPI_INT.'
+        replace['outcome'] = 'ERROR: DatatypeMatching'
+        replace['errormsg'] = 'P2P Datatype mismatch. @{p1}@ at @{filename}@:@{line:MBIERROR1}@ and @{p2}@ at @{filename}@:@{line:MBIERROR2}@ use MPI_FLOAT and MPI_INT as datatypes.'
+        replace['change_arg'] = 'if (rank == 0)\n    type = MPI_FLOAT; /* MBIERROR3 */'
+        gen.make_file(template, f'ParamMatching_Data_{p1}_{p2}_nok.c', replace)
+
+        # Generate code with a null type
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Use of invalid datatype in point-to-point communication'
+        replace['longdesc'] = 'Point to point @{p1}@ and @{p2}@ have MPI_DATATYPE_NULL as a type'
+        replace['outcome'] = 'ERROR: InvalidDatatype'
+        replace['errormsg'] = 'Invalid datatype in P2P. @{p1}@ at @{filename}@:@{line:MBIERROR1}@ and @{p2}@ at @{filename}@:@{line:MBIERROR2}@ have MPI_DATATYPE_NULL as a type'
+        replace['change_arg'] = 'type = MPI_DATATYPE_NULL; /* MBIERROR3 */'
+        gen.make_file(template, f'InvalidParam_DatatypeNull_{p1}_{p2}_nok.c', replace)
+
+        # Generate code with an invalid datatype
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Use of invalid datatype in point-to-point communication'
+        replace['longdesc'] = 'Point to point @{p1}@ and @{p2}@ have an invalid datatype'
+        replace['outcome'] = 'ERROR: InvalidDatatype'
+        replace['errormsg'] = 'Invalid datatype in P2P. @{p1}@ at @{filename}@:@{line:MBIERROR1}@ and @{p2}@ at @{filename}@:@{line:MBIERROR2}@ have an invalid datatype'
+        replace['change_arg'] = 'MPI_Type_contiguous (2, MPI_INT, &type); MPI_Type_commit(&type);MPI_Type_free(&type); /* MBIERROR3 */'
+        gen.make_file(template, f'InvalidParam_Datatype_{p1}_{p2}_nok.c', replace)
+
+#################################
+# Generate code with tag mismatch
+#################################
+
+for p1 in gen.allsend:
+    for p2 in gen.allrecv:
+        patterns = {}
+        patterns = {'p1': p1, 'p2': p2}
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['p2pfeature'] = 'Yes' if p1 in gen.send + gen.ssend + gen.bsend or p2 in gen.recv  else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if p1 in gen.isend or p2 in gen.irecv  else 'Lacking'
+        patterns['persfeature'] = 'Yes' if p1 in gen.psend or p2 in gen.precv  else 'Lacking'
+        patterns['p1'] = p1
+        patterns['p2'] = p2
+        patterns['init1'] = gen.init[p1]("1")
+        patterns['init2'] = gen.init[p2]("2")
+        patterns['start1'] = gen.start[p1]("1")
+        patterns['start2'] = gen.start[p2]("2")
+        patterns['fini1'] = gen.fini[p1]("1")
+        patterns['fini2'] = gen.fini[p2]("2")
+        patterns['operation1'] = gen.operation[p1]("1") #send
+        patterns['operation2'] = gen.operation[p2]("2") #recv
+        patterns['free1'] = gen.free[p1]("1")
+        patterns['free2'] = gen.free[p2]("2")
+        patterns['change_arg'] = ""
+
+        # Generate the incorrect tag matching
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Point to point @{p1}@ and @{p2}@ have a tag mismatch'
+        replace['longdesc'] = 'Point to point @{p1}@ and @{p2}@ have a tag mismatch.'
+        replace['outcome'] = 'ERROR: TagMatching'
+        replace['errormsg'] = 'P2P tag mismatch. @{p1}@ at @{filename}@:@{line:MBIERROR1}@ and @{p2}@ at @{filename}@:@{line:MBIERROR2}@ use different tags.'
+        replace['change_arg'] = 'stag=0; rtag=1;/* MBIERROR */'
+        gen.make_file(template, f'ParamMatching_Tag_{p1}_{p2}_nok.c', replace)
+
+        # Generate the code with an invalid tag
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Point to point @{p1}@ and @{p2}@ have an invalid tag'
+        replace['longdesc'] = 'Point to point @{p1}@ and @{p2}@ have an invalid tag.'
+        replace['outcome'] = 'ERROR: InvalidTag'
+        replace['errormsg'] = 'Invalid Tag. @{p1}@ at @{filename}@:@{line:MBIERROR1}@ and @{p2}@ at @{filename}@:@{line:MBIERROR2}@ use an invalid tag.'
+        replace['change_arg'] = 'stag=-1; rtag=-2;/* MBIERROR */'
+        gen.make_file(template, f'InvalidParam_Tag_{p1}_{p2}_nok.c', replace)
+
+        # Generate a correct code using MPI_ANY_TAG
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Correct code'
+        replace['longdesc'] = 'Correct code'
+        replace['outcome'] = 'OK'
+        replace['errormsg'] = 'OK'
+        replace['change_arg'] = 'rtag=MPI_ANY_TAG;'
+        gen.make_file(template, f'ParamMatching_Tag_{p1}_{p2}_ok.c', replace)
diff --git a/scripts/generators/P2PBufferingGenerator.py b/scripts/generators/P2PBufferingGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..05253b87380cb19bc9ec3a39ccd08116d2c643ef
--- /dev/null
+++ b/scripts/generators/P2PBufferingGenerator.py
@@ -0,0 +1,188 @@
+#! /usr/bin/python3
+
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: @{origin}@
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 4 $zero_buffer ${EXE}
+  | @{outcome1}@
+  | @{errormsg1}@
+  $ mpirun -np 4 $infty_buffer ${EXE}
+  | @{outcome1}@
+  | @{errormsg1}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int dest, src;
+  int stag = 0, rtag = 0;
+  int buff_size = 1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 4)
+    printf("MBI ERROR: This test needs at least 4 processes to produce a bug!\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+
+  @{init1}@
+  @{init2}@
+  if (rank == 0) {
+    src=@{src1}@,dest=@{dest1}@;
+    @{operation1a}@ /* MBIERROR1 */
+    @{fini1a}@
+    @{operation2a}@
+    @{fini2a}@
+  }else if (rank == 1) {
+    src=@{src2}@,dest=@{dest2}@;
+    @{operation1b}@ /* MBIERROR2 */
+    @{fini1b}@
+    @{operation2b}@
+    @{fini2b}@
+  }else{
+    src=@{src3}@,dest=@{dest3}@;
+    @{operation1c}@
+    @{fini1c}@
+    @{operation2c}@
+    @{fini2c}@
+  }
+  @{free1}@
+  @{free2}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
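+# Why each test is run twice (descriptive note): whether a standard-mode send
+# returns before the matching receive is posted depends on how much buffering
+# the MPI implementation provides. The $zero_buffer run (presumably forcing
+# rendezvous behaviour) makes the send/send patterns below deadlock, while the
+# $infty_buffer run lets the sends complete eagerly, which is why these cases
+# are classified as BufferingHazard.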
+for s in gen.send + gen.isend:
+    for r in gen.recv + gen.irecv:
+        patterns = {}
+        patterns = {'s': s, 'r': r}
+        patterns['origin'] = 'MBI'
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['p2pfeature'] = 'Yes' if s in gen.send or r in gen.recv  else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv  else 'Lacking'
+        patterns['s'] = s
+        patterns['r'] = r
+        patterns['src1'] = '1'
+        patterns['dest1'] = '1'
+        patterns['src2'] = '0'
+        patterns['dest2'] = '0'
+        patterns['src3'] = '0'
+        patterns['dest3'] = '0'
+        patterns['init1'] = gen.init[s]("1")
+        patterns['init2'] = gen.init[r]("2")
+        patterns['fini1a'] = gen.fini[s]("1")
+        patterns['fini1b'] = gen.fini[s]("1")
+        patterns['fini1c'] = ''
+        patterns['fini2a'] = gen.fini[r]("2")
+        patterns['fini2b'] = gen.fini[r]("2")
+        patterns['fini2c'] = ''
+        patterns['free1'] = gen.free[s]("1")
+        patterns['free2'] = gen.free[r]("2")
+        patterns['operation1a'] = gen.operation[s]("1")
+        patterns['operation2a'] = gen.operation[r]("2")
+        patterns['operation1b'] = gen.operation[s]("1")
+        patterns['operation2b'] = gen.operation[r]("2")
+        patterns['operation1c'] = ''
+        patterns['operation2c'] = ''
+
+        # Generate the incorrect matching depending on the buffering mode (send + recv)
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Point to point @{s}@ and @{r}@ may not be matched'
+        replace['longdesc'] = 'Processes 0 and 1 both call @{s}@ and @{r}@. This results in a deadlock depending on the buffering mode'
+        replace['outcome1'] = 'ERROR: BufferingHazard'
+        replace['errormsg1'] = f'Buffering Hazard. Possible deadlock, depending on the buffer size of the MPI implementation and the system environment, caused by two processes calling {s} before {r}.'
+        gen.make_file(template, f'P2PBuffering_{s}_{r}_{s}_{r}_nok.c', replace)
+
+        # Generate the incorrect matching with send message to the same process depending on the buffering mode (send + recv)
+        replace = patterns.copy()
+        replace['origin'] = 'RTED'
+        replace['src1'] = '0'
+        replace['dest1'] = '0'
+        replace['src2'] = '1'
+        replace['dest2'] = '1'
+        replace['shortdesc'] = 'Point to point @{s}@ and @{r}@ may not be matched'
+        replace['longdesc'] = 'Processes 0 and 1 each send a message to themselves with @{s}@ before receiving it with @{r}@. This results in a deadlock depending on the buffering mode.'
+        replace['outcome1'] = 'ERROR: BufferingHazard'
+        replace['errormsg1'] = f'Buffering Hazard. Possible deadlock, depending on the buffer size of the MPI implementation and the system environment, caused by each process sending a message to itself.'
+        gen.make_file(template, f'P2PBuffering_SameProcess_{s}_{r}_nok.c', replace)
+
+        # Generate the incorrect matching with circular send message depending on the buffering mode (send + recv)
+        replace = patterns.copy()
+        replace['origin'] = 'RTED'
+        replace['src1'] = '(nprocs - 1)'
+        replace['dest1'] = '1'
+        replace['src2'] = '0'
+        replace['dest2'] = '2'
+        replace['src3'] = '(rank - 1)'
+        replace['dest3'] = '((rank + 1) % nprocs)'
+        replace['fini1c'] = gen.fini[s]("1")
+        replace['fini2c'] = gen.fini[r]("2")
+        replace['operation1c'] = gen.operation[s]("1") + ' /* MBIERROR3 */'
+        replace['operation2c'] = gen.operation[r]("2")
+        replace['shortdesc'] = 'Point to point @{s}@ and @{r}@ may not be matched'
+        replace['longdesc'] = 'Each process sends a message to the next process in a ring with @{s}@ before receiving with @{r}@. This results in a deadlock depending on the buffering mode.'
+        replace['outcome1'] = 'ERROR: BufferingHazard'
+        replace['errormsg1'] = f'Buffering Hazard. Possible deadlock, depending on the buffer size of the MPI implementation and the system environment, caused by a circular chain of sends.'
+        gen.make_file(template, f'P2PBuffering_Circular_{s}_{r}_nok.c', replace)
+
+        # Generate the incorrect matching depending on the buffering mode (recv + send)
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Point to point @{s}@ and @{r}@ are not matched'
+        replace['longdesc'] = 'Processes 0 and 1 both call @{r}@ and @{s}@. This results in a deadlock'
+        replace['outcome1'] = 'ERROR: CallMatching'
+        replace['errormsg1'] = 'ERROR: CallMatching'
+        replace['operation1a'] = gen.operation[r]("2")
+        replace['fini1a'] = gen.fini[r]("2")
+        replace['operation2a'] = gen.operation[s]("1")
+        replace['fini2a'] = gen.fini[s]("1")
+        replace['operation1b'] = gen.operation[r]("2")
+        replace['fini1b'] = gen.fini[r]("2")
+        replace['operation2b'] = gen.operation[s]("1")
+        replace['fini2b'] = gen.fini[s]("1")
+        gen.make_file(template, f'P2PCallMatching_{r}_{s}_{r}_{s}_nok.c', replace)
+
+        # Generate the correct matching
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Point to point @{s}@ and @{r}@ are correctly matched'
+        replace['longdesc'] = 'Process 0 calls @{s}@ and process 1 calls @{r}@.'
+        replace['outcome1'] = 'OK'
+        replace['errormsg1'] = 'OK'
+        replace['fini1a'] = gen.fini[s]("1")
+        replace['fini2a'] = gen.fini[r]("2")
+        replace['operation1a'] = gen.operation[s]("1")
+        replace['operation2a'] = gen.operation[r]("2")
+        gen.make_file(template, f'P2PCallMatching_{s}_{r}_{r}_{s}_ok.c', replace)
diff --git a/scripts/generators/P2PComGenerator.py b/scripts/generators/P2PComGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..6382267aa9f917faf751f0dbb7b82cd70ba32d12
--- /dev/null
+++ b/scripts/generators/P2PComGenerator.py
@@ -0,0 +1,154 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: @{origin}@
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+   Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: @{persfeature}@
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Yes
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int src=0, dest=1;
+  int stag = 0, rtag = 0;
+  int buff_size = 1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Datatype type = MPI_INT;
+  MPI_Comm newcom;
+  MPI_Comm_split(MPI_COMM_WORLD, 0, nprocs - rank, &newcom);
+  @{change_com}@
+  @{change_srcdest}@
+
+  @{init1}@
+  @{init2}@
+  if (rank == 0) {
+    @{operation1}@ /* MBIERROR1 */
+    @{start1}@
+    @{fini1}@
+  }else if (rank == 1) {
+    @{operation2}@ /* MBIERROR2 */
+    @{start2}@
+    @{fini2}@
+  }
+  @{free1}@
+  @{free2}@
+
+  if(newcom != MPI_COMM_NULL && newcom != MPI_COMM_WORLD)
+    MPI_Comm_free(&newcom);
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
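+# Note on newcom (descriptive): every process calls MPI_Comm_split with color 0,
+# so newcom contains all ranks, but the key nprocs - rank reverses the rank
+# order with respect to MPI_COMM_WORLD. Since messages only match within a
+# single communicator, a send posted on MPI_COMM_WORLD can never be received on
+# newcom, which is the mismatch injected in the first 'nok' variant below.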
+for p1 in gen.send + gen.isend + gen.psend:
+    for p2 in gen.recv + gen.irecv + gen.precv:
+        patterns = {}
+        patterns = {'p1': p1, 'p2': p2}
+        patterns['origin'] = "MBI"
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['p2pfeature'] = 'Yes' if p1 in gen.send or p2 in gen.recv  else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if p1 in gen.isend or p2 in gen.irecv  else 'Lacking'
+        patterns['persfeature'] = 'Yes' if p1 in gen.psend or p2 in gen.precv  else 'Lacking'
+        patterns['p1'] = p1
+        patterns['p2'] = p2
+        patterns['init1'] = gen.init[p1]("1")
+        patterns['init2'] = gen.init[p2]("2")
+        patterns['start1'] = gen.start[p1]("1")
+        patterns['start2'] = gen.start[p2]("2")
+        patterns['fini1'] = gen.fini[p1]("1")
+        patterns['fini2'] = gen.fini[p2]("2")
+        patterns['operation1'] = gen.operation[p1]("1") #send
+        patterns['operation2'] = gen.operation[p2]("2") #recv
+        patterns['free1'] = gen.free[p1]("1")
+        patterns['free2'] = gen.free[p2]("2")
+        patterns['change_srcdest'] = ""
+        patterns['change_com'] = ""
+
+        # Generate the incorrect matching
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Point to point @{p1}@ and @{p2}@ have a communicator mismatch'
+        replace['longdesc'] = 'Process 1 uses newcom as the communicator while process 0 uses MPI_COMM_WORLD.'
+        replace['outcome'] = 'ERROR: CommunicatorMatching'
+        replace['errormsg'] = 'P2P Communicator mismatch. @{p1}@ at @{filename}@:@{line:MBIERROR1}@ and @{p2}@ at @{filename}@:@{line:MBIERROR2}@ use mismatched communicators (MPI_COMM_WORLD and newcom).'
+        replace['change_com'] = 'if (rank==0)\n    newcom = MPI_COMM_WORLD; /* MBIERROR */'
+        gen.make_file(template, f'ParamMatching_Com_{p1}_{p2}_nok.c', replace)
+
+        # Generate the code with an invalid communicator
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Point to point @{p1}@ and @{p2}@ have an invalid communicator'
+        replace['longdesc'] = 'Point to point @{p1}@ and @{p2}@ have an invalid communicator.'
+        replace['outcome'] = 'ERROR: InvalidCommunicator'
+        replace['errormsg'] = 'Invalid Communicator. @{p1}@ at @{filename}@:@{line:MBIERROR1}@ and @{p2}@ at @{filename}@:@{line:MBIERROR2}@ use a communicator that is freed at line @{line:MBIERROR}@.'
+        replace['change_com'] = 'MPI_Comm_free(&newcom);  /* MBIERROR */'
+        gen.make_file(template, f'InvalidParam_Com_{p1}_{p2}_nok.c', replace)
+
+        #  Generate the code with an invalid communicator ==> TO CHECK
+        #replace = patterns.copy()
+        #replace['shortdesc'] = 'Point to point @{p1}@ and @{p2}@ have an invalid communicator'
+       # replace['longdesc'] = 'Point to point @{p1}@ and @{p2}@ have an invalid communicator.'
+       # replace['outcome'] = 'ERROR: InvalidCommunicator'
+       # replace['errormsg'] = 'Invalid Communicator. @{p1}@ at @{filename}@:@{line:MBIERROR1}@ and @{p2}@ at @{filename}@:@{line:MBIERROR2}@ use different communicators'
+       # replace['origin'] = "MPI-Corrbench"
+       # replace['change_com'] = ""
+       # gen.make_file(template, f'InvalidParam_Com_{p1}_{p2}_nok.c', replace)
+
+        # Generate the code with an invalid dest
+        replace = patterns.copy()
+        replace['origin'] = "MBI"
+        replace['shortdesc'] = 'Point to point @{p1}@ has an invalid argument'
+        replace['longdesc'] = 'Point to point @{p1}@ uses a dest that is not a valid rank in the communicator.'
+        replace['outcome'] = 'ERROR: InvalidSrcDest'
+        replace['errormsg'] = 'InvalidSrcDest. @{p1}@ at @{filename}@:@{line:MBIERROR1}@ performs a send with a dest that is not in the communicator (dest is changed at line @{line:MBIERROR}@).'
+        replace['change_com'] = ""
+        replace['change_srcdest'] = 'dest=4; /* MBIERROR */'
+        gen.make_file(template, f'InvalidParam_Dest_{p1}_{p2}_nok.c', replace)
+
+        # Generate the code with an invalid src
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Point to point @{p2}@ has an invalid argument'
+        replace['longdesc'] = 'Point to point @{p2}@ uses a negative integer as its source.'
+        replace['outcome'] = 'ERROR: InvalidSrcDest'
+        replace['errormsg'] = 'InvalidSrcDest. @{p2}@ at @{filename}@:@{line:MBIERROR2}@ performs a recv with a negative integer as source (src is changed at line @{line:MBIERROR}@).'
+        replace['change_srcdest'] = 'src=-1; /* MBIERROR */'
+        gen.make_file(template, f'InvalidParam_Src_{p1}_{p2}_nok.c', replace)
diff --git a/scripts/generators/P2PInvalidComGenerator.py b/scripts/generators/P2PInvalidComGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..2a8a515ab0fe3323ec3bb75d0fe2b8e6ce002644
--- /dev/null
+++ b/scripts/generators/P2PInvalidComGenerator.py
@@ -0,0 +1,120 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: @{origin}@
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+   Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: @{persfeature}@
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Yes
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int src=0, dest=1;
+  int stag = 0, rtag = 0;
+  int buff_size = 1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  MPI_Datatype type = MPI_INT;
+  MPI_Comm newcom = MPI_COMM_WORLD;
+
+  @{init1}@
+  @{init2}@
+  if (rank == 0) {
+    @{change_com1}@
+    @{operation1}@ /* MBIERROR1 */
+    @{start1}@
+    @{fini1}@
+  }else if (rank == 1) {
+    @{change_com2}@
+    @{operation2}@ /* MBIERROR2 */
+    @{start2}@
+    @{fini2}@
+  }
+  @{free1}@
+  @{free2}@
+
+  if(newcom != MPI_COMM_NULL && newcom != MPI_COMM_WORLD)
+    MPI_Comm_free(&newcom);
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
+for p1 in gen.send + gen.isend + gen.psend:
+    for p2 in gen.recv + gen.irecv + gen.precv:
+        patterns = {}
+        patterns = {'p1': p1, 'p2': p2}
+        patterns['origin'] = "MBI"
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['p2pfeature'] = 'Yes' if p1 in gen.send or p2 in gen.recv  else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if p1 in gen.isend or p2 in gen.irecv  else 'Lacking'
+        patterns['persfeature'] = 'Yes' if p1 in gen.psend or p2 in gen.precv  else 'Lacking'
+        patterns['p1'] = p1
+        patterns['p2'] = p2
+        patterns['init1'] = gen.init[p1]("1")
+        patterns['init2'] = gen.init[p2]("2")
+        patterns['start1'] = gen.start[p1]("1")
+        patterns['start2'] = gen.start[p2]("2")
+        patterns['fini1'] = gen.fini[p1]("1")
+        patterns['fini2'] = gen.fini[p2]("2")
+        patterns['operation1'] = gen.operation[p1]("1") #send
+        patterns['operation2'] = gen.operation[p2]("2") #recv
+        patterns['free1'] = gen.free[p1]("1")
+        patterns['free2'] = gen.free[p2]("2")
+        patterns['change_com1'] = ""
+        patterns['change_com2'] = ""
+
+        replace = patterns.copy()
+        replace['origin'] = "inspired from MPI-Corrbench"
+        replace['shortdesc'] = 'Point to point @{p2}@ has an invalid communicator'
+        replace['longdesc'] = 'MPI_COMM_NULL used in point to point @{p2}@'
+        replace['outcome'] = 'ERROR: InvalidCommunicator'
+        replace['errormsg'] = 'Invalid Communicator. @{p2}@ at @{filename}@:@{line:MBIERROR2}@ uses a null communicator.'
+        replace['change_com2'] = 'newcom = MPI_COMM_NULL;'
+        gen.make_file(template, f'InvalidParam_ComNull_{p2}_{p1}_nok.c', replace)
+
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Point to point @{p1}@ has an invalid communicator'
+        replace['longdesc'] = 'MPI_COMM_NULL used in point to point @{p1}@'
+        replace['outcome'] = 'ERROR: InvalidCommunicator'
+        replace['errormsg'] = 'Invalid Communicator. @{p1}@ at @{filename}@:@{line:MBIERROR1}@ uses a null communicator.'
+        replace['change_com1'] = 'newcom = MPI_COMM_NULL;'
+        replace['change_com2'] = ""
+        gen.make_file(template, f'InvalidParam_ComNull_{p1}_{p2}_nok.c', replace)
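+
+# Note: gen.make_file(template, name, replace) presumably substitutes every @{key}@
+# marker of the template (and of the MBI header fields) with replace[key]. Assuming
+# gen.send contains a name such as 'mpi_send' and gen.recv a name such as 'mpi_recv',
+# the first call above would emit a file named
+# InvalidParam_ComNull_mpi_recv_mpi_send_nok.c with the expanded headers.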
diff --git a/scripts/generators/P2PLocalConcurrencyGenerator.py b/scripts/generators/P2PLocalConcurrencyGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..072817b5a56bb946a81eda0076af6edd36041a98
--- /dev/null
+++ b/scripts/generators/P2PLocalConcurrencyGenerator.py
@@ -0,0 +1,127 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+   Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: @{persfeature}@
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int dest=0, src=0;
+  int stag = 0, rtag = 0;
+  int buff_size = 1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+
+  @{init1}@
+  @{init2}@
+  if (rank == 0) {
+    dest = 1; src = 1;
+    @{operation1}@
+    @{start1}@
+    @{write1}@ /* MBIERROR1 */
+    @{fini1}@
+    @{free1}@
+  }else if (rank == 1){
+    dest = 0; src = 0;
+    @{operation2}@
+    @{start2}@
+    @{write2}@ /* MBIERROR2 */
+    @{fini2}@
+    @{free2}@
+  }
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
+for s in gen.send + gen.isend + gen.psend:
+    for r in gen.irecv + gen.precv + gen.recv:
+        patterns = {}
+        patterns = {'s': s, 'r': r}
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['p2pfeature'] = 'Yes' if s in gen.send or r in gen.recv else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv else 'Lacking'
+        patterns['persfeature'] = 'Yes' if s in gen.psend or r in gen.precv else 'Lacking'
+        patterns['s'] = s
+        patterns['r'] = r
+        patterns['init1'] = gen.init[s]("1")
+        patterns['init2'] = gen.init[r]("2")
+        patterns['fini1'] = gen.fini[s]("1")
+        patterns['fini2'] = gen.fini[r]("2")
+        patterns['start1'] = gen.start[s]("1")
+        patterns['start2'] = gen.start[r]("2")
+        patterns['operation1'] = gen.operation[s]("1")
+        patterns['operation2'] = gen.operation[r]("2")
+        patterns['write1'] = gen.write[s]("1")
+        patterns['write2'] = gen.write[r]("2")
+        patterns['free1'] = gen.free[s]("1")
+        patterns['free2'] = gen.free[r]("2")
+        shortdesc = ' Local Concurrency with a P2P'
+
+        # Generate a local concurrency error
+        if s in gen.send and r in gen.irecv + gen.precv:
+            replace = patterns.copy()
+            replace['shortdesc'] = shortdesc
+            replace['longdesc'] = f'The message buffer in {r} is modified before the call has been completed.'
+            replace['outcome'] = 'ERROR: LocalConcurrency'
+            replace['errormsg'] = 'Local Concurrency with a P2P. The receive buffer in @{r}@ is modified at @{filename}@:@{line:MBIERROR2}@ whereas there is no guarantee the message has been received.'
+            gen.make_file(template, f'LocalConcurrency_{r}_{s}_nok.c', replace)
+        if s in gen.isend + gen.psend and r in gen.recv:
+            replace = patterns.copy()
+            replace['shortdesc'] = shortdesc
+            replace['longdesc'] = f'The message buffer in {s} is modified before the call has been completed.'
+            replace['outcome'] = 'ERROR: LocalConcurrency'
+            replace['errormsg'] = 'Local Concurrency with a P2P. The send buffer in @{s}@ is modified at @{filename}@:@{line:MBIERROR1}@ whereas there is no guarantee the message has been sent.'
+            gen.make_file(template, f'LocalConcurrency_{r}_{s}_nok.c', replace)
+        if s in gen.isend + gen.psend and r in gen.irecv + gen.precv:
+            replace = patterns.copy()
+            replace['shortdesc'] = shortdesc
+            replace['longdesc'] = f'The message buffers in {s} and {r} are modified before the calls have completed.'
+            replace['outcome'] = 'ERROR: LocalConcurrency'
+            replace['errormsg'] = 'Local Concurrency with a P2P. The message buffers in @{s}@ and @{r}@ are modified at @{filename}@:@{line:MBIERROR1}@ and @{filename}@:@{line:MBIERROR2}@ whereas there is no guarantee the calls have been completed.'
+            gen.make_file(template, f'LocalConcurrency_{r}_{s}_nok.c', replace)
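+
+# Note on the cases above: a local concurrency error needs at least one nonblocking or
+# persistent call whose buffer is touched before completion, so the blocking send +
+# blocking recv combination yields no file; the three branches cover nonblocking or
+# persistent calls on the receive side, on the send side, and on both sides.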
diff --git a/scripts/generators/P2PMatchingANYSRCGenerator.py b/scripts/generators/P2PMatchingANYSRCGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..ce489af9cbd77a7106e948b76e0c9a3bf1367c0f
--- /dev/null
+++ b/scripts/generators/P2PMatchingANYSRCGenerator.py
@@ -0,0 +1,108 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 4 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int src=MPI_ANY_SOURCE, dest=0;
+  int stag = 42, rtag = MPI_ANY_TAG;
+  int buff_size = 1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  int send_buffer=rank;
+
+  MPI_Datatype type = MPI_INT;
+  MPI_Comm newcom = MPI_COMM_WORLD;
+
+  @{init1}@
+  @{init2}@
+
+  if (rank == 0) {
+    for (int i = 0; i < nprocs - 1; i++) {
+      @{operation1}@ /* MBIERROR */
+      @{fini1}@
+    }
+    if (@{cond}@ != 3) {
+      printf("MBI_MSG_RACE: The last received message is not 3 but %d!\\n", buf1);
+      fflush(stdout);
+      abort();
+    }
+  }else{
+    @{operation2}@
+    @{fini2}@
+  }
+
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
+for s in gen.send + gen.isend:
+    for r in gen.recv + gen.irecv:
+        patterns = {}
+        patterns = {'s': s, 'r': r}
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['p2pfeature'] = 'Yes' if s in gen.send or r in gen.recv else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv else 'Lacking'
+        patterns['s'] = s
+        patterns['r'] = r
+        patterns['cond'] = 'buf1'
+        patterns['init2'] = gen.init[s]("2")
+        patterns['init1'] = gen.init[r]("1")
+        patterns['fini2'] = gen.fini[s]("2")
+        patterns['fini1'] = gen.fini[r]("1")
+        patterns['operation2'] = gen.operation[s]("2")
+        patterns['operation1'] = gen.operation[r]("1")
+
+        # Generate the incorrect matching
+        replace = patterns.copy()
+        replace['shortdesc'] = 'The message ordering is non-deterministic.'
+        replace['longdesc'] = 'The code assumes a fixed order in the reception of messages while the message ordering is non-deterministic.'
+        replace['outcome'] = 'ERROR: MessageRace'
+        replace['errormsg'] = 'P2P message race which can cause a deadlock. @{r}@ at @{filename}@:@{line:MBIERROR}@ is called with ANY_SRC.'
+        gen.make_file(template, f'MessageRace_{r}_{s}_nok.c', replace)
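+
+# Note: patterns['cond'] is spliced verbatim into the C template, so the check
+# `if (@{cond}@ != 3)` expands to `if (buf1 != 3)`; the abort path makes the
+# nondeterministic MPI_ANY_SOURCE matching observable at run time when the last
+# received value is not the expected 3.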
diff --git a/scripts/generators/P2PMatchingGenerator.py b/scripts/generators/P2PMatchingGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..a675d335cbada4d765090fff717efea34a6acf16
--- /dev/null
+++ b/scripts/generators/P2PMatchingGenerator.py
@@ -0,0 +1,145 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: @{persfeature}@
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define buff_size 1
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int its_raining = 0;
+  int src=0, dest=1;
+  int stag=0, rtag=0;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+
+  @{init1}@
+  @{init2}@
+
+  if (rank == 0) {
+    @{operation1}@ /* MBIERROR1 */
+    @{fini1}@
+  }else if (@{change_cond}@){
+    @{operation2}@ /* MBIERROR2 */
+    @{fini2}@
+  }
+
+  @{free1}@
+  @{free2}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
+for p in gen.send + gen.ssend + gen.bsend + gen.recv + gen.irecv + gen.isend:
+    patterns = {}
+    patterns = {'p': p}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+    patterns['p2pfeature'] = 'Yes' if p in gen.send + gen.bsend + gen.ssend + gen.recv else 'Lacking'
+    patterns['ip2pfeature'] = 'Yes' if p in gen.isend + gen.irecv else 'Lacking'
+    patterns['persfeature'] = 'Lacking'
+    # patterns['persfeature'] = 'Yes' if p in gen.psend + gen.precv else 'Lacking'
+    patterns['p'] = p
+    patterns['init1'] = gen.init[p]("1")
+    patterns['init2'] = '' #gen.init[p2]("2")
+    patterns['fini1'] = gen.fini[p]("1")
+    patterns['fini2'] = '' #gen.fini[p2]("2")
+    patterns['free1'] = gen.free[p]("1")
+    patterns['free2'] = '' #gen.free[p]("2")
+    patterns['operation1'] = gen.operation[p]("1")
+    patterns['operation2'] = '' #gen.operation[p2]("2")
+    patterns['change_cond'] = 'rank == 1'
+
+    # Generate the incorrect matching with one call
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Point to point @{p}@ is not matched'
+    replace['longdesc'] = 'Process 0 calls @{p}@ and is not matched'
+    replace['outcome'] = 'ERROR: CallMatching'
+    replace['errormsg'] = 'P2P mismatch. @{p}@ at @{filename}@:@{line:MBIERROR1}@ is not matched.'
+    gen.make_file(template, f'CallOrdering_{p}_nok.c', replace)
+
+    # Generate the incorrect matching with two calls
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Both point to point @{p}@ calls are unmatched'
+    replace['longdesc'] = 'Processes 0 and 1 both call @{p}@; neither call is matched'
+    replace['outcome'] = 'ERROR: CallMatching'
+    replace['errormsg'] = 'P2P mismatch. @{p}@ at @{filename}@:@{line:MBIERROR1}@ and @{p}@ at @{filename}@:@{line:MBIERROR2}@ are not matched.'
+    replace['operation2'] = gen.operation[p]("1")
+    replace['fini2'] = gen.fini[p]("1")
+    #replace['free2'] = gen.free[p]("2")
+    gen.make_file(template, f'CallOrdering_{p}_{p}_nok.c', replace)
+
+for s in gen.send + gen.isend + gen.ssend + gen.bsend:
+    for r in gen.recv + gen.irecv:
+        patterns = {}
+        patterns = {'s': s, 'r': r}
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['p2pfeature'] = 'Yes' if s in gen.send or r in gen.recv else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv else 'Lacking'
+        patterns['persfeature'] = 'Lacking'
+        patterns['s'] = s
+        patterns['r'] = r
+        patterns['init1'] = gen.init[s]("1")
+        patterns['init2'] = gen.init[r]("2")
+        patterns['fini1'] = gen.fini[s]("1")
+        patterns['fini2'] = gen.fini[r]("2")
+        patterns['free1'] = gen.free[s]("1")
+        patterns['free2'] = gen.free[r]("2")
+        patterns['operation1'] = gen.operation[s]("1")
+        patterns['operation2'] = gen.operation[r]("2")
+        patterns['change_cond'] = '(rank == 1) && (its_raining)'
+
+        # Generate the incorrect matching because of the conditional
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Point to point @{r}@ is never called.'
+        replace['longdesc'] = 'Point to point @{r}@ is never executed. Process 1 calls MPI_Finalize and causes a deadlock.'
+        replace['outcome'] = 'ERROR: CallMatching'
+        replace['errormsg'] = 'P2P mismatch. @{r}@ at @{filename}@:@{line:MBIERROR2}@ is never called because of the conditional (@{change_cond}@).'
+        replace['operation1'] = gen.operation[s]("1")
+        replace['operation2'] = gen.operation[r]("2")
+        gen.make_file(template, f'CallOrdering_{r}_{s}_nok.c', replace)
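+
+# Note on the second family above: the guard '(rank == 1) && (its_raining)' is always
+# false because its_raining stays 0, so rank 1 never posts the receive and the send of
+# rank 0 remains unmatched, which is the deadlock described in the MBI headers.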
diff --git a/scripts/generators/P2PMessageRaceGenerator.py b/scripts/generators/P2PMessageRaceGenerator.py
new file mode 100644
index 0000000000000000000000000000000000000000..8003d587824d2ff46a1befb82492cc862ee43c40
--- /dev/null
+++ b/scripts/generators/P2PMessageRaceGenerator.py
@@ -0,0 +1,189 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 4 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 2
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int dest, src;
+  int i=0;
+  int root = 0;
+  int stag = 0, rtag = 0;
+  int buff_size = 1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs != 4)
+    printf("MBI ERROR: This test needs 4 processes to produce a bug!\\n");
+
+  int dbs = sizeof(int)*nprocs; /* Size of the dynamic buffers for alltoall and friends */
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+  MPI_Op op = MPI_SUM;
+
+  @{init0a}@
+  @{init0b}@
+  @{init0c}@
+  @{init1a}@
+  @{init1b}@
+  @{init3a}@
+  @{init3b}@
+  @{init3c}@
+  @{init3d}@
+
+  if (rank == 0) {
+    src = MPI_ANY_SOURCE; rtag = @{tag}@;
+    for (int i = 0; i < 2 * N; i++) {
+      @{operation0a}@ /* MBIERROR1 */
+      @{fini0a}@
+    }
+    src = 3; rtag = 0;
+    @{operation0b}@ /* MBIERROR2 */
+    @{fini0b}@
+    @{operation0c}@
+    @{fini0c}@
+  } else if (rank == 1 || rank == 2) {
+    dest = 0; stag = @{tag}@;
+    for (int i = 0; i < N; i++) {
+      @{operation1a}@
+      @{fini1a}@
+    }
+    dest = 3; stag = 0;
+    @{operation1b}@
+    @{fini1b}@
+  } else if (rank == 3) {
+    dest = 0; src = 1; rtag= 0; stag = 0;
+    @{operation3a}@
+    @{fini3a}@
+    @{operation3b}@ /* MBIERROR3 */
+    @{fini3b}@
+    src = 2;
+    @{operation3c}@
+    @{fini3c}@
+    @{operation3d}@
+    @{fini3d}@
+  }
+
+  @{free0a}@
+  @{free0b}@
+  @{free0c}@
+  @{free1a}@
+  @{free1b}@
+  @{free3a}@
+  @{free3b}@
+  @{free3c}@
+  @{free3d}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+basedesc = 'We have 4 processes (p0, p1, p2 and p3). p1 and p2 each send N messages to p0 and then a last message to p3. Process p0 receives 2*N messages from p1 and p2 using MPI_ANY_SOURCE and then waits for messages from p3. p3 waits for a message from p1 and sends a message to p0, before doing the same with p2.'
+
+for s in gen.send + gen.isend:
+    for r in gen.recv + gen.irecv:
+        patterns = {}
+        patterns = {'s': s, 'r': r}
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['p2pfeature'] = 'Yes' if s in gen.send or r in gen.recv else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv else 'Lacking'
+        patterns['s'] = s
+        patterns['r'] = r
+
+        patterns['init0a'] = gen.init[r]("0a")
+        patterns['init0b'] = gen.init[r]("0b")
+        patterns['init0c'] = gen.init[r]("0c")
+        patterns['operation0a'] = gen.operation[r]("0a")
+        patterns['operation0b'] = gen.operation[r]("0b")
+        patterns['operation0c'] = gen.operation[r]("0c")
+        patterns['fini0a'] = gen.fini[r]("0a")
+        patterns['fini0b'] = gen.fini[r]("0b")
+        patterns['fini0c'] = gen.fini[r]("0c")
+        patterns['free0a'] = gen.free[r]("0a")
+        patterns['free0b'] = gen.free[r]("0b")
+        patterns['free0c'] = gen.free[r]("0c")
+
+        patterns['init1a'] = gen.init[s]("1a")
+        patterns['init1b'] = gen.init[s]("1b")
+        patterns['operation1a'] = gen.operation[s]("1a")
+        patterns['operation1b'] = gen.operation[s]("1b")
+        patterns['fini1a'] = gen.fini[s]("1a")
+        patterns['fini1b'] = gen.fini[s]("1b")
+        patterns['free1a'] = gen.free[s]("1a")
+        patterns['free1b'] = gen.free[s]("1b")
+
+        patterns['init3a'] = gen.init[r]("3a")
+        patterns['init3b'] = gen.init[s]("3b")
+        patterns['init3c'] = gen.init[r]("3c")
+        patterns['init3d'] = gen.init[s]("3d")
+        patterns['operation3a'] = gen.operation[r]("3a")
+        patterns['operation3b'] = gen.operation[s]("3b")
+        patterns['operation3c'] = gen.operation[r]("3c")
+        patterns['operation3d'] = gen.operation[s]("3d")
+        patterns['fini3a'] = gen.fini[r]("3a")
+        patterns['fini3b'] = gen.fini[s]("3b")
+        patterns['fini3c'] = gen.fini[r]("3c")
+        patterns['fini3d'] = gen.fini[s]("3d")
+        patterns['free3a'] = gen.free[r]("3a")
+        patterns['free3b'] = gen.free[s]("3b")
+        patterns['free3c'] = gen.free[r]("3c")
+        patterns['free3d'] = gen.free[s]("3d")
+
+        patterns['tag'] = '1'
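+        # The loop between p1/p2 and p0 uses tag @{tag}@ while the final exchange with
+        # p3 always uses tag 0; the _nok variant below overrides 'tag' to '0' so that
+        # p0's wildcard receive loop can also match the messages coming from p3.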
+
+        # Generate a correct version: distinct tags prevent the race
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Message race'
+        replace['longdesc'] = basedesc + ' In this file, different message tags are used to avoid an unintended message race.'
+        replace['outcome'] = 'OK'
+        replace['errormsg'] = 'OK'
+        gen.make_file(template, f'MessageRace_Loop_{s}_{r}_ok.c', replace)
+
+        # Generate the message race: every message uses the same tag
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Message race'
+        replace['longdesc'] = basedesc + ' If the send loop of p1 finishes before the one of p2, p0 can receive a message from p3 in its receive loop in place of a message from p1 or p2. In that case the program deadlocks because of the message race at the receive at line @{line:MBIERROR2}@.'
+        replace['outcome'] = 'ERROR: MessageRace'
+        replace['errormsg'] = 'Message race. The wildcard receive @{r}@ at @{filename}@:@{line:MBIERROR1}@ can match the message sent by @{s}@ at @{filename}@:@{line:MBIERROR3}@, which was intended for the @{r}@ without wildcard at @{filename}@:@{line:MBIERROR2}@, leading to nondeterministic matching.'
+        replace['tag'] = '0'
+        gen.make_file(template, f'MessageRace_Loop_{s}_{r}_nok.c', replace)
diff --git a/scripts/generators/P2PMessageRaceTagsGenerator.py b/scripts/generators/P2PMessageRaceTagsGenerator.py
new file mode 100644
index 0000000000000000000000000000000000000000..78e3e24e9697b3ff028ff3d08e36b902afe10b95
--- /dev/null
+++ b/scripts/generators/P2PMessageRaceTagsGenerator.py
@@ -0,0 +1,161 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 3 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 10
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int dest, src;
+  int i=0;
+  int root = 0;
+  int stag = 0, rtag = 0;
+  int buff_size = 1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 3)
+    printf("MBI ERROR: This test needs at least 3 processes to produce a bug!\\n");
+
+  int dbs = sizeof(int)*nprocs; /* Size of the dynamic buffers for alltoall and friends */
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+  MPI_Op op = MPI_SUM;
+
+  @{init0}@
+  @{init1a}@
+  @{init1b}@
+  @{init2}@
+
+  if (rank == 0) {
+    dest = 1; stag = 1;
+    @{operation0}@
+    @{fini0}@
+  } else if (rank == 1) {
+    src = MPI_ANY_SOURCE;
+    rtag = @{tag1}@;
+    @{operation1a}@
+    @{fini1a}@
+    rtag = @{tag2}@;
+    @{operation1b}@ @{tagerror}@
+    @{fini1b}@
+  } else if (rank == 2) {
+    dest = 1; stag = 2;
+    @{operation2}@
+    @{fini2}@
+  }
+
+  @{free0}@
+  @{free1a}@
+  @{free1b}@
+  @{free2}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+# To be correct, this benchmark must either use a wildcard for the tag of the
+# second recv, or use no wildcard for the first recv tag and give the second
+# recv a tag different from the first one.
+#
+# |-----+-----+----+----|
+# | x\y | ANY | 1  | 2  |
+# |-----+-----+----+----|
+# | ANY | OK  |  - |  - |
+# |   1 | OK  |  - | OK |
+# |   2 | OK  | OK |  - |
+# |-----+-----+----+----|
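+#
+# For instance, with x = MPI_ANY_TAG and y = 1, the first receive may already consume
+# the tag-1 message sent by rank 0; the second receive then waits for a tag-1 message
+# that never comes (only the tag-2 message of rank 2 remains), hence the possible
+# deadlock.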
+
+for s in gen.send:
+    for r in gen.recv:
+        for x, y in [('MPI_ANY_TAG', 'MPI_ANY_TAG'), # OK
+                     ('MPI_ANY_TAG', '1'),           # NOK
+                     ('1', 'MPI_ANY_TAG'),           # OK
+                     ('1', '2'),                     # OK
+                     ('2', '2')]:                    # NOK
+            patterns = {}
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+            patterns['p2pfeature'] = 'Yes' if s in gen.send or r in gen.recv else 'Lacking'
+            patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv else 'Lacking'
+            patterns['s'] = s
+            patterns['r'] = r
+
+            patterns['tag1'] = x
+            patterns['tag2'] = y
+
+            patterns['init0'] = gen.init[s]("0")
+            patterns['operation0'] = gen.operation[s]("0")
+            patterns['fini0'] = gen.fini[s]("0")
+            patterns['free0'] = gen.free[s]("0")
+
+            patterns['init1a'] = gen.init[r]("1a")
+            patterns['init1b'] = gen.init[r]("1b")
+            patterns['operation1a'] = gen.operation[r]("1a")
+            patterns['operation1b'] = gen.operation[r]("1b")
+            patterns['fini1a'] = gen.fini[r]("1a")
+            patterns['fini1b'] = gen.fini[r]("1b")
+            patterns['free1a'] = gen.free[r]("1a")
+            patterns['free1b'] = gen.free[r]("1b")
+
+            patterns['init2'] = gen.init[s]("2")
+            patterns['operation2'] = gen.operation[s]("2")
+            patterns['fini2'] = gen.fini[s]("2")
+            patterns['free2'] = gen.free[s]("2")
+            patterns['tagerror'] = '/* MBIERROR */'
+
+            if y == 'MPI_ANY_TAG' or (x != 'MPI_ANY_TAG' and x != y):
+                # Generate a correct tag matching
+                replace = patterns.copy()
+                replace['shortdesc'] = 'Message race'
+                replace['longdesc'] = 'Correct code without message race.'
+                replace['outcome'] = 'OK'
+                replace['errormsg'] = 'OK'
+                replace['tagerror'] = ''
+                gen.make_file(template, f'MessageRace_tag_{x}_{y}_{s}_{r}_ok.c', replace)
+            else:
+                # Generate an incorrect tag matching
+                replace = patterns.copy()
+                replace['shortdesc'] = 'Message race'
+                replace['longdesc'] = 'Message race in @{r}@ with @{s}@.'
+                replace['outcome'] = 'ERROR: MessageRace'
+                replace['errormsg'] = 'Message race. The wildcard receive @{r}@ at @{filename}@:@{line:MBIERROR}@ together with the incorrect tag matching makes the reception order nondeterministic.'
+                gen.make_file(template, f'MessageRace_tag_{x}_{y}_{s}_{r}_nok.c', replace)
diff --git a/scripts/generators/P2PProbeGenerator.py b/scripts/generators/P2PProbeGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..917859c1b1154360904ff3f0992a7dc4a270f482
--- /dev/null
+++ b/scripts/generators/P2PProbeGenerator.py
@@ -0,0 +1,148 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  MPI_Status sta;
+  int src,dest;
+  int stag=0, rtag=0;
+  int buff_size = 1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+
+  @{init1a}@
+  @{init1b}@
+  @{init1c}@
+  @{init2a}@
+  @{init2b}@
+  @{init2c}@
+
+  if (rank == 0) {
+    dest=1, src=1;
+    @{operation1a}@ /* MBIERROR1 */
+    @{operation1b}@
+    @{operation1c}@
+    @{fini1a}@
+    @{fini1b}@
+    @{fini1c}@
+  }else if (rank == 1){
+    dest=0, src=0;
+    @{operation2a}@ /* MBIERROR2 */
+    @{operation2b}@
+    @{operation2c}@
+    @{fini2a}@
+    @{fini2b}@
+    @{fini2c}@
+  }
+  @{free1a}@
+  @{free1b}@
+  @{free1c}@
+  @{free2a}@
+  @{free2b}@
+  @{free2c}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
+for p in gen.probe:
+    for s in gen.send + gen.isend:
+        for r in gen.recv + gen.irecv:
+            patterns = {}
+            patterns = {'p':p, 's': s, 'r': r}
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+            patterns['p2pfeature'] = 'Yes' if s in gen.send or r in gen.recv else 'Lacking'
+            patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv else 'Lacking'
+            patterns['s'] = s
+            patterns['r'] = r
+            patterns['p'] = p
+            patterns['init1a'] = gen.init[p]("1")
+            patterns['init1b'] = gen.init[s]("1")
+            patterns['init1c'] = gen.init[r]("2")
+            patterns['init2a'] = gen.init[p]("1")
+            patterns['init2b'] = gen.init[r]("3")
+            patterns['init2c'] = gen.init[s]("4")
+            patterns['fini1a'] = gen.fini[p]("1")
+            patterns['fini1b'] = gen.fini[s]("1")
+            patterns['fini1c'] = gen.fini[r]("2")
+            patterns['fini2a'] = gen.fini[p]("1")
+            patterns['fini2b'] = gen.fini[r]("3")
+            patterns['fini2c'] = gen.fini[s]("4")
+            patterns['free1a'] = gen.free[p]("1")
+            patterns['free1b'] = gen.free[s]("1")
+            patterns['free1c'] = gen.free[r]("2")
+            patterns['free2a'] = gen.free[p]("1")
+            patterns['free2b'] = gen.free[r]("3")
+            patterns['free2c'] = gen.free[s]("4")
+            patterns['operation1a'] = gen.operation[p]("1")
+            patterns['operation1b'] = gen.operation[s]("1")
+            patterns['operation1c'] = gen.operation[r]("2")
+            patterns['operation2a'] = gen.operation[p]("1")
+            patterns['operation2b'] = gen.operation[r]("3")
+            patterns['operation2c'] = gen.operation[s]("4")
+
+            # Generate the incorrect matching
+            replace = patterns.copy()
+            replace['shortdesc'] = 'MPI_Probe is called before MPI_Recv.'
+            replace['longdesc'] = 'MPI_Probe is a blocking call that returns only after a matching message has been found. Here both ranks call MPI_Probe before any message is sent, which creates a deadlock.'
+            replace['outcome'] = 'ERROR: CallMatching'
+            replace['errormsg'] = 'P2P mismatch. @{p}@ at @{filename}@:@{line:MBIERROR1}@ and @{filename}@:@{line:MBIERROR2}@ are called before @{r}@.'
+            gen.make_file(template, f'CallOrdering_{p}_{r}_{s}_nok.c', replace)
+
+            # Generate a correct matching
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Correct use of MPI_Probe.'
+            replace['longdesc'] = 'Correct use of MPI_Probe.'
+            replace['outcome'] = 'OK'
+            replace['errormsg'] = 'OK'
+            replace['operation1a'] = gen.operation[s]("1")
+            replace['operation1b'] = gen.operation[p]("1")
+            gen.make_file(template, f'CallOrdering_{p}_{r}_{s}_ok.c', replace)
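+
+# Note on the _ok variant above: rank 0 first posts its send and only then probes, so
+# rank 1's probe can match that send; every probe then has a matching message in
+# flight and the circular wait of the _nok variant disappears.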
diff --git a/scripts/generators/P2PSendrecvArgGenerator.py b/scripts/generators/P2PSendrecvArgGenerator.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ed991fed7371ab311c601a74230c6e7424d604e
--- /dev/null
+++ b/scripts/generators/P2PSendrecvArgGenerator.py
@@ -0,0 +1,134 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: @{origin}@
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+    Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+    P2P!basic: Yes
+    P2P!nonblocking: Lacking
+    P2P!persistent: Lacking
+    COLL!basic: Lacking
+    COLL!nonblocking: Lacking
+    COLL!persistent: Lacking
+    COLL!tools: Lacking
+    RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 3 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 10
+
+int main(int argc, char **argv) {
+  int nprocs = -1 , rank = -1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+
+  if (nprocs < 3)
+    printf("MBI ERROR: This test needs at least 3 processes to produce a bug!\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+
+  int src = 0; int dest = 0;
+  int stag = 1; int rtag = 1;
+  int buff_size = N;
+
+  @{init1}@
+  @{init2}@
+  @{init3}@
+
+  if (rank == 0) {
+    src = 1;
+    @{start1}@
+    @{operation1}@
+    @{fini1}@
+  } else if (rank == 1) {
+    src = 2; dest = 0;
+    @{start2}@
+    @{change_arg}@
+    @{operation2}@
+    @{fini2}@
+  } else if (rank == 2) {
+    dest = 1;
+    @{start3}@
+    @{operation3}@
+    @{fini3}@
+  }
+
+  @{free1}@
+  @{free2}@
+  @{free3}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+for s in gen.send:
+    for r in gen.recv:
+        for sr in gen.sendrecv:
+            patterns = {}
+            patterns = {'s': s, 'r': r, 'sr': sr}
+            patterns['origin'] = "RTED"
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+
+            patterns['init1'] = gen.init[r]("1").replace("buf1=-1", "buf1[N]={-1}")
+            patterns['start1'] = gen.start[r]("1")
+            patterns['operation1'] = gen.operation[r]("1")
+            patterns['fini1'] = gen.fini[r]("1")
+            patterns['free1'] = gen.free[r]("1")
+
+            patterns['init2'] = gen.init[sr]("2")
+            patterns['start2'] = gen.start[sr]("2")
+            patterns['operation2'] = gen.operation[sr]("2")
+            patterns['fini2'] = gen.fini[sr]("2")
+            patterns['free2'] = gen.free[sr]("2")
+
+            patterns['init3'] = gen.init[s]("3").replace("buf3=rank", "buf3[N]={rank}")
+            patterns['start3'] = gen.start[s]("3")
+            patterns['operation3'] = gen.operation[s]("3")
+            patterns['fini3'] = gen.fini[s]("3")
+            patterns['free3'] = gen.free[s]("3")
+
+            patterns['change_arg'] = ''
+
+            # Generate a code with distinct buffer
+            replace = patterns.copy()
+            replace['origin'] = 'MBI'
+            replace['shortdesc'] = 'Correct usage of Sendrecv function.'
+            replace['longdesc'] = 'Correct usage of Sendrecv function.'
+            replace['outcome'] = 'OK'
+            replace['errormsg'] = 'OK'
+            gen.make_file(template, f'InvalidParam_Buffer_{s}_{sr}_{r}_ok.c', replace)
+
+            # Generate a code with non distinct buffer
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Invalid buffer in Sendrecv function.'
+            replace['longdesc'] = 'Invalid buffer in Sendrecv: the send and receive buffers must be distinct.'
+            replace['outcome'] = 'ERROR: InvalidBuffer'
+            replace['errormsg'] = 'The send and receive buffers of @{sr}@ at @{filename}@:@{line:MBIERROR}@ must be distinct.'
+            replace['change_arg'] = gen.write[sr]("2")
+            replace['operation2'] = gen.operation[sr]("2")  + " /* MBIERROR */"
+            gen.make_file(template, f'InvalidParam_Buffer_{s}_{sr}_{r}_nok.c', replace)
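+
+# MPI note: MPI_Sendrecv requires distinct send and receive buffers (the in-place case
+# is what MPI_Sendrecv_replace is for), which is why the non-distinct-buffer variant
+# above is flagged as InvalidBuffer.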
diff --git a/scripts/generators/RMAArgGenerator.py b/scripts/generators/RMAArgGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..04cef56feb5208ccd030d57b4b76d2eab8325f71
--- /dev/null
+++ b/scripts/generators/RMAArgGenerator.py
@@ -0,0 +1,109 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: @{origin}@
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: @{rmafeature}@
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 10
+
+int main(int argc, char **argv) {
+  int rank, numProcs;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &numProcs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+
+  int *winbuf = (int *)malloc(N * sizeof(int));
+
+  MPI_Win win;
+  MPI_Win_create(winbuf, N * sizeof(int), 1, MPI_INFO_NULL, MPI_COMM_WORLD, &win);
+
+  MPI_Datatype type = MPI_INT;
+  int target = (rank + 1) % numProcs;
+
+  if(rank == 0){
+    @{epoch}@
+    @{change_arg}@
+    @{init}@
+    @{operation}@ /* MBIERROR */
+
+    @{finEpoch}@
+  } else {
+    @{epoch}@
+
+    @{finEpoch}@
+  }
+
+  MPI_Win_free(&win);
+
+  free(winbuf);
+
+  MPI_Finalize();
+  return 0;
+}
+"""
+
+
+for e in gen.epoch:
+    for p in gen.rma:
+        patterns = {}
+        patterns = {'e': e, 'p': p}
+        patterns['origin'] = "MBI"
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['rmafeature'] = 'Yes'
+        patterns['p'] = p
+        patterns['e'] = e
+        patterns['epoch'] = gen.epoch[e]("1")
+        patterns['finEpoch'] = gen.finEpoch[e]("1")
+        patterns['init'] = gen.init[p]("1")
+        patterns['operation'] = gen.operation[p]("1")
+        patterns['change_arg'] = ""
+
+        # Generate a code with a null type
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Invalid argument in one-sided operation.'
+        replace['longdesc'] = 'A one-sided operation has MPI_DATATYPE_NULL as a type.'
+        replace['outcome'] = 'ERROR: InvalidDatatype'
+        replace['change_arg'] = 'type = MPI_DATATYPE_NULL;'
+        replace['errormsg'] = '@{p}@ at @{filename}@:@{line:MBIERROR}@ has MPI_DATATYPE_NULL as a type'
+        gen.make_file(template, f'InvalidParam_DatatypeNullCond_{e}_{p}_nok.c', replace)
+
+        # Generate a code with an invalid type
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Invalid argument in one-sided operation.'
+        replace['longdesc'] = 'Use of an invalid datatype in one-sided operation.'
+        replace['outcome'] = 'ERROR: InvalidDatatype'
+        replace['change_arg'] = 'MPI_Type_contiguous (2, MPI_INT, &type); MPI_Type_commit(&type);MPI_Type_free(&type); /* MBIERROR2 */'
+        replace['errormsg'] = 'Invalid Datatype in @{p}@ at @{filename}@:@{line:MBIERROR}@'
+        gen.make_file(template, f'InvalidParam_DatatypeCond_{e}_{p}_nok.c', replace)
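+
+# MPI note: a datatype handle must not be used for communication once it is
+# MPI_DATATYPE_NULL or has been released with MPI_Type_free (which also sets the handle
+# to MPI_DATATYPE_NULL); both change_arg variants above inject such an invalid handle
+# right before the one-sided operation.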
diff --git a/scripts/generators/RMAInvalidArgGenerator.py b/scripts/generators/RMAInvalidArgGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..2ba7cf9adde25b691a8a0a49dc5a88d78596eebb
--- /dev/null
+++ b/scripts/generators/RMAInvalidArgGenerator.py
@@ -0,0 +1,134 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: @{origin}@
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+    Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+    P2P!basic: Lacking
+    P2P!nonblocking: Lacking
+    P2P!persistent: Lacking
+    COLL!basic: Lacking
+    COLL!nonblocking: Lacking
+    COLL!persistent: Lacking
+    COLL!tools: Lacking
+    RMA: @{rmafeature}@
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 10
+
+int main(int argc, char **argv) {
+  int nprocs = -1 , rank = -1;
+  MPI_Win win;
+  int *winbuf = (int *)@{malloc}@ // Window buffer
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Datatype type = MPI_INT;
+  int target = (rank + 1) % nprocs;
+
+  MPI_Win_create(winbuf, N * sizeof(int), 1, MPI_INFO_NULL, MPI_COMM_WORLD, &win);
+
+
+  @{epoch}@
+
+  @{init}@
+  @{change_arg}@
+  @{operation}@ /* MBIERROR */
+
+  @{finEpoch}@
+
+  MPI_Win_free(&win);
+  free(winbuf);
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
+for e in gen.epoch:
+    for p in gen.rma:
+        patterns = {}
+        patterns = {'e': e, 'p': p}
+        patterns['origin'] = "MBI"
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['rmafeature'] = 'Yes'
+        patterns['p'] = p
+        patterns['e'] = e
+        patterns['epoch'] = gen.epoch[e]("1")
+        patterns['finEpoch'] = gen.finEpoch[e]("1")
+        patterns['init'] = gen.init[p]("1")
+        patterns['operation'] = gen.operation[p]("1")
+        patterns['change_arg'] = ""
+        patterns['malloc'] = "malloc(N * sizeof(int));"
+
+        # Generate a code with a null type
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Invalid argument in one-sided operation.'
+        replace['longdesc'] = 'A one-sided operation has MPI_DATATYPE_NULL as a type.'
+        replace['outcome'] = 'ERROR: InvalidDatatype'
+        replace['change_arg'] = 'type = MPI_DATATYPE_NULL;'
+        replace['errormsg'] = '@{p}@ at @{filename}@:@{line:MBIERROR}@ has MPI_DATATYPE_NULL as a type'
+        gen.make_file(template, f'InvalidParam_DatatypeNull_{e}_{p}_nok.c', replace)
+
+        # Generate a code with a null buffer (move to RMAWinBufferGenerator)
+        # replace = patterns.copy()
+        # replace['origin'] = 'MPI-Corrbench'
+        # replace['shortdesc'] = 'nullptr is invalid in one-sided operation.'
+        # replace['longdesc'] = 'A one-sided operation has an invalid buffer.'
+        # replace['outcome'] = 'ERROR: InvalidBuffer'
+        # replace['init'] = 'int * localbuf1 = (int *)malloc(sizeof(int));'
+        # replace['change_arg'] = 'localbuf1 = NULL;'
+        # replace['operation'] = gen.operation[p]("1").replace('&localbuf1', 'localbuf1')
+        # replace['errormsg'] = '@{p}@ at @{filename}@:@{line:MBIERROR}@ has an invalid buffer'
+        # gen.make_file(template, f'InvalidParam_BufferNull_{e}_{p}_nok.c', replace)
+
+        # Generate a code with an invalid type
+        replace = patterns.copy()
+        replace['origin'] = 'MBI'
+        replace['shortdesc'] = 'Invalid argument in one-sided operation.'
+        replace['longdesc'] = 'Use of an invalid datatype in one-sided operation.'
+        replace['outcome'] = 'ERROR: InvalidDatatype'
+        replace['change_arg'] = 'MPI_Type_contiguous (2, MPI_INT, &type); MPI_Type_commit(&type);MPI_Type_free(&type); /* MBIERROR2 */'
+        replace['errormsg'] = 'Invalid Datatype in @{p}@ at @{filename}@:@{line:MBIERROR}@'
+        gen.make_file(template, f'InvalidParam_Datatype_{e}_{p}_nok.c', replace)
+
+        # Generate a code with invalid buffer
+        replace = patterns.copy()
+        patterns['origin'] = "MPI-Corrbench"
+        replace['shortdesc'] = 'Invalid invalid buffer (buffer must be allocated)'
+        replace['longdesc'] = 'Use of an invalid buffer in MPI_Win_create.'
+        replace['outcome'] = 'ERROR: InvalidBuffer'
+        replace['malloc'] = "NULL; /* MBIERROR2 */"
+        replace['init'] = ""
+        replace['operation'] = ""
+        replace['change_arg'] = ""
+        replace['errormsg'] = 'Invalid buffer in Win_create at @{filename}@:@{line:MBIERROR2}@'
+        gen.make_file(template, f'InvalidParam_InvalidBufferWinCreate_{e}_{p}_nok.c', replace)
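+
+# Note on the last variant: the window is created over a NULL base pointer while still
+# advertising N * sizeof(int) bytes, so it exposes memory that was never allocated;
+# the final free(NULL) is harmless.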
diff --git a/scripts/generators/RMALocalLocalConcurrencyGenerator.py b/scripts/generators/RMALocalLocalConcurrencyGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..37c8c052e9115e053256553106aa37919e7e1ef6
--- /dev/null
+++ b/scripts/generators/RMALocalLocalConcurrencyGenerator.py
@@ -0,0 +1,184 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 2, requires MPI 3 implementation (for lock_all/unlock_all epochs)
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: @{rmafeature}@
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 1
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  MPI_Win win;
+  int winbuf[100] = {0};
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Datatype type = MPI_INT;
+  int target = 1;
+
+  MPI_Win_create(&winbuf, 100 * sizeof(int), sizeof(int), MPI_INFO_NULL, MPI_COMM_WORLD, &win);
+
+  winbuf[0] = 12345;
+  @{init1}@
+
+  @{epoch}@
+
+  if (rank == 0) {
+    @{operation1}@ /* MBIERROR1 */
+    @{syncEpoch}@
+    @{operation2}@ /* MBIERROR2 */
+  }
+
+  @{finEpoch}@
+
+  MPI_Win_free(&win);
+
+  MPI_Finalize();
+  return 0;
+}
+"""
+
+
+for e in gen.epoch:
+    for p1 in gen.get:
+        for p2 in gen.put + gen.store + gen.load + gen.get + gen.loadstore:
+            patterns = {}
+            patterns = {'e': e, 'p1': p1, 'p2': p2}
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+            patterns['rmafeature'] = 'Yes'
+            patterns['p1'] = p1
+            patterns['p2'] = p2
+            patterns['e'] = e
+            patterns['epoch'] = gen.epoch[e]("1")
+            patterns['finEpoch'] = gen.finEpoch[e]("1")
+            patterns['syncEpoch'] = ''
+            patterns['init1'] = gen.init[p1]("1")
+            patterns['operation1'] = gen.operation[p1]("1")
+            patterns['operation2'] = gen.operation[p2]("1")
+            shortdesc_rma = 'Correct code using RMA operations'
+
+            # Generate a data race (Get + Get/load/store/Put)
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Local Concurrency error.'
+            replace['longdesc'] = 'Local Concurrency error. @{p2}@ conflicts with @{p1}@'
+            replace['outcome'] = 'ERROR: LocalConcurrency'
+            replace['errormsg'] = 'Local Concurrency error. @{p2}@ at @{filename}@:@{line:MBIERROR2}@ conflicts with @{p1}@ at line @{line:MBIERROR1}@'
+            gen.make_file(template, f'LocalConcurrency_lloutwindow_{e}_{p1}_{p2}_nok.c', replace)
+            # Generate a correct code by switching operation1 and  operation2
+            if p2 in gen.store + gen.load + gen.loadstore:
+                replace = patterns.copy()
+                replace['shortdesc'] = shortdesc_rma
+                replace['longdesc'] = shortdesc_rma
+                replace['outcome'] = 'OK'
+                replace['errormsg'] = 'OK'
+                replace['operation1'] = gen.operation[p2]("1")
+                replace['operation2'] = gen.operation[p1]("1")
+                gen.make_file(template, f'LocalConcurrency_lloutwindow_{e}_{p2}_{p1}_ok.c', replace)
+            # Generate a correct code by synchronizing operation1 and operation2
+            replace = patterns.copy()
+            replace['shortdesc'] = shortdesc_rma
+            replace['longdesc'] = shortdesc_rma
+            replace['outcome'] = 'OK'
+            replace['errormsg'] = 'OK'
+            replace['syncEpoch'] = gen.syncEpoch[e]("1")
+            gen.make_file(template, f'LocalConcurrency_lloutwindow_{e}_{p1}_{p2}_ok.c', replace)
+        # Generate a correct code by removing operation2
+        replace = patterns.copy()
+        replace['shortdesc'] = shortdesc_rma
+        replace['longdesc'] = shortdesc_rma
+        replace['outcome'] = 'OK'
+        replace['errormsg'] = 'OK'
+        replace['operation1'] = gen.operation[p1]("1")
+        replace['operation2'] = ''
+        gen.make_file(template, f'LocalConcurrency_{e}_{p1}_ok.c', replace)
+
+
+for e in gen.epoch:
+    for p1 in gen.put:
+        for p2 in gen.store:
+            patterns = {}
+            patterns = {'e': e, 'p1': p1, 'p2': p2}
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+            patterns['rmafeature'] = 'Yes'
+            patterns['p1'] = p1
+            patterns['p2'] = p2
+            patterns['e'] = e
+            patterns['epoch'] = gen.epoch[e]("1")
+            patterns['finEpoch'] = gen.finEpoch[e]("1")
+            patterns['syncEpoch'] = ''
+            patterns['init1'] = gen.init[p1]("1")
+            patterns['operation1'] = gen.operation[p1]("1")
+            patterns['operation2'] = gen.operation[p2]("1")
+
+            # Generate a data race (Put + store)
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Local Concurrency error.'
+            replace['longdesc'] = 'Local Concurrency error. @{p2}@ conflicts with @{p1}@'
+            replace['outcome'] = 'ERROR: LocalConcurrency'
+            replace['errormsg'] = 'Local Concurrency error. @{p2}@ at @{filename}@:@{line:MBIERROR2}@ conflicts with @{p1}@ at line @{line:MBIERROR1}@'
+            gen.make_file(template, f'LocalConcurrency_lloutwindow_{e}_{p1}_{p2}_nok.c', replace)
+            # Generate a correct code by adding a synchronization between operation1 and operation2
+            replace = patterns.copy()
+            replace['shortdesc'] = shortdesc_rma
+            replace['longdesc'] = shortdesc_rma
+            replace['outcome'] = 'OK'
+            replace['errormsg'] = 'OK'
+            replace['syncEpoch'] = gen.syncEpoch[e]("1")
+            gen.make_file(template, f'LocalConcurrency_lloutwindow_{e}_{p1}_{p2}_ok.c', replace)
+            # Generate a correct code by switching operation1 and operation2
+            replace = patterns.copy()
+            replace['shortdesc'] = shortdesc_rma
+            replace['longdesc'] = shortdesc_rma
+            replace['outcome'] = 'OK'
+            replace['errormsg'] = 'OK'
+            replace['operation1'] = gen.operation[p2]("1")
+            replace['operation2'] = gen.operation[p1]("1")
+            gen.make_file(template, f'LocalConcurrency_lloutwindow_{e}_{p2}_{p1}_ok.c', replace)
+
+            # Generate a correct code by removing operation2
+            replace = patterns.copy()
+            replace['shortdesc'] = shortdesc_rma
+            replace['longdesc'] = shortdesc_rma
+            replace['outcome'] = 'OK'
+            replace['errormsg'] = 'OK'
+            replace['operation1'] = gen.operation[p1]("1")
+            replace['operation2'] = ''
+            gen.make_file(template, f'LocalConcurrency_{e}_{p1}_ok.c', replace)
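+
+# MPI note: inside an epoch the origin buffer of a Put must not be updated and the
+# origin buffer of a Get must not be accessed at all until the operation is completed,
+# which is why adding the syncEpoch call or moving the local access before the RMA
+# operation turns the erroneous variants into correct ones.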
diff --git a/scripts/generators/RMAP2PGlobalConcurrencyGenerator.py b/scripts/generators/RMAP2PGlobalConcurrencyGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..a6723fac19c10c4ac7f7162f3858620bdffb7e57
--- /dev/null
+++ b/scripts/generators/RMAP2PGlobalConcurrencyGenerator.py
@@ -0,0 +1,121 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 2, does not require MPI 3 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: @{rmafeature}@
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 4 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 1
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  MPI_Win win;
+  int * winbuf = (int *)malloc(N * sizeof(int)); // Window buffer
+  int buff_size = 1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 4)
+    printf("MBI ERROR: This test needs at least 4 processes to produce a bug!\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+  int stag=0, rtag=0;
+  winbuf[0] = nprocs;
+
+  MPI_Win_create(winbuf, N*sizeof(int), sizeof(int), MPI_INFO_NULL, MPI_COMM_WORLD, &win);
+
+
+  @{init1}@
+  @{init2}@
+  @{init3}@
+
+  if (rank == 0) {
+    int target=1;
+    MPI_Win_lock(MPI_LOCK_EXCLUSIVE, 1, 0, win);
+    @{operation1}@
+    localbuf1[0] = 12345; /* MBIERROR1 */
+    MPI_Win_unlock(1, win);
+  }else if (rank == 2){
+    int dest=1;
+    @{operation2}@
+    @{fini2}@
+  }else if (rank == 1){
+    int src=2;
+    buf3 = winbuf[0];
+    @{operation3}@
+    winbuf[0] = buf3; /* MBIERROR2 */
+    @{fini3}@
+  }
+
+  MPI_Win_free(&win);
+  free(winbuf);
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
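+# One test per (RMA call, send flavor, receive flavor) triple: rank 0 accesses the
+# window of rank 1 with a Put or Get under an exclusive lock, while rank 1 itself
+# receives data from rank 2 and writes it back into winbuf, so the final window
+# content depends on the interleaving (global concurrency error).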
+for p in gen.put + gen.get:
+    for s in gen.send + gen.isend:
+        for r in gen.recv + gen.irecv:
+            patterns = {}
+            patterns = {'p': p, 's': s, 'r': r}
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+            patterns['rmafeature'] = 'Yes'
+            patterns['p2pfeature'] = 'Yes' if s in gen.send or r in gen.recv else 'Lacking'
+            patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv else 'Lacking'
+            patterns['p'] = p
+            patterns['s'] = s
+            patterns['r'] = r
+            patterns['init1'] = gen.init[p]("1")
+            patterns['init2'] = gen.init[s]("2")
+            patterns['init3'] = gen.init[r]("3")
+            patterns['fini2'] = gen.fini[s]("2")
+            patterns['fini3'] = gen.fini[r]("3")
+            patterns['operation1'] = gen.operation[p]("1") #put or get
+            patterns['operation2'] = gen.operation[s]("2") #send
+            patterns['operation3'] = gen.operation[r]("3") #recv
+
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Global Concurrency error.'
+            replace['longdesc'] = 'Global Concurrency error. Concurrent access of variable winbuf by @{p}@ and @{r}@'
+            replace['outcome'] = 'ERROR: GlobalConcurrency'
+            replace['errormsg'] = 'Global Concurrency error. @{p}@ at @{filename}@:@{line:MBIERROR1}@ accesses the window of process 1. Process 1 receives data from process 2 and uses variable winbuf. winbuf in process 1 is then nondeterministic.'
+            gen.make_file(template, f'GlobalConcurrency_{p}_{s}_{r}_nok.c', replace)
diff --git a/scripts/generators/RMAP2PLocalConcurrencyGenerator.py b/scripts/generators/RMAP2PLocalConcurrencyGenerator.py
new file mode 100644
index 0000000000000000000000000000000000000000..0f9f92e1f4c23fd87a971460e127f0e603df9452
--- /dev/null
+++ b/scripts/generators/RMAP2PLocalConcurrencyGenerator.py
@@ -0,0 +1,134 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: @{origin}@
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 2, does not require MPI 3 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: @{rmafeature}@
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 3 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 1
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  MPI_Win win;
+  int * winbuf = (int *)malloc(N * sizeof(int)); // Window buffer
+  int buff_size = N;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 3)
+    printf("MBI ERROR: This test needs at least 3 processes to produce a bug!\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+  int stag=0, rtag=0;
+  winbuf[0] = nprocs;
+
+  MPI_Win_create(winbuf, N*sizeof(int), sizeof(int), MPI_INFO_NULL, MPI_COMM_WORLD, &win);
+
+
+  @{init1}@
+  @{init2}@
+  @{init3}@
+
+  @{comment_fence}@MPI_Win_fence(0, win);
+
+  if (rank == 0) {
+    int target=1, dest=2;
+
+    @{comment_lock}@MPI_Win_lock(MPI_LOCK_EXCLUSIVE, 1, 0, win);
+    @{operation1}@
+    @{operation2}@ /* MBIERROR */
+    @{comment_lock}@MPI_Win_unlock(1, win);
+
+    @{fini2}@
+  }else if (rank == 2){
+    int src=0;
+    @{operation3}@
+    @{fini3}@
+  }
+
+  @{comment_fence}@MPI_Win_fence(0, win);
+
+  MPI_Win_free(&win);
+  free(winbuf);
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
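+# operation1 is always an MPI_Get whose origin buffer localbuf1 is still in flight
+# when operation2 (a send whose buffer is renamed from buf2 to localbuf1 below)
+# reads it, which is a local concurrency error. Each combination is emitted twice:
+# once inside a fence epoch and once inside a lock epoch.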
+for p in gen.get:
+    for s in gen.send + gen.isend:
+         for r in gen.recv + gen.irecv:
+             patterns = {}
+             patterns = {'p': p, 's': s, 'r': r}
+             patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+             patterns['origin'] = 'RTED'
+             patterns['shortdesc'] = 'Local Concurrency error.'
+             patterns['longdesc'] = 'Local Concurrency error. Concurrent access of variable localbuf1 by @{p}@ (write) and @{s}@ (read)'
+             patterns['rmafeature'] = 'Yes'
+             patterns['p2pfeature'] = 'Yes' if s in gen.send or r in gen.recv else 'Lacking'
+             patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv else 'Lacking'
+             patterns['p'] = p
+             patterns['s'] = s
+             patterns['r'] = r
+             patterns['init1'] = gen.init[p]("1")
+             patterns['init2'] = gen.init[s]("2")
+             patterns['init3'] = gen.init[r]("3")
+             patterns['fini2'] = gen.fini[s]("2")
+             patterns['fini3'] = gen.fini[r]("3")
+             patterns['operation1'] = gen.operation[p]("1")
+             patterns['operation2'] = gen.operation[s]("2").replace("buf2", "localbuf1")
+             patterns['operation3'] = gen.operation[r]("3")
+             patterns['comment_lock'] = ''
+             patterns['comment_fence'] = ''
+
+             # Use fence epoch
+             replace = patterns.copy()
+             replace['outcome'] = 'ERROR: LocalConcurrency'
+             replace['errormsg'] = 'Local Concurrency error. @{s}@ at @{filename}@:@{line:MBIERROR}@ conflicts with @{p}@.'
+             replace['comment_lock'] = '// '
+             gen.make_file(template, f'LocalConcurrency_fence_{p}_{s}_{r}_nok.c', replace)
+
+             # Use lock epoch
+             replace = patterns.copy()
+             replace['outcome'] = 'ERROR: LocalConcurrency'
+             replace['errormsg'] = 'Local Concurrency error. @{s}@ at @{filename}@:@{line:MBIERROR}@ conflicts with @{p}@.'
+             replace['comment_fence'] = '// '
+             gen.make_file(template, f'LocalConcurrency_lock_{p}_{s}_{r}_nok.c', replace)
diff --git a/scripts/generators/RMARemoteLocalConcurrencyGenerator.py b/scripts/generators/RMARemoteLocalConcurrencyGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..3b1ef07ebe4f64cac2dfd1d399998d9f4e341b45
--- /dev/null
+++ b/scripts/generators/RMARemoteLocalConcurrencyGenerator.py
@@ -0,0 +1,165 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 2, requires MPI 3 implementation (for lock_all/unlock_all epochs)
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: @{rmafeature}@
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 10
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  MPI_Win win;
+  int winbuf[100] = {0};
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Datatype type = MPI_INT;
+  int target = 1 - rank;
+
+  MPI_Win_create(&winbuf, 100 * sizeof(int), sizeof(int), MPI_INFO_NULL, MPI_COMM_WORLD, &win);
+
+  @{init1}@
+  @{init2}@
+
+  @{epoch}@
+
+  if (rank == 0) {
+    @{operation1}@ /* MBIERROR1 */
+  }
+  if(rank == 1){
+    @{operation2}@ /* MBIERROR2 */
+  }
+
+  @{finEpoch}@
+
+  MPI_Win_free(&win);
+
+  MPI_Finalize();
+  return 0;
+}
+"""
+
+
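+# First family: rank 0 gets elements 5..N+4 of the remote window into its own
+# winbuf[5..], while rank 1 touches overlapping window memory with a Put, Get,
+# load or store. Conflicting combinations are emitted as _nok cases; combinations
+# made disjoint (displacement 0) or read-only are emitted as _ok cases.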
+for e in gen.epoch:
+    for p1 in gen.get:
+        for p2 in gen.put + gen.rstore + gen.rload + gen.get :
+            patterns = {}
+            patterns = {'e': e, 'p1': p1, 'p2': p2}
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+            patterns['rmafeature'] = 'Yes'
+            patterns['p1'] = p1
+            patterns['p2'] = p2
+            patterns['e'] = e
+            patterns['epoch'] = gen.epoch[e]("1")
+            patterns['finEpoch'] = gen.finEpoch[e]("1")
+            patterns['init1'] = gen.init[p1]("1")
+            patterns['operation1'] = 'MPI_Get(&winbuf[5], N, MPI_INT, target, 5, N, type, win);'
+            patterns['init2'] = gen.init[p2]("2")
+            patterns['operation2'] = gen.operation[p2]("2")
+
+            # Generate a data race (Get + Get/load/store/Put)
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Global Concurrency error.'
+            replace['longdesc'] = 'Global Concurrency error. @{p2}@ conflicts with @{p1}@'
+            replace['outcome'] = 'ERROR: GlobalConcurrency'
+            replace['errormsg'] = 'Global Concurrency error. @{p2}@ at @{filename}@:@{line:MBIERROR2}@ conflicts with @{p1}@ line @{line:MBIERROR1}@'
+
+            # Replace Put and Get first argument
+            if p2 in gen.put:
+                replace['operation2'] = 'MPI_Put(&localbuf1, N, MPI_INT, target, 5, N, type, win);'
+                replace['outcome'] = 'ERROR: GlobalConcurrency'
+                replace['errormsg'] = 'Global Concurrency error. @{p2}@ at @{filename}@:@{line:MBIERROR2}@ conflicts with @{p1}@ line @{line:MBIERROR1}@'
+                gen.make_file(template, f'GlobalConcurrency_rl_{e}_{p1}_{p2}_nok.c', replace)
+                replace['operation1'] = gen.operation[p1]("1")
+                replace['operation2'] = 'MPI_Put(&localbuf1, N, MPI_INT, target, 0, N, type, win);'
+                replace['outcome'] = 'OK'
+                replace['errormsg'] = 'OK'
+                gen.make_file(template, f'GlobalConcurrency_rl_{e}_{p1}_{p2}_ok.c', replace)
+            else:
+                if p2 in gen.get:
+                    replace['operation2'] = 'MPI_Get(&winbuf[5], N, MPI_INT, target, 0, N, type, win);'
+                    replace['shortdesc'] = 'No error'
+                    replace['longdesc'] = ''
+                    replace['outcome'] = 'OK'
+                    replace['errormsg'] = 'OK'
+                elif p2 in gen.rload:
+                    replace['shortdesc'] = 'No error'
+                    replace['longdesc'] = ''
+                    replace['outcome'] = 'OK'
+                    replace['errormsg'] = 'OK'
+
+                ok = 'ok' if replace['outcome'] == 'OK' else 'nok'
+                gen.make_file(template, f'GlobalConcurrency_rl_{e}_{p1}_{p2}_{ok}.c', replace)
+
+
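+# Second family: rank 0 Puts N elements into the remote window while rank 1
+# accesses overlapping window memory with a store, load, Put or Get; every
+# combination is emitted as a _nok global concurrency case.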
+for e in gen.epoch:
+    for p1 in gen.put:
+        for p2 in gen.rstore + gen.rload + gen.put + gen.get:
+            patterns = {}
+            patterns = {'e': e, 'p1': p1, 'p2': p2}
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+            patterns['rmafeature'] = 'Yes'
+            patterns['p1'] = p1
+            patterns['p2'] = p2
+            patterns['e'] = e
+            patterns['epoch'] = gen.epoch[e]("1")
+            patterns['finEpoch'] = gen.finEpoch[e]("1")
+            patterns['init1'] = gen.init[p1]("1")
+            patterns['operation1'] = gen.operation[p1]("1")
+            patterns['init2'] = gen.init[p2]("2")
+            patterns['operation2'] = gen.operation[p2]("2")
+
+            # Generate a data race (Put + store/load/Put/Get)
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Global Concurrency error.'
+            replace['longdesc'] = 'Global Concurrency error. @{p2}@ conflicts with @{p1}@'
+            replace['outcome'] = 'ERROR: GlobalConcurrency'
+            replace['errormsg'] = 'Global Concurrency error. @{p2}@ at @{filename}@:@{line:MBIERROR2}@ conflicts with @{p1}@ line @{line:MBIERROR1}@'
+
+            # Replace Put/Get first argument
+            if p2 in gen.put:
+                replace['operation1'] = 'MPI_Put(&localbuf1, N, MPI_INT, target, 5, N, type, win);'
+                replace['operation2'] = 'MPI_Put(&winbuf[5], N, MPI_INT, target, 0, N, type, win);'
+            elif p2 in gen.get:
+                replace['operation1'] = 'MPI_Put(&localbuf1, N, MPI_INT, target, 5, N, type, win);'
+                replace['operation2'] = 'MPI_Get(&winbuf[5], N, MPI_INT, target, 0, N, type, win);'
+
+            gen.make_file(template, f'GlobalConcurrency_rl_{e}_{p1}_{p2}_nok.c', replace)
diff --git a/scripts/generators/RMARemoteRemoteConcurrencyGenerator.py b/scripts/generators/RMARemoteRemoteConcurrencyGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..9361187b4154e97a4fa4e740f68719fa1e95e4c6
--- /dev/null
+++ b/scripts/generators/RMARemoteRemoteConcurrencyGenerator.py
@@ -0,0 +1,104 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 2, requires MPI 3 implementation (for lock_all/unlock_all epochs)
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: @{rmafeature}@
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 3 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 1
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  MPI_Win win;
+  int winbuf[100] = {0};
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Datatype type = MPI_INT;
+  int target = 1;
+
+  MPI_Win_create(&winbuf, 100 * sizeof(int), sizeof(int), MPI_INFO_NULL, MPI_COMM_WORLD, &win);
+
+  @{init1}@
+  @{init2}@
+
+  @{epoch}@
+
+  if (rank == 0) {
+    @{operation1}@ /* MBIERROR1 */
+  }
+  else if (rank == 2) {
+    @{operation2}@ /* MBIERROR2 */
+  }
+
+  @{finEpoch}@
+
+  MPI_Win_free(&win);
+
+  MPI_Finalize();
+  return 0;
+}
+"""
+
+
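+# Ranks 0 and 2 both access the window of process 1 inside the same epoch, and at
+# least one of the two remote accesses is an MPI_Put, so the operations conflict.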
+for e in gen.epoch:
+    for p1 in gen.get + gen.put:
+        for p2 in gen.put:
+            patterns = {}
+            patterns = {'e': e, 'p1': p1, 'p2': p2}
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+            patterns['rmafeature'] = 'Yes'
+            patterns['p1'] = p1
+            patterns['p2'] = p2
+            patterns['e'] = e
+            patterns['epoch'] = gen.epoch[e]("1")
+            patterns['finEpoch'] = gen.finEpoch[e]("1")
+            patterns['init1'] = gen.init[p1]("1")
+            patterns['operation1'] = gen.operation[p1]("1")
+            patterns['init2'] = gen.init[p2]("2")
+            patterns['operation2'] = gen.operation[p2]("2")
+
+            # Generate a data race
+            replace = patterns.copy()
+            replace['shortdesc'] = 'Global Concurrency error.'
+            replace['longdesc'] = 'Global Concurrency error. Processes 0 and 2 both access the window of process 1, with @{p1}@ and @{p2}@ respectively'
+            replace['outcome'] = 'ERROR: GlobalConcurrency'
+            replace['errormsg'] = 'Global Concurrency error. @{p1}@ at @{filename}@:@{line:MBIERROR1}@ and @{p2}@ at @{filename}@:@{line:MBIERROR2}@ conflict in process 1'
+            gen.make_file(template, f'GlobalConcurrency_rr_{e}_{p1}_{p2}_nok.c', replace)
diff --git a/scripts/generators/RMAReqLifecycleGenerator.py b/scripts/generators/RMAReqLifecycleGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..82ee3fd99d4fcd35363f01c5bb18e6320e3de7f7
--- /dev/null
+++ b/scripts/generators/RMAReqLifecycleGenerator.py
@@ -0,0 +1,150 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: @{origin}@
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+    Version of MPI: Conforms to MPI 2, requires MPI 3 implementation (for lock_all/unlock_all epochs)
+
+BEGIN_MPI_FEATURES
+    P2P!basic: Lacking
+    P2P!nonblocking: Lacking
+    P2P!persistent: Lacking
+    COLL!basic: Lacking
+    COLL!nonblocking: Lacking
+    COLL!persistent: Lacking
+    COLL!tools: Lacking
+    RMA: @{rmafeature}@
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 20
+
+int main(int argc, char **argv) {
+  int rank, numProcs;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &numProcs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+
+  if (numProcs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  int *winbuf = (int *)malloc(N * sizeof(int));
+
+  MPI_Win win;
+  MPI_Win_create(winbuf, N * sizeof(int), 1, MPI_INFO_NULL, MPI_COMM_WORLD, &win);
+
+  MPI_Datatype type = MPI_INT;
+  int target = 1;
+
+  @{epoch}@
+
+  if (rank == 0) {
+    @{epoch2}@
+
+    @{init}@
+    @{operation}@
+
+    @{finEpoch2}@
+  }
+
+  @{finEpoch}@
+
+  MPI_Win_free(&win);
+
+  free(winbuf);
+
+  MPI_Finalize();
+
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+
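+# First family: for each epoch flavor and each RMA call, emit a correct reference
+# code plus two faulty variants in which either the call opening or the call
+# closing the epoch is commented out (MissingEpoch).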
+for e1 in gen.epoch:
+    for p in gen.rma:
+        patterns = {}
+        patterns = {'e1': e1, 'p': p}
+        patterns['origin'] = "MPI-Corrbench"
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['rmafeature'] = 'Yes'
+        patterns['p'] = p
+        patterns['e1'] = e1
+        patterns['epoch'] = gen.epoch[e1]("1")
+        patterns['finEpoch'] = gen.finEpoch[e1]("1")
+        patterns['epoch2'] = ""
+        patterns['finEpoch2'] = ""
+        patterns['init'] = gen.init[p]("1")
+        patterns['operation'] = gen.operation[p]("1")
+
+        # Generate a correct code
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Correct code'
+        replace['longdesc'] = 'Correct code'
+        replace['outcome'] = 'OK'
+        replace['errormsg'] = 'OK'
+        gen.make_file(template, f'EpochLifecycle_RMA_{e1}_{p}_ok.c', replace)
+
+        # Generate a code with missing open epoch
+        replace = patterns.copy()
+        replace['shortdesc'] = f"Request lifecycle, missing open {e1} epoch"
+        replace['longdesc'] = f"Request lifecycle, missing open {e1} epoch"
+        replace['outcome'] = 'ERROR: MissingEpoch'
+        replace['errormsg'] = '@{e1}@ epoch opening is missing at @{filename}@:@{line:MBIERROR}@'
+        replace['epoch'] = f"/* MBIERROR MISSING: {gen.epoch[e1]('1')} */"
+        gen.make_file(template, f'EpochLifecycle_RMA_MissingOpen_{e1}_{p}_nok.c', replace)
+
+        # Generate a code with missing close epoch
+        replace = patterns.copy()
+        replace['shortdesc'] = f"Request lifecycle, missing close {e1} epoch"
+        replace['longdesc'] = f"Request lifecycle, missing close {e1} epoch"
+        replace['outcome'] = 'ERROR: MissingEpoch'
+        replace['errormsg'] = '@{e1}@ epoch closing is missing at @{filename}@:@{line:MBIERROR}@'
+        replace['epoch'] = gen.epoch[e1]("1")
+        replace['finEpoch'] = f"/* MBIERROR MISSING: {gen.finEpoch[e1]('1')} */"
+        gen.make_file(template, f'EpochLifecycle_RMA_MissingClose_{e1}_{p}_nok.c', replace)
+
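+# Second family: rank 0 opens and closes a complete e2 epoch while the surrounding
+# e1 epoch is still active, producing a nested epoch (DoubleEpoch) error.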
+for e1 in gen.epoch:
+    for e2 in gen.epoch:
+        for p in gen.rma:
+            patterns = {}
+            patterns = {'e1': e1, 'e2': e2, 'p': p}
+            patterns['origin'] = "MPI-Corrbench"
+            patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+            patterns['rmafeature'] = 'Yes'
+            patterns['p'] = p
+            patterns['e1'] = e1
+            patterns['e2'] = e2
+            patterns['epoch'] = gen.epoch[e1]("1")
+            patterns['finEpoch'] = gen.finEpoch[e1]("1")
+            patterns['epoch2'] = gen.epoch[e2]("1") + " /* MBIERROR */"
+            patterns['finEpoch2'] = gen.finEpoch[e2]("1") + " /* MBIERROR */"
+            patterns['init'] = gen.init[p]("1")
+            patterns['operation'] = gen.operation[p]("1")
+
+            # Generate a code with an epoch nested inside another epoch
+            replace = patterns.copy()
+            replace['shortdesc'] = f"Request lifecycle, {e2} epoch into {e1} epoch"
+            replace['longdesc'] = f"Request lifecycle, {e2} epoch into {e1} epoch"
+            replace['outcome'] = 'ERROR: DoubleEpoch'
+            replace['errormsg'] = '@{e2}@ at @{filename}@:@{line:MBIERROR}@ is opened inside another epoch'
+            gen.make_file(template, f'EpochLifecycle_RMA_doubleEpoch_{e1}_{e2}_{p}_nok.c', replace)
diff --git a/scripts/generators/RMAWinBufferGenerator.py b/scripts/generators/RMAWinBufferGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..8ad4a4f501460d92786055a8f3cd102fc157785d
--- /dev/null
+++ b/scripts/generators/RMAWinBufferGenerator.py
@@ -0,0 +1,121 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: @{origin}@
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 2, does not require MPI 3 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: @{rmafeature}@
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 10
+
+int * buffer;
+
+static void get_win(MPI_Win *win) {
+  @{bufferalloc}@
+
+  MPI_Win_create(@{buffer}@, N * sizeof(int), 1, MPI_INFO_NULL, MPI_COMM_WORLD, win);
+
+  return;
+}
+
+int main(int argc, char *argv[]) {
+  int rank, numProcs;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &numProcs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (numProcs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  MPI_Win win;
+
+  get_win(&win);
+
+  MPI_Win_fence(0, win);
+
+  if (rank == 0) {
+    int localbuf[N] = {12345};
+    MPI_Put(&localbuf, N, MPI_INT, 1, 0, N, MPI_INT, win);
+  }
+
+  MPI_Win_fence(0, win);
+
+  MPI_Win_free(&win);
+
+  @{bufferfree}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+
+"""
+
+
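+# Four window-buffer variants: no allocation at all, an explicit NULL pointer, an
+# allocation smaller than the size declared to MPI_Win_create (all reported as
+# InvalidBuffer), and a correctly sized malloc as the matching _ok case.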
+for b in ['missing', 'null',  'malloc', 'bufferSize']:
+    patterns = {}
+    patterns = {'b': b}
+    patterns['origin'] = "MPI-CorrBench"
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+    patterns['rmafeature'] = 'Yes'
+
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Invalid buffer in window creation.'
+    replace['longdesc'] = 'Invalid buffer in window creation.'
+    replace['outcome'] = 'ERROR: InvalidBuffer'
+    replace['errormsg'] = '@{b}@ at @{filename}@:@{line:MBIERROR}@ has an invalid buffer'
+    replace['bufferfree'] = ''
+
+    ok = 'nok'
+    replace['buffer'] = 'buffer'
+
+    if b == 'missing':
+        replace['bufferalloc'] = '/* MBIERROR1 */'
+        replace['longdesc'] = 'Uninitialized buffer in window creation.'
+    elif b == 'null':
+        replace['bufferalloc'] = 'buffer = NULL; /* MBIERROR1 */'
+        replace['longdesc'] = 'Use NULL buffer in window creation.'
+    elif b == 'bufferSize':
+        replace['bufferalloc'] = 'buffer = (int *)malloc((N/2) * sizeof(int)); /* MBIERROR1 */'
+        replace['bufferfree'] = 'free(buffer);'
+        replace['longdesc'] = 'Unmatched size of buffer in window creation.'
+    else:
+        replace['bufferalloc'] = 'buffer = (int *)malloc(N * sizeof(int));'
+        replace['bufferfree'] = 'free(buffer);'
+        replace['longdesc'] = 'Correctly initialized buffer in window creation.'
+        replace['outcome'] = 'OK'
+        replace['errormsg'] = ''
+        ok = 'ok'
+
+    gen.make_file(template, f'InvalidParam_WinBuffer_{b}_{ok}.c', replace)
diff --git a/scripts/generators/ResleakGenerator.py b/scripts/generators/ResleakGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..c33d1978fb38fc8c040fec7770f1422ee7a52a01
--- /dev/null
+++ b/scripts/generators/ResleakGenerator.py
@@ -0,0 +1,126 @@
+#! /usr/bin/python3
+
+# Copyright 2021-2022. The MBI project. All rights reserved.
+# This program is free software; you can redistribute it and/or modify it under the terms of the license (GNU GPL).
+
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: Lacking
+  P2P!nonblocking: Lacking
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: @{toolfeature}@
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE}
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+
+#define ITERATIONS 100
+#define PARAM_PER_ITERATION 3
+#define PARAM_LOST_PER_ITERATION 1
+
+static void myOp(int *invec, int *inoutvec, int *len, MPI_Datatype *dtype) {
+  for (int i = 0; i < *len; i++)
+    inoutvec[i] += invec[i];
+}
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int i=1;
+  int j=0;
+  int size=1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  @{change_size}@
+  @{init}@
+  @{loop}@
+  @{operation}@
+  @{cond}@
+  @{fini}@
+  @{end}@
+
+  @{free}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+# Generate code with one collective
+for call in gen.tcoll:
+    patterns = {}
+    patterns = {'call': call}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+    patterns['toolfeature'] = 'Yes'
+    patterns['call'] = call
+    patterns['operation'] = gen.operation[call]("1")
+    patterns['init'] = gen.init[call]("1")
+    patterns['fini'] = gen.fini[call]("1")
+    patterns['free'] = gen.free[call]("1")
+    missing = patterns['fini']
+    patterns['loop'] = ''
+    patterns['cond'] = ''
+    patterns['change_size'] = ''
+    patterns['end'] = ''
+
+    # Generate the correct code
+    replace = patterns.copy()
+    replace['shortdesc'] = '@{call}@ is correctly used'
+    replace['longdesc'] = f'{call} correctly used'
+    replace['outcome'] = 'OK'
+    replace['errormsg'] = ''
+    gen.make_file(template, f'ResLeak_{call}_ok.c', replace)
+
+    # Generate the resleak
+    replace = patterns.copy()
+    replace['shortdesc'] = '@{call}@ has no matching free'
+    replace['longdesc'] = '@{call}@ has no matching free'
+    replace['outcome'] = f'ERROR: {gen.error[call]}'
+    replace['errormsg'] = 'Resleak. @{call}@ at @{filename}@:@{line:MBIERROR}@ has no matching free.'
+    replace['fini'] = ' /* MBIERROR MISSING: ' + missing + ' */'
+    gen.make_file(template, f'ResLeak_{call}_nok.c', replace)
+
+    # Generate multiple resleaks
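+    # The call is wrapped in two nested loops; the guard in 'cond' only frees the
+    # first PARAM_PER_ITERATION - PARAM_LOST_PER_ITERATION handles of each outer
+    # iteration, so one handle per iteration is leaked.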
+    replace = patterns.copy()
+    replace['shortdesc'] = '@{call}@ lacks several frees'
+    replace['longdesc'] = '@{call}@ lacks several frees'
+    replace['outcome'] = f'ERROR: {gen.error[call]}'
+    replace['errormsg'] = 'Resleak. @{call}@ at @{filename}@:@{line:MBIERROR}@ lacks several frees.'
+    replace['change_size'] = 'size=PARAM_PER_ITERATION;'
+    replace['loop'] = 'for (i = 0; i < ITERATIONS; i++) {\n    for (j = 0; j < PARAM_PER_ITERATION; j++) {'
+    replace['cond'] = '      if (j < PARAM_PER_ITERATION - PARAM_LOST_PER_ITERATION) {'
+    replace['fini'] = gen.fini[call]("1") + ' /* MBIERROR */'
+    replace['end'] = '      }\n     }\n   }'
+    gen.make_file(template, f'ResLeak_multiple_{call}_nok.c', replace)
diff --git a/scripts/generators/generator_utils.py b/scripts/generators/generator_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..7b610baaf6e03f314702ef6561a15c7a6177151f
--- /dev/null
+++ b/scripts/generators/generator_utils.py
@@ -0,0 +1,481 @@
+# Copyright 2021-2022. The MBI project. All rights reserved.
+# This program is free software; you can redistribute it and/or modify it under the terms of the license (GNU GPL).
+
+# This is a simple templating system, dedicated to the systematic generation of MPI source code
+
+import os
+import re
+
+# Collectives
+coll = ['MPI_Barrier', 'MPI_Bcast', 'MPI_Reduce', 'MPI_Gather', 'MPI_Scatter', 'MPI_Scan', 'MPI_Exscan', 'MPI_Allgather', 'MPI_Allreduce', 'MPI_Allgatherv', 'MPI_Alltoall', 'MPI_Alltoallv']
+icoll = ['MPI_Ibcast', 'MPI_Ireduce', 'MPI_Igather', 'MPI_Iscatter', 'MPI_Iscan', 'MPI_Iexscan', 'MPI_Iallgather', 'MPI_Iallreduce', 'MPI_Iallgatherv', 'MPI_Ialltoall', 'MPI_Ialltoallv']
+barrier = ['MPI_Barrier']
+ibarrier = ['MPI_Ibarrier']
+coll4op = ['MPI_Reduce', 'MPI_Allreduce']
+icoll4op = ['MPI_Ireduce', 'MPI_Iallreduce']
+coll4root =  ['MPI_Reduce', 'MPI_Bcast', 'MPI_Gather', 'MPI_Scatter']
+icoll4root = ['MPI_Ireduce', 'MPI_Ibcast', 'MPI_Igather', 'MPI_Iscatter']
+pcoll = []
+tcoll = ['MPI_Comm_split', 'MPI_Op_create', 'MPI_Comm_dup', 'MPI_Type_contiguous', 'MPI_Comm_create', 'MPI_Group_excl'] # MPI_Comm_dup removed
+tcoll4color = ['MPI_Comm_split']
+tcoll4topo = ['MPI_Cart_get']
+
+# P2P
+allsend = ['MPI_Send', 'MPI_Isend', 'MPI_Ssend', 'MPI_Bsend', 'MPI_Send_init']
+allrecv = ['MPI_Recv', 'MPI_Irecv', 'MPI_Recv_init']
+send = ['MPI_Send']
+ssend = ['MPI_Ssend']
+bsend = ['MPI_Bsend']
+isend = ['MPI_Isend']
+psend = ['MPI_Send_init']
+recv = ['MPI_Recv']
+irecv = ['MPI_Irecv']
+precv = ['MPI_Recv_init']
+probe = ['MPI_Probe']
+sendrecv = ['MPI_Sendrecv']
+
+# RMA
+epoch = ['MPI_Win_fence', 'MPI_Win_lock', 'MPI_Win_lock_all']
+rma = ['MPI_Get', 'MPI_Put']
+get = ['MPI_Get']
+put = ['MPI_Put']
+store = ['store']
+load = ['load']
+rstore = ['rstore']
+rload = ['rload']
+loadstore = ['loadstore']
+
+
+# setup
+init = {}
+start = {}
+operation = {}
+fini = {}
+free = {}
+write = {}
+error = {}
+epoch = {}
+finEpoch = {}
+syncEpoch = {}
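+# Each dictionary maps an MPI call (or a pseudo-operation such as 'store'/'load')
+# to a lambda producing a C snippet; the lambda argument is a suffix appended to
+# the variable names so that several instances can coexist in one generated file.
+# Note that the RMA 'epoch' list defined above is shadowed by this 'epoch' dict,
+# which is filled in the RMA section below.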
+
+
+### COLL:basic
+
+init['MPI_Bcast'] = lambda n: f'int buf{n}[buff_size];'
+start['MPI_Bcast'] = lambda n: ""
+operation['MPI_Bcast'] = lambda n: f'MPI_Bcast(buf{n}, buff_size, type, root, newcom);'
+fini['MPI_Bcast'] = lambda n: ""
+free['MPI_Bcast'] = lambda n: ""
+write['MPI_Bcast'] = lambda n: ""
+
+init['MPI_Barrier'] = lambda n: ""
+start['MPI_Barrier'] = lambda n: ""
+operation['MPI_Barrier'] = lambda n: 'MPI_Barrier(newcom);'
+fini['MPI_Barrier'] = lambda n: ""
+free['MPI_Barrier'] = lambda n: ""
+write['MPI_Barrier'] = lambda n: ""
+
+init['MPI_Reduce'] = lambda n: f"int sum{n}, val{n} = 1;"
+start['MPI_Reduce'] = lambda n: ""
+operation['MPI_Reduce'] = lambda n: f"MPI_Reduce(&val{n}, &sum{n}, 1, type, op, root, newcom);"
+fini['MPI_Reduce'] = lambda n: ""
+free['MPI_Reduce'] = lambda n: ""
+write['MPI_Reduce'] = lambda n: ""
+
+init['MPI_Gather'] = lambda n: f"int val{n}=1, buf{n}[buff_size];"
+start['MPI_Gather'] = lambda n: ""
+operation['MPI_Gather'] = lambda n: f"MPI_Gather(&val{n}, 1, type, buf{n},1, type, root, newcom);"
+fini['MPI_Gather'] = lambda n: ""
+free['MPI_Gather'] = lambda n: ""
+write['MPI_Gather'] = lambda n: ""
+
+init['MPI_Scatter'] = lambda n: f"int val{n}, buf{n}[buff_size];\n  memset(buf{n}, 0, sizeof(int)*buff_size);"
+start['MPI_Scatter'] = lambda n: ""
+operation['MPI_Scatter'] = lambda n: f"MPI_Scatter(&buf{n}, 1, type, &val{n}, 1, type, root, newcom);"
+fini['MPI_Scatter'] = lambda n: ""
+free['MPI_Scatter'] = lambda n: ""
+write['MPI_Scatter'] = lambda n: ""
+
+init['MPI_Allreduce'] = lambda n: f"int sum{n}, val{n} = 1;"
+start['MPI_Allreduce'] = lambda n: ""
+operation['MPI_Allreduce'] = lambda n: f"MPI_Allreduce(&val{n}, &sum{n}, 1, type, op, newcom);"
+fini['MPI_Allreduce'] = lambda n: ""
+free['MPI_Allreduce'] = lambda n: ""
+write['MPI_Allreduce'] = lambda n: ""
+
+init['MPI_Scan'] = lambda n: f"int outbuf{n}[buff_size];\n  memset(outbuf{n}, 0, buff_size*sizeof(int));\n  int inbuf{n}[buff_size];"
+start['MPI_Scan'] = lambda n: ""
+operation['MPI_Scan'] = lambda n: f"MPI_Scan(&outbuf{n}, inbuf{n}, buff_size, type, op, newcom);"
+fini['MPI_Scan'] = lambda n: ""
+free['MPI_Scan'] = lambda n: ""
+write['MPI_Scan'] = lambda n: ""
+
+init['MPI_Exscan'] = lambda n: f"int outbuf{n}[buff_size];\n  memset(outbuf{n}, 0, buff_size*sizeof(int));\n  int inbuf{n}[buff_size];"
+start['MPI_Exscan'] = lambda n: ""
+operation['MPI_Exscan'] = lambda n: f"MPI_Exscan(&outbuf{n}, inbuf{n}, buff_size, type, op, newcom);"
+fini['MPI_Exscan'] = lambda n: ""
+free['MPI_Exscan'] = lambda n: ""
+write['MPI_Exscan'] = lambda n: ""
+
+init['MPI_Allgather'] = lambda n: f"int val{n}=1, *rbuf{n} = (int*)malloc(dbs);"
+start['MPI_Allgather'] = lambda n: ""
+operation['MPI_Allgather'] = lambda n: f"MPI_Allgather(&val{n}, 1, type, rbuf{n}, 1, type, newcom);"
+fini['MPI_Allgather'] = lambda n: ""
+free['MPI_Allgather'] = lambda n: f"free(rbuf{n});"
+write['MPI_Allgather'] = lambda n: ""
+
+init['MPI_Alltoallv'] = lambda n: (f"int *sbuf{n}=(int*)malloc(dbs*2), *rbuf{n}=(int*)malloc(dbs*2), *scounts{n}=(int*)malloc(dbs), *rcounts{n}=(int*)malloc(dbs), *sdispls{n}=(int*)malloc(dbs), *rdispls{n}=(int*)malloc(dbs);\n"
+  +  "  for (int i = 0; i < nprocs; i++) {\n"
+  + f"    scounts{n}[i] = 2;\n"
+  + f"    rcounts{n}[i] = 2;\n"
+  + f"    sdispls{n}[i] = (nprocs - (i + 1)) * 2;\n"
+  + f"    rdispls{n}[i] = i * 2;\n"
+  +  "  }")
+start['MPI_Alltoallv'] = lambda n: ""
+operation['MPI_Alltoallv'] = lambda n: f"MPI_Alltoallv(sbuf{n}, scounts{n}, sdispls{n}, type, rbuf{n}, rcounts{n}, rdispls{n}, type, newcom);"
+fini['MPI_Alltoallv'] = lambda n: ""
+free['MPI_Alltoallv'] = lambda n: f"free(sbuf{n});free(rbuf{n});free(scounts{n});free(rcounts{n});free(sdispls{n});free(rdispls{n});"
+write['MPI_Alltoallv'] = lambda n: ""
+
+init['MPI_Alltoall'] = lambda n: f"int *sbuf{n} = (int*)malloc(dbs), *rbuf{n} = (int*)malloc(dbs);"
+start['MPI_Alltoall'] = lambda n: ""
+operation['MPI_Alltoall'] = lambda n: f"MPI_Alltoall(sbuf{n}, 1, type, rbuf{n}, 1, type, newcom);"
+fini['MPI_Alltoall'] = lambda n: ""
+free['MPI_Alltoall'] = lambda n: f"free(sbuf{n});free(rbuf{n});"
+write['MPI_Alltoall'] = lambda n: ""
+
+init['MPI_Allgatherv'] = lambda n: (f"int *rbuf{n} = (int*)malloc(dbs*2), *rcounts{n}=(int*)malloc(dbs),  *displs{n}=(int*)malloc(dbs);\n"
+  +  "  for (int i = 0; i < nprocs; i++) {\n"
+  + f"    rcounts{n}[i] = 1;\n"
+  + f"    displs{n}[i] = 2 * (nprocs - (i + 1));\n"
+  +  "  }")
+start['MPI_Allgatherv'] = lambda n: ""
+operation['MPI_Allgatherv'] = lambda n: f"MPI_Allgatherv(&rank, 1, type, rbuf{n}, rcounts{n}, displs{n}, type, newcom);"
+fini['MPI_Allgatherv'] = lambda n: ""
+free['MPI_Allgatherv'] = lambda n: f"free(rbuf{n});free(rcounts{n});free(displs{n});"
+write['MPI_Allgatherv'] = lambda n: ""
+
+
+### COLL:nonblocking
+
+init['MPI_Ibarrier'] = lambda n: f"MPI_Request req{n}=MPI_REQUEST_NULL; MPI_Status stat{n};"
+start['MPI_Ibarrier'] = lambda n: ""
+operation['MPI_Ibarrier'] = lambda n: f'MPI_Ibarrier(newcom, &req{n});'
+fini['MPI_Ibarrier'] = lambda n: f"MPI_Wait(&req{n}, &stat{n});"
+free['MPI_Ibarrier'] = lambda n: f'if(req{n} != MPI_REQUEST_NULL) MPI_Request_free(&req{n});'
+write['MPI_Ibarrier'] = lambda n: ""
+
+init['MPI_Ireduce'] = lambda n: f"MPI_Request req{n}=MPI_REQUEST_NULL; MPI_Status stat{n}; int sum{n}, val{n} = 1;"
+start['MPI_Ireduce'] = lambda n: ""
+operation['MPI_Ireduce'] = lambda n: f"MPI_Ireduce(&val{n}, &sum{n}, 1, type, op, root, newcom, &req{n});"
+fini['MPI_Ireduce'] = lambda n: f"MPI_Wait(&req{n}, &stat{n});"
+free['MPI_Ireduce'] = lambda n: f'if(req{n} != MPI_REQUEST_NULL) MPI_Request_free(&req{n});'
+write['MPI_Ireduce'] = lambda n: f"sum{n}++;"
+
+init['MPI_Iallreduce'] = lambda n: f'MPI_Request req{n}=MPI_REQUEST_NULL;MPI_Status stat{n}; int sum{n}, val{n} = 1;'
+start['MPI_Iallreduce'] = lambda n: ""
+operation['MPI_Iallreduce'] = lambda n: f'MPI_Iallreduce(&val{n}, &sum{n}, 1, type, op, newcom, &req{n});'
+fini['MPI_Iallreduce'] = lambda n: f'MPI_Wait(&req{n}, &stat{n});'
+free['MPI_Iallreduce'] = lambda n: f"if(req{n} != MPI_REQUEST_NULL) MPI_Request_free(&req{n});"
+write['MPI_Iallreduce'] = lambda n: f"sum{n}++;"
+
+init['MPI_Ibcast'] = lambda n: f'MPI_Request req{n}=MPI_REQUEST_NULL; MPI_Status sta{n};int buf{n}[buff_size];'
+start['MPI_Ibcast'] = lambda n: ""
+operation['MPI_Ibcast'] = lambda n: f'MPI_Ibcast(buf{n}, buff_size, type, root, newcom, &req{n});'
+fini['MPI_Ibcast'] = lambda n: f"MPI_Wait(&req{n},&sta{n});"
+free['MPI_Ibcast'] = lambda n: f'if(req{n} != MPI_REQUEST_NULL) MPI_Request_free(&req{n});'
+write['MPI_Ibcast'] = lambda n: f'buf{n}[0]++;'
+
+init['MPI_Igather'] = lambda n: f"int val{n}=1, buf{n}[buff_size];MPI_Request req{n}=MPI_REQUEST_NULL;MPI_Status sta{n};"
+start['MPI_Igather'] = lambda n: ""
+operation['MPI_Igather'] = lambda n: f'MPI_Igather(&val{n}, 1, type, &buf{n},1, type, root, newcom, &req{n});'
+write['MPI_Igather'] = lambda n: f'val{n}=3;'
+fini['MPI_Igather'] = lambda n: f'MPI_Wait(&req{n},&sta{n});'
+free['MPI_Igather'] = lambda n: f'if(req{n} != MPI_REQUEST_NULL) MPI_Request_free(&req{n});'
+
+init['MPI_Iscatter'] = lambda n: f"MPI_Request req{n} = MPI_REQUEST_NULL;\n  MPI_Status sta{n};\n  int val{n};\n  int buf{n}[buff_size];\n  memset(buf{n}, 0, buff_size*sizeof(int));"
+start['MPI_Iscatter'] = lambda n: ""
+operation['MPI_Iscatter'] = lambda n: f"MPI_Iscatter(&buf{n}, 1, type, &val{n}, 1, type, root, newcom,&req{n});"
+fini['MPI_Iscatter'] = lambda n: f"MPI_Wait(&req{n},&sta{n});"
+free['MPI_Iscatter'] = lambda n: f'if(req{n} != MPI_REQUEST_NULL) MPI_Request_free(&req{n});'
+write['MPI_Iscatter'] = lambda n: f'buf{n}[0]++;'
+
+init['MPI_Iscan'] = lambda n: f"MPI_Request req{n} = MPI_REQUEST_NULL;\n  MPI_Status sta{n};\n  int outbuf{n}[buff_size];\n  memset(outbuf{n}, 0, buff_size*sizeof(int));\n  int inbuf{n}[buff_size];"
+start['MPI_Iscan'] = lambda n: ""
+operation['MPI_Iscan'] = lambda n: f"MPI_Iscan(&outbuf{n}, inbuf{n}, buff_size, type, op, newcom,&req{n});"
+fini['MPI_Iscan'] = lambda n: f"MPI_Wait(&req{n},&sta{n});"
+free['MPI_Iscan'] = lambda n: f'if(req{n} != MPI_REQUEST_NULL) MPI_Request_free(&req{n});'
+write['MPI_Iscan'] = lambda n: f'outbuf{n}[0]++;'
+
+init['MPI_Iexscan'] = lambda n: f"MPI_Request req{n}=MPI_REQUEST_NULL;MPI_Status sta{n};\n  int outbuf{n}[buff_size];\n  memset(outbuf{n}, 0, buff_size*sizeof(int));\n  int inbuf{n}[buff_size];"
+start['MPI_Iexscan'] = lambda n: ""
+operation['MPI_Iexscan'] = lambda n: f"MPI_Iexscan(&outbuf{n}, inbuf{n}, buff_size, type, op, newcom,&req{n});"
+fini['MPI_Iexscan'] = lambda n: f"MPI_Wait(&req{n},&sta{n});"
+free['MPI_Iexscan'] = lambda n: f'if(req{n} != MPI_REQUEST_NULL) MPI_Request_free(&req{n});'
+write['MPI_Iexscan'] = lambda n: f'outbuf{n}[0]++;'
+
+init['MPI_Iallgather'] = lambda n: f"MPI_Request req{n}=MPI_REQUEST_NULL;MPI_Status sta{n};int val{n}=1, *rbuf{n} = (int*)malloc(dbs);"
+start['MPI_Iallgather'] = lambda n: ""
+operation['MPI_Iallgather'] = lambda n: f"MPI_Iallgather(&val{n}, 1, type, rbuf{n}, 1, type, newcom,&req{n});"
+fini['MPI_Iallgather'] = lambda n: f"MPI_Wait(&req{n},&sta{n});"
+free['MPI_Iallgather'] = lambda n: f"free(rbuf{n});"
+write['MPI_Iallgather'] = lambda n: f'val{n}++;'
+
+init['MPI_Iallgatherv'] = lambda n: (f"MPI_Request req{n}=MPI_REQUEST_NULL;MPI_Status sta{n};int *rbuf{n} = (int*)malloc(dbs*2), *rcounts{n}=(int*)malloc(dbs),  *displs{n}=(int*)malloc(dbs);\n"
+  +  "  for (int i = 0; i < nprocs; i++) {\n"
+  + f"    rcounts{n}[i] = 1;\n"
+  + f"    displs{n}[i] = 2 * (nprocs - (i + 1));\n"
+  +  "  }")
+start['MPI_Iallgatherv'] = lambda n: ""
+operation['MPI_Iallgatherv'] = lambda n: f"MPI_Iallgatherv(&rank, 1, type, rbuf{n}, rcounts{n}, displs{n}, type, newcom,&req{n});"
+fini['MPI_Iallgatherv'] = lambda n: f"MPI_Wait(&req{n},&sta{n});"
+free['MPI_Iallgatherv'] = lambda n: f"free(rbuf{n});free(rcounts{n});free(displs{n});"
+write['MPI_Iallgatherv'] = lambda n: f"rbuf{n}[0]++;"
+
+init['MPI_Ialltoall'] = lambda n: f"MPI_Request req{n}=MPI_REQUEST_NULL;MPI_Status sta{n};int *sbuf{n} = (int*)malloc(dbs), *rbuf{n} = (int*)malloc(dbs);"
+start['MPI_Ialltoall'] = lambda n: ""
+operation['MPI_Ialltoall'] = lambda n: f"MPI_Ialltoall(sbuf{n}, 1, type, rbuf{n}, 1, type, newcom, &req{n});"
+fini['MPI_Ialltoall'] = lambda n: f"MPI_Wait(&req{n},&sta{n});"
+free['MPI_Ialltoall'] = lambda n: f"free(sbuf{n});free(rbuf{n});"
+write['MPI_Ialltoall'] = lambda n: f"rbuf{n}[0]++;"
+
+init['MPI_Ialltoallv'] = lambda n: (f"MPI_Request req{n}=MPI_REQUEST_NULL;MPI_Status sta{n};int *sbuf{n}=(int*)malloc(dbs*2), *rbuf{n}=(int*)malloc(dbs*2), *scounts{n}=(int*)malloc(dbs), *rcounts{n}=(int*)malloc(dbs), *sdispls{n}=(int*)malloc(dbs), *rdispls{n}=(int*)malloc(dbs);\n"
+  +  "  for (int i = 0; i < nprocs; i++) {\n"
+  + f"    scounts{n}[i] = 2;\n"
+  + f"    rcounts{n}[i] = 2;\n"
+  + f"    sdispls{n}[i] = (nprocs - (i + 1)) * 2;\n"
+  + f"    rdispls{n}[i] = i * 2;\n"
+  +  "  }")
+start['MPI_Ialltoallv'] = lambda n: ""
+operation['MPI_Ialltoallv'] = lambda n: f"MPI_Ialltoallv(sbuf{n}, scounts{n}, sdispls{n}, type, rbuf{n}, rcounts{n}, rdispls{n}, type, newcom,&req{n});"
+fini['MPI_Ialltoallv'] = lambda n: f"MPI_Wait(&req{n},&sta{n});"
+free['MPI_Ialltoallv'] = lambda n: f"free(sbuf{n});free(rbuf{n});free(scounts{n});free(rcounts{n});free(sdispls{n});free(rdispls{n});"
+write['MPI_Ialltoallv'] = lambda n: f"rbuf{n}[0]++;"
+
+### COLL:persistent
+
+
+
+### COLL:tools
+
+init['MPI_Comm_split'] = lambda n: 'MPI_Comm com[size]; int color = rank % 2; int key = 1;'
+start['MPI_Comm_split'] = lambda n: ""
+operation['MPI_Comm_split'] = lambda n: 'MPI_Comm_split(MPI_COMM_WORLD,color,key, &com[j]);'
+error['MPI_Comm_split'] = 'CommunicatorLeak'
+fini['MPI_Comm_split'] = lambda n: "if(com[j] != MPI_COMM_NULL) MPI_Comm_free(&com[j]);"
+free['MPI_Comm_split'] = lambda n: ""
+
+
+init['MPI_Cart_get'] = lambda n: ""
+start['MPI_Cart_get'] = lambda n: ""
+operation['MPI_Cart_get'] = lambda n: 'MPI_Cart_get(newcom, 2, dims, periods, coords);'
+write['MPI_Cart_get'] = lambda n: ""
+fini['MPI_Cart_get'] = lambda n: ""
+free['MPI_Cart_get'] = lambda n: ""
+
+
+init['MPI_Op_create'] = lambda n: 'MPI_Op op[size];'
+operation['MPI_Op_create'] = lambda n: 'MPI_Op_create((MPI_User_function *)myOp, 0, &op[j]);'
+error['MPI_Op_create'] = 'OperatorLeak'
+fini['MPI_Op_create'] = lambda n: "MPI_Op_free(&op[j]);"
+free['MPI_Op_create'] = lambda n: ""
+
+init['MPI_Comm_group'] = lambda n: 'MPI_Group grp[size];'
+operation['MPI_Comm_group'] = lambda n: 'MPI_Comm_group(MPI_COMM_WORLD, &grp[j]);'
+error['MPI_Comm_group'] = 'GroupLeak'
+fini['MPI_Comm_group'] = lambda n: "MPI_Group_free(&grp[j]);"
+free['MPI_Comm_group'] = lambda n: ""
+
+init['MPI_Group_excl'] = lambda n: 'MPI_Group worldgroup, grp[size];\n MPI_Comm_group(MPI_COMM_WORLD, &worldgroup);'
+operation['MPI_Group_excl'] = lambda n: 'MPI_Group_excl(worldgroup, 1, &rank, &grp[j]);'
+error['MPI_Group_excl'] = 'GroupLeak'
+fini['MPI_Group_excl'] = lambda n: "MPI_Group_free(&grp[j]);"
+free['MPI_Group_excl'] = lambda n: "MPI_Group_free(&worldgroup);"
+
+init['MPI_Comm_create'] = lambda n: 'MPI_Comm com[size]; MPI_Group grp[size];'
+operation['MPI_Comm_create'] = lambda n: 'MPI_Comm_group(MPI_COMM_WORLD, &grp[j]);\n MPI_Comm_create(MPI_COMM_WORLD, grp[j], &com[j]);\n MPI_Group_free(&grp[j]);'
+error['MPI_Comm_create'] = 'CommunicatorLeak'
+fini['MPI_Comm_create'] = lambda n: "MPI_Comm_free(&com[j]);"
+free['MPI_Comm_create'] = lambda n: ""
+
+init['MPI_Comm_dup'] = lambda n: 'MPI_Comm com[size];'
+operation['MPI_Comm_dup'] = lambda n: 'MPI_Comm_dup(MPI_COMM_WORLD, &com[j]);'
+error['MPI_Comm_dup'] = 'CommunicatorLeak'
+fini['MPI_Comm_dup'] = lambda n: "MPI_Comm_free(&com[j]);"
+free['MPI_Comm_dup'] = lambda n: ""
+
+init['MPI_Type_contiguous'] = lambda n: 'MPI_Datatype type[size];'
+operation['MPI_Type_contiguous'] = lambda n: 'MPI_Type_contiguous(2, MPI_DOUBLE, &type[j]);'
+error['MPI_Type_contiguous'] = 'TypeLeak'
+fini['MPI_Type_contiguous'] = lambda n: "MPI_Type_free(&type[j]);"
+free['MPI_Type_contiguous'] = lambda n: ""
+
+
+
+
+### P2P:basic
+
+init['MPI_Send'] = lambda n: f'int buf{n}=rank;'
+start['MPI_Send'] = lambda n: ""
+operation['MPI_Send'] = lambda n: f'MPI_Send(&buf{n}, buff_size, type, dest, stag, newcom);'
+fini['MPI_Send'] = lambda n: ""
+free['MPI_Send'] = lambda n: ""
+write['MPI_Send'] = lambda n: ""
+
+init['MPI_Ssend'] = lambda n: f'int buf{n}=rank;'
+start['MPI_Ssend'] = lambda n: ""
+operation['MPI_Ssend'] = lambda n: f'MPI_Ssend(&buf{n}, buff_size, type, dest, stag, newcom);'
+fini['MPI_Ssend'] = lambda n: ""
+free['MPI_Ssend'] = lambda n: ""
+write['MPI_Ssend'] = lambda n: ""
+
+init['MPI_Bsend'] = lambda n: (f'int buf{n}=rank;\n'
+            + f'int buffer_attached_size{n} = MPI_BSEND_OVERHEAD + sizeof(int);\n'
+            + f'char* buffer_attached{n} = (char*)malloc(buffer_attached_size{n});\n'
+            + f'MPI_Buffer_attach(buffer_attached{n}, buffer_attached_size{n});')
+start['MPI_Bsend'] = lambda n: ""
+operation['MPI_Bsend'] = lambda n: f'MPI_Bsend(&buf{n}, buff_size, type, dest, stag, newcom);'
+fini['MPI_Bsend'] = lambda n: ""
+free['MPI_Bsend'] = (lambda n: f'MPI_Buffer_detach(&buffer_attached{n}, &buffer_attached_size{n});\n'
+            + f'free(buffer_attached{n});')
+write['MPI_Bsend'] = lambda n: ""
+
+init['MPI_Recv'] = lambda n: f'int buf{n}=-1; MPI_Status sta{n};'
+start['MPI_Recv'] = lambda n: ""
+operation['MPI_Recv'] = lambda n: f'MPI_Recv(&buf{n}, buff_size, type, src, rtag, newcom, &sta{n});'
+fini['MPI_Recv'] = lambda n: ""
+free['MPI_Recv'] = lambda n: ""
+write['MPI_Recv'] = lambda n: ""
+
+init['MPI_Probe'] = lambda n: ""
+start['MPI_Probe'] = lambda n: ""
+operation['MPI_Probe'] = lambda n: 'MPI_Probe(src, 0, newcom, &sta);'
+fini['MPI_Probe'] = lambda n: ""
+free['MPI_Probe'] = lambda n: ""
+write['MPI_Probe'] = lambda n: ""
+
+init['MPI_Sendrecv'] = lambda n: f'int sbuf{n}[N+2]={{rank}}; int rbuf{n}[N]={{rank}}; int * psbuf{n} = &sbuf{n}[0]; int * prbuf{n} = &rbuf{n}[0]; MPI_Status sta{n};'
+start['MPI_Sendrecv'] = lambda n: ""
+operation['MPI_Sendrecv'] = lambda n: f'MPI_Sendrecv(psbuf{n}, buff_size, type, dest, stag, prbuf{n}, buff_size, type, src, rtag, newcom, &sta{n});'
+fini['MPI_Sendrecv'] = lambda n: ""
+free['MPI_Sendrecv'] = lambda n: ""
+write['MPI_Sendrecv'] = lambda n: f"prbuf{n} = &sbuf{n}[2];"
+
+
+### P2P:nonblocking
+
+init['MPI_Isend'] = lambda n: f'int buf{n}=rank; MPI_Request req{n}=MPI_REQUEST_NULL;'
+start['MPI_Isend'] = lambda n: ""
+operation['MPI_Isend'] = lambda n: f'MPI_Isend(&buf{n}, buff_size, type, dest, stag, newcom, &req{n});'
+fini['MPI_Isend'] = lambda n: f'MPI_Wait(&req{n}, MPI_STATUS_IGNORE);'
+free['MPI_Isend'] = lambda n: f'if(req{n} != MPI_REQUEST_NULL) MPI_Request_free(&req{n});'
+write['MPI_Isend'] = lambda n: f'buf{n}=4;'
+
+init['MPI_Irecv'] = lambda n: f'int buf{n}=-1; MPI_Request req{n}=MPI_REQUEST_NULL;'
+start['MPI_Irecv'] = lambda n: ""
+operation['MPI_Irecv'] = lambda n: f'MPI_Irecv(&buf{n}, buff_size, type, src, rtag, newcom, &req{n});'
+fini['MPI_Irecv'] = lambda n: f' MPI_Wait(&req{n}, MPI_STATUS_IGNORE);'
+free['MPI_Irecv'] = lambda n: f'if(req{n} != MPI_REQUEST_NULL) MPI_Request_free(&req{n});'
+write['MPI_Irecv'] = lambda n: f'buf{n}++;'
+
+### P2P:persistent
+
+init['MPI_Send_init'] = lambda n: f'int buf{n}=rank; MPI_Request req{n}=MPI_REQUEST_NULL;'
+operation['MPI_Send_init'] = lambda n: f'MPI_Send_init(&buf{n}, buff_size, type, dest, stag, newcom, &req{n});'
+start['MPI_Send_init'] = lambda n: f'MPI_Start(&req{n});'
+fini['MPI_Send_init'] = lambda n: f'MPI_Wait(&req{n}, MPI_STATUS_IGNORE);'
+free['MPI_Send_init'] = lambda n: f'if(req{n} != MPI_REQUEST_NULL) MPI_Request_free(&req{n});'
+write['MPI_Send_init'] = lambda n: f'buf{n}=4;'
+
+init['MPI_Recv_init'] = lambda n: f'int buf{n}=-1; MPI_Request req{n}=MPI_REQUEST_NULL;'
+start['MPI_Recv_init'] = lambda n: f'MPI_Start(&req{n});'
+operation['MPI_Recv_init'] = lambda n: f'MPI_Recv_init(&buf{n}, buff_size, type, src, rtag, newcom, &req{n});'
+fini['MPI_Recv_init'] = lambda n: f'MPI_Wait(&req{n}, MPI_STATUS_IGNORE);'
+free['MPI_Recv_init'] = lambda n: f'if(req{n} != MPI_REQUEST_NULL) MPI_Request_free(&req{n});'
+write['MPI_Recv_init'] = lambda n: f'buf{n}++;'
+
+### RMA
+
+epoch['MPI_Win_fence'] = lambda n: 'MPI_Win_fence(0, win);'
+finEpoch['MPI_Win_fence'] = lambda n: 'MPI_Win_fence(0, win);'
+syncEpoch['MPI_Win_fence'] = lambda n: 'MPI_Win_fence(0, win);'
+#epoch['MPI_Win_lock'] = lambda n: 'MPI_Win_lock(MPI_LOCK_SHARED, target, 0, win);'
+#finEpoch['MPI_Win_lock'] = lambda n: 'MPI_Win_unlock(target, win);'
+#syncEpoch['MPI_Win_lock'] = lambda n: ''
+epoch['MPI_Win_lock_all'] = lambda n: 'MPI_Win_lock_all(0,win);'
+finEpoch['MPI_Win_lock_all'] = lambda n: 'MPI_Win_unlock_all(win);'
+syncEpoch['MPI_Win_lock_all'] = lambda n: 'MPI_Win_flush(1,win);'
+
+init['MPI_Put'] = lambda n: f'int localbuf{n}[N] = {{12345}};'
+operation['MPI_Put'] = lambda n: f'MPI_Put(&localbuf{n}, N, MPI_INT, target, 0, N, type, win);'
+
+init['MPI_Get'] = lambda n: f'int localbuf{n}[N] = {{54321}};'
+operation['MPI_Get'] = lambda n: f'MPI_Get(&localbuf{n}, N, MPI_INT, target, 0, N, type, win);'
+
+init['store'] = lambda n: f'int localbuf{n}[N] = {{0}};'
+operation['store'] = lambda n: f'localbuf{n}[0] = 8;'
+
+init['rstore'] = lambda n: ""
+operation['rstore'] = lambda n: f'winbuf[5] = 12346;'
+
+init['load'] = lambda n: f'int localbuf{n}[N] = {{0}};'
+operation['load'] = lambda n: f'int load = localbuf{n}[0];'
+
+init['rload'] = lambda n: ""
+operation['rload'] = lambda n: "int load = winbuf[5];"
+
+init['loadstore'] = lambda n: f'int localbuf{n}[N] = {{0}};'
+operation['loadstore'] = lambda n: f'if (localbuf{n}[0] % 2 == 0)  localbuf{n}[0]++; '
+
+
+
+
+def find_line(content, target, filename):
+    res = 1
+    for line in content.split('\n'):
+        if re.search(f'[^:]{target}', line):
+            #print(f'Found {target} at {line}')
+            return res
+        res += 1
+    raise ValueError(f"Line target {target} not found in {filename}.")
+
+
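+# Substitution is done in two passes: plain @{name}@ placeholders are expanded
+# first, and @{line:TARGET}@ placeholders are resolved afterwards against the
+# already substituted output, so that the reported line numbers match the
+# generated file.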
+def make_file(template, filename, replace):
+    output = template
+    filename = filename.replace("_MPI_", "_")
+    replace['filename'] = filename
+    # Replace all variables that don't have a ':' in their name
+    while re.search(r'@\{[^@:]*\}@', output):
+        m = re.search(r'@\{([^@:]*)\}@', output)
+        target = m.group(1)
+        #print(f"Replace @{{{target}}}@")
+        if target in replace.keys():
+            output = re.sub(fr'@\{{{target}\}}@', replace[target], output)
+            #print(f"Replace {target} -> {replace[target]}")
+        else:
+            raise ValueError(f"Variable {target} used in template, but not defined.")
+    # Now replace the variables with a ':' in their name (line targets): they must be resolved last, once the other substitutions have settled the line numbers
+    while re.search(r'@\{([^:@]*):([^@]*)\}@', output):
+        m = re.search(r'@\{([^:@]*):([^@]*)\}@', output)
+        (kind, target) = (m.group(1), m.group(2))
+        if kind == 'line':
+            lineno = f'{find_line(output, target, filename)}'  # do not shadow the 'replace' dict
+            #print(f"Replace @{{line:{target}}}@ with '{lineno}'")
+            output = re.sub(fr'@\{{line:{target}\}}@', lineno, output)
+        else:
+            raise ValueError(f"Unknown variable kind: {kind}:{target}")
+
+    if os.path.exists(filename):
+        with open(filename, 'r') as file:
+            prev = file.read().split('\n')[0]
+            prev = re.sub('^.*?scripts/', 'scripts/', prev)
+            prev = re.sub('. DO NOT EDIT.', '', prev)
+        now = output.split('\n')[0]
+        now = re.sub('^.*?scripts/', 'scripts/', now)
+        now = re.sub('. DO NOT EDIT.', '', now)
+
+        print(f'WARNING: overwriting {filename}. Previously generated by: {prev}; regenerated by {now}')
+
+    # Ready to output it
+    with open(filename, 'w') as outfile:
+        outfile.write(output)
diff --git a/scripts/test-all b/scripts/test-all
new file mode 100755
index 0000000000000000000000000000000000000000..c4859dbced0a371a507187bbc3a7cb826a53b70d
--- /dev/null
+++ b/scripts/test-all
@@ -0,0 +1,38 @@
+#! /bin/sh
+
+set -e # Fail on error
+
+# Go to the main MBI/ directory
+cd "$(dirname "$0")"/..
+
+cmd_docker() {
+    docker build -f Dockerfile -t mpi-bugs-initiative:latest .
+    docker pull mpisv/mpi-sv
+    docker pull ubuntu:18.04
+}
+
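+# Run every analysis tool on the generated codes. Most tools run inside the
+# mpi-bugs-initiative image; aislinn needs a plain ubuntu:18.04 container and
+# MPI-SV ships its own image, hence the different 'docker run' invocations below.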
+cmd_run() {
+#    rm -rf gencodes
+    docker run -it --rm --name MIB --volume "$(pwd)":/MBI mpi-bugs-initiative /MBI/MBI.py -c generate
+
+    docker run -it --rm --name MIB --volume "$(pwd)":/MBI mpi-bugs-initiative /MBI/MBI.py -x parcoach -c run "$@"
+    docker run -it --rm --name MIB --volume "$(pwd)":/MBI mpi-bugs-initiative /MBI/MBI.py -x simgrid -c run "$@"
+    docker run -it --rm --name MIB --volume "$(pwd)":/MBI mpi-bugs-initiative /MBI/MBI.py -x isp -c run "$@"
+    docker run -it --rm --name MIB --volume "$(pwd)":/MBI mpi-bugs-initiative /MBI/MBI.py -x smpi -c run "$@"
+    docker run -it --rm --name MIB --volume "$(pwd)":/MBI mpi-bugs-initiative /MBI/MBI.py -x smpivg -c run "$@"
+    docker run -it --rm --name MIB --volume "$(pwd)":/MBI ubuntu:18.04 /MBI/scripts/ensure_python3 /MBI/MBI.py -x aislinn -c run "$@"
+    docker run -it --rm --name MIB --volume "$(pwd)":/MBI mpi-bugs-initiative /MBI/MBI.py -x civl -c run "$@"
+    docker run -it --rm --name MIB --volume "$(pwd)":/MBI --shm-size=512m mpisv/mpi-sv /MBI/scripts/ensure_python3 /MBI/MBI.py -x mpisv -c run "$@"
+    docker run -it --rm --name MIB --volume "$(pwd)":/MBI --shm-size=512m mpi-bugs-initiative /MBI/MBI.py -x itac -c run "$@"
+    docker run -it --rm --name MIB --volume "$(pwd)":/MBI mpi-bugs-initiative /MBI/MBI.py -x must -c run "$@"
+    docker run -it --rm --name MIB --volume $(pwd):/MBI mpi-bugs-initiative /MBI/MBI.py -x hermes
+}
+
+cmd_stats() {
+    docker run -it --rm --name MIB --volume "$(pwd)":/MBI mpi-bugs-initiative /MBI/MBI.py -c html
+    docker run -it --rm --name MIB --volume "$(pwd)":/MBI mpi-bugs-initiative /MBI/MBI.py -c latex
+}
+
+#cmd_docker
+cmd_run "$@"
+cmd_stats
diff --git a/scripts/tools/aislinn.py b/scripts/tools/aislinn.py
new file mode 100644
index 0000000000000000000000000000000000000000..5f45b992c8d2e39b70c8b66e377d2bda582e53d3
--- /dev/null
+++ b/scripts/tools/aislinn.py
@@ -0,0 +1,161 @@
+import re
+import os
+import sys
+from MBIutils import *
+
+class Tool(AbstractTool):
+    def identify(self):
+        return "Aislinn wrapper"
+
+    def ensure_image(self):
+        id = subprocess.run("grep '^ID=' /etc/os-release|sed 's/.*=//'", shell=True, capture_output=True, text=True)
+        ver = subprocess.run("grep '^VERSION_ID=' /etc/os-release|sed 's/.*=//'", shell=True, capture_output=True, text=True)
+        if id.stdout == "ubuntu\n" and ver.stdout == '"18.04"\n':
+            print("This is an Ubuntu 18.04 OS. Good.")
+        else:
+            print(f"id: '{id.stdout}'; version: '{ver.stdout}'")
+            print("Please run this script in a ubuntu:18.04 image. Run these commands:")
+            print("  docker image pull ubuntu:18.04")
+            print("  docker run -it --rm --name MIB --volume $(pwd):/MBI ubuntu:18.04 /MBI/MBI.py -x aislinn")
+            sys.exit(1)
+
+    def build(self, rootdir, cached=True):
+        print("Aislinn is only built during setup, if really needed (it's not using the same docker image).")
+
+    def setup(self):
+        os.environ['PATH'] = os.environ['PATH'] + ":/MBI-builds/aislinn/bin/"
+
+        if os.path.exists("/tmp/aislinn.configured"):
+            return
+        subprocess.run("touch /tmp/aislinn.configured", shell=True, check=True)
+
+        subprocess.run("apt-get install -y --force-yes gcc python2.7 python-jinja2", shell=True, check=True)
+        if not os.path.exists(f"/MBI-builds/aislinn/bin/aislinn-cc"):
+            print("XX Building aislinn")
+            subprocess.run("apt-get update && apt-get install -y --force-yes gcc python3.8 autotools-dev automake build-essential git", shell=True, check=True)
+
+            # Get a GIT checkout. Either create it, or refresh it
+            if os.path.exists("/MBI-builds/aislinn/.git"):
+                subprocess.run("cd /MBI-builds/aislinn && git pull &&  cd ../..", shell=True, check=True)
+            else:
+                subprocess.run(f"rm -rf /MBI-builds/aislinn; mkdir -p /MBI-builds", shell=True, check=True)
+                subprocess.run(f"git clone --depth=1 https://github.com/spirali/aislinn.git /MBI-builds/aislinn", shell=True, check=True)
+                subprocess.run(f"git clone --depth=1 --recursive https://github.com/spirali/aislinn-valgrind /MBI-builds/aislinn/valgrind", shell=True, check=True)
+
+            # Build it
+            here = os.getcwd() # Save where we were
+            os.chdir(f"/MBI-builds/aislinn/valgrind")
+            with open('patchfile', 'w') as outfile:
+                outfile.write("--- a/configure.ac\n")
+                outfile.write("+++ b/configure.ac\n")
+                outfile.write("@@ -160,7 +160,7 @@\n")
+                outfile.write("      icc-1[[3-9]].*)\n")
+                outfile.write(" \tAC_MSG_RESULT([ok (ICC version ${gcc_version})])\n")
+                outfile.write(" \t;;\n")
+                outfile.write("-     notclang-[[3-9]].*|notclang-[[1-9][0-9]]*)\n")
+                outfile.write("+     notclang-[[3-9]]*|notclang-[[1-9][0-9]]*)\n")
+                outfile.write(" \tAC_MSG_RESULT([ok (${gcc_version})])\n")
+                outfile.write(" \t;;\n")
+                outfile.write("      clang-2.9|clang-[[3-9]].*|clang-[[1-9][0-9]]*)\n")
+            subprocess.run("patch -p1 < patchfile", shell=True, check=True)
+
+            subprocess.run("sh autogen.sh && ./configure && make -j$(nproc)", shell=True, check=True)
+
+            os.chdir(f"/MBI-builds/aislinn")
+            subprocess.run("./waf configure && ./waf", shell=True, check=True)
+
+            # Back to our previous directory
+            os.chdir(here)
+            print("XX Done building aislinn")
+
+    def run(self, execcmd, filename, binary, id, timeout, batchinfo):
+        cachefile = f'{binary}_{id}'
+
+        execcmd = re.sub("mpirun", "aislinn", execcmd)
+        execcmd = re.sub('\${EXE}', binary, execcmd)
+        execcmd = re.sub('\$zero_buffer', "--send-protocol=rendezvous", execcmd)
+        execcmd = re.sub('\$infty_buffer', "--send-protocol=eager", execcmd)
+        execcmd = re.sub('-np ', '-p=', execcmd)
+
+        ran = self.run_cmd(
+            buildcmd=f"aislinn-cc -g {filename} -o {binary}",
+            execcmd=execcmd,
+            cachefile=cachefile,
+            filename=filename,
+            binary=binary,
+            timeout=timeout,
+            batchinfo=batchinfo)
+
+        if os.path.exists("./report.html"):
+            os.rename("./report.html", f"{binary}_{id}.html")
+
+        if ran:
+            subprocess.run(f"rm -f {binary} vgcore.*", shell=True, check=True) # Save disk space ASAP
+
+    def teardown(self): # Remove generated cruft (binary files)
+        subprocess.run("find -type f -a -executable | xargs rm -f", shell=True, check=True)
+
+    def parse(self, cachefile):
+        if os.path.exists(f'{cachefile}.timeout') or os.path.exists(f'logs/aislinn/{cachefile}.timeout'):
+            return 'timeout'
+        if not (os.path.exists(f'{cachefile}.txt') or os.path.exists(f'logs/aislinn/{cachefile}.txt')):
+            return 'failure'
+
+        with open(f'{cachefile}.txt' if os.path.exists(f'{cachefile}.txt') else f'logs/aislinn/{cachefile}.txt', 'r') as infile:
+            output = infile.read()
+
+        if re.search('MBI_MSG_RACE', output):
+            return 'MBI_MSG_RACE'
+
+        if re.search('No errors found', output):
+            return 'OK'
+
+        if re.search("INFO: Found error 'Deadlock'", output):
+            return 'deadlock'
+        if re.search("INFO: Found error 'Pending message'", output):
+            return 'Pending message'
+
+        if re.search("INFO: Found error 'Invalid color'", output):
+            return 'Invalid color'
+        if re.search("INFO: Found error 'Invalid communicator'", output):
+            return 'Invalid communicator'
+        if re.search("INFO: Found error 'Invalid count'", output):
+            return 'Invalid count'
+        if re.search("INFO: Found error 'Invalid datatype'", output):
+            return 'Invalid datatype'
+        if re.search("INFO: Found error 'Invalid group'", output):
+            return 'Invalid group'
+        if re.search("INFO: Found error 'Invalid operation'", output):
+            return 'Invalid operation'
+        if re.search("INFO: Found error 'Invalid rank'", output):
+            return 'Invalid rank'
+        if re.search("INFO: Found error 'Invalid request'", output):
+            return 'Invalid request'
+        if re.search("INFO: Found error 'Invalid tag'", output):
+            return 'Invalid tag'
+
+        if re.search("INFO: Found error 'Invalid write'", output):
+            return 'concurrency error'
+        if re.search("INFO: Found error 'Request is not persistent'", output):
+            return 'Request is not persistent'
+        if re.search("INFO: Found error 'Pending request'", output):
+            return 'Pending request'
+
+        if re.search("INFO: Found error 'Collective operation: root mismatch'", output):
+            return 'Collective operation: root mismatch'
+        if re.search("INFO: Found error 'Collective operation mismatch'", output):
+            return 'Collective operation mismatch'
+        if re.search("INFO: Found error 'Mixing blocking and nonblocking collective operation'", output):
+            return 'Mixing blocking and nonblocking collective operation'
+
+
+        if re.search('Unkn?own function call', output) or re.search('Compilation of .*? raised an error \(retcode: ', output):
+            return 'UNIMPLEMENTED'
+
+        if re.search("Traceback \(most recent call last\):", output):
+            return 'failure'
+
+        print (f">>>>[ INCONCLUSIVE ]>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> (aislinn/{cachefile})")
+        print(output)
+        print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
+        return 'other'
diff --git a/scripts/tools/civl.py b/scripts/tools/civl.py
new file mode 100644
index 0000000000000000000000000000000000000000..8491ebb86f6278357f2a33e498ce49398dc74969
--- /dev/null
+++ b/scripts/tools/civl.py
@@ -0,0 +1,142 @@
+import re
+import os
+from MBIutils import *
+
+class Tool(AbstractTool):
+    def identify(self):
+        return "CIVL wrapper"
+
+    def ensure_image(self):
+        AbstractTool.ensure_image(self, "-x civl")
+
+    def build(self, rootdir, cached=True):
+        if cached and os.path.exists(f"/MBI-builds/civl.jar"):
+            return
+
+        print("XX Building CIVL")
+        subprocess.run(f"rm -rf {rootdir}/tools/CIVL && mkdir -p {rootdir}/tools/CIVL", shell=True, check=True)
+        here = os.getcwd() # Save where we were
+        os.chdir(f"{rootdir}/tools/CIVL")
+        subprocess.run(f"wget http://vsl.cis.udel.edu:8080/lib/sw/civl/1.21/r5476/release/CIVL-1.21_5476.tgz", shell=True, check=True)
+        subprocess.run(f"tar xf CIVL-*.tgz", shell=True, check=True)
+        if not os.path.exists('/MBI-builds'):
+            subprocess.run(f"mkdir /MBI-builds", shell=True, check=True)
+        subprocess.run(f"mv CIVL-*/lib/civl-*.jar /MBI-builds/civl.jar", shell=True, check=True)
+        subprocess.run(f"cd /MBI-builds && java -jar civl.jar config", shell=True, check=True)
+        subprocess.run(f"rm -rf {rootdir}/tools/CIVL", shell=True, check=True)
+
+        # Back to our previous directory
+        os.chdir(here)
+
+    def run(self, execcmd, filename, binary, id, timeout, batchinfo):
+        cachefile = f'{binary}_{id}'
+
+        execcmd = re.sub("mpirun", "java -jar /MBI-builds/civl.jar verify", execcmd)
+        execcmd = re.sub('-np ', "-input_mpi_nprocs=", execcmd)
+        execcmd = re.sub('\${EXE}.*', filename, execcmd)
+        execcmd = re.sub('\$zero_buffer', "", execcmd)
+        execcmd = re.sub('\$infty_buffer', "", execcmd)
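+        # The resulting command looks like "java -jar /MBI-builds/civl.jar verify -input_mpi_nprocs=2 <source file>"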
+
+
+        if self.run_cmd(buildcmd=None,
+                   execcmd=execcmd,
+                   cachefile=cachefile,
+                   filename=filename,
+                   binary=binary,
+                   timeout=timeout,
+                   batchinfo=batchinfo):
+            # the test was actually run
+            subprocess.run("killall -9 java 2>/dev/null", shell=True)
+
+
+    def parse(self, cachefile):
+        if os.path.exists(f'{cachefile}.timeout') or os.path.exists(f'logs/civl/{cachefile}.timeout'):
+            return 'timeout'
+        if not (os.path.exists(f'{cachefile}.txt') or os.path.exists(f'logs/civl/{cachefile}.txt')):
+            return 'failure'
+
+        with open(f'{cachefile}.txt' if os.path.exists(f'{cachefile}.txt') else f'logs/civl/{cachefile}.txt', 'r') as infile:
+            output = infile.read()
+
+        if re.search('Compilation of .*? raised an error \(retcode: ', output):
+            return 'UNIMPLEMENTED'
+        if re.search("cannot be invoked without MPI_Init\(\) being called before", output):
+            return 'mpierr'
+
+        if re.search('DEADLOCK', output):
+            return 'deadlock'
+
+        if re.search('MBI_MSG_RACE', output):
+            return 'MBI_MSG_RACE'
+
+        if re.search('reaches an MPI collective routine .*? while at least one of others are collectively reaching MPI_', output):
+            return 'collective mismatch'
+        if re.search('which has an inconsistent datatype specification with at least one of others', output):
+            return 'datatype mismatch'
+        if re.search('of MPI routines is not consistent with the specified MPI_Datatype', output):
+            return 'datatype mismatch'
+        if re.search('which has a different root with at least one of others', output):
+            return 'root mismatch'
+        if re.search('has a different MPI_Op', output):
+            return 'various'
+
+        if re.search('MPI message leak', output):
+            return 'resleak'
+
+        if re.search('MEMORY_LEAK', output):
+            return 'resleak'
+
+        if re.search('The standard properties hold for all executions', output):
+            return 'OK'
+
+        if re.search('A CIVL internal error has occurred', output):
+            return 'failure'
+
+        if re.search('kind: UNDEFINED_VALUE, certainty: MAYBE', output) or re.search('kind: UNDEFINED_VALUE, certainty: PROVEABLE', output):
+            return 'UNDEFINED_VALUE'
+        if re.search('kind: DEREFERENCE, certainty: MAYBE', output) or re.search('kind: DEREFERENCE, certainty: PROVEABLE', output):
+            return 'DEREFERENCE'
+        if re.search('kind: MPI_ERROR, certainty: MAYBE', output) or re.search('kind: MPI_ERROR, certainty: PROVEABLE', output):
+            return 'MPI_ERROR'
+
+        if re.search('ASSERTION_VIOLATION', output):
+            return 'ASSERTION_VIOLATION'
+
+        if re.search('OUT_OF_BOUNDS', output):
+            return 'OUT_OF_BOUNDS'
+
+        if re.search('This feature is not yet implemented', output):
+            return 'UNIMPLEMENTED'
+        if re.search('doesn.t have a definition', output):
+            return 'UNIMPLEMENTED'
+        if re.search('Undeclared identifier', output):
+            return 'UNIMPLEMENTED'
+
+        # The following is categorized as a CIVL bug, because it reports a nonexistent error when a communicator is tested for inequality
+        #
+        # Error: Incompatible types for operator NEQ:
+        # struct MPI_Comm
+        # struct MPI_Comm
+        # at CollInvalidDim_Cart_create_nok.c:67.7-27
+        # if (comm != MPI_COMM_NULL)
+        #     ^^^^^^^^^^^^^^^^^^^^^
+        if re.search('Error: Incompatible types for operator NEQ:\nstruct MPI_Comm\nstruct MPI_Comm\nat', output):
+            return 'failure'
+
+        #  $ java -jar ../../tools/CIVL-1.20_5259/lib/civl-1.20_5259.jar verify -input_mpi_nprocs=2 /MBI/gencodes/CollOpNull_Reduce_nok.c
+        # CIVL v1.20 of 2019-09-27 -- http://vsl.cis.udel.edu/civl
+        # Hello from rank 0
+        # Hello from rank 1
+        # Exception in thread "main" java.lang.ArrayIndexOutOfBoundsException: Index -1 out of bounds for length 16
+        #        at edu.udel.cis.vsl.civl.library.common.LibraryComponent.translateOperator(LibraryComponent.java:544)
+        # (REPORTED to the CIVL authors on June 18, 2021)
+        if re.search('Exception in thread "main" java.lang.ArrayIndexOutOfBoundsException', output):
+            return 'failure'
+
+        if re.search('java.lang.ClassCastException', output):
+            return 'failure'
+
+        print (f">>>>[ INCONCLUSIVE ]>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> (civl/{cachefile})")
+        print(output)
+        print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
+        return 'other'
diff --git a/scripts/tools/gen_latex.py b/scripts/tools/gen_latex.py
new file mode 100755
index 0000000000000000000000000000000000000000..03756520675378bf5c369be5a8a3ec2e54ad8bd9
--- /dev/null
+++ b/scripts/tools/gen_latex.py
@@ -0,0 +1,308 @@
+#! /usr/bin/python3
+import os,re,sys
+
+possible_features=['P2P!basic', 'P2P!nonblocking', 'P2P!persistent', 'COLL!basic', 'COLL!nonblocking', 'COLL!persistent', 'COLL!tools', 'RMA']
+possible_characterization=["Lacking", "Yes"]
+
+possible_details = {
+    # scope limited to one call
+    'InvalidCommunicator':'AInvalidParam', 'InvalidDatatype':'AInvalidParam', 'InvalidRoot':'AInvalidParam', 'InvalidTag':'AInvalidParam', 'InvalidWindow':'AInvalidParam', 'InvalidOperator':'AInvalidParam', 'InvalidOtherArg':'AInvalidParam', 'ActualDatatype':'AInvalidParam',
+    'InvalidSrcDest':'AInvalidParam',
+    # scope: Process-wide
+#    'OutOfInitFini':'BInitFini',
+    'CommunicatorLeak':'BResLeak', 'DatatypeLeak':'BResLeak', 'GroupLeak':'BResLeak', 'OperatorLeak':'BResLeak', 'TypeLeak':'BResLeak', 'RequestLeak':'BResLeak',
+    'MissingStart':'BReqLifecycle', 'MissingWait':'BReqLifecycle',
+    'LocalConcurrency':'BLocalConcurrency',
+    # scope: communicator
+    'CallMatching':'DMatch',
+    'CommunicatorMatching':'CMatch', 'DatatypeMatching':'CMatch', 'OperatorMatching':'CMatch', 'RootMatching':'CMatch', 'TagMatching':'CMatch',
+    'MessageRace':'DRace',
+
+    'GlobalConcurrency':'DGlobalConcurrency',
+    # larger scope
+    'BufferingHazard':'EBufferingHazard',
+    'OK':'FOK'}
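+# Note: detail categories are prefixed with a letter (A..F) so that sorted() orders them
+# from the narrowest scope (single call) to the widest (system), with correct codes last.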
+
+displayed_name = {
+    'AInvalidParam':'Invalid parameter',
+    'BResLeak':'Resource leak',
+#    'BInitFini':'MPI call before initialization/after finalization',
+    'BReqLifecycle':'Request lifecycle',
+    'BLocalConcurrency':'Local concurrency',
+    'CMatch':'Parameter matching',
+    'DMatch':"Call matching",
+    'DRace':'Message race',
+    'DGlobalConcurrency':'Global concurrency',
+    'EBufferingHazard':'Buffering hazard',
+    'FOK':"Correct code",
+
+    'P2P!basic':'P2P', 'P2P!nonblocking':'iP2P', 'P2P!persistent':'pP2P',
+    'COLL!basic':'Coll', 'COLL!nonblocking':'iColl', 'COLL!tools':'Coll+',
+    'RMA':'RMA',
+
+    'aislinn':'Aislinn','civl':'CIVL', 'isp':'ISP', 'simgrid':'Mc SimGrid','smpi':'SMPI', 'mpisv':'MPI-SV', 'must':'MUST', 'parcoach':'PARCOACH'
+}
+
+
+#feat_to_color = {'P2P!basic':'red', 'iP2P':'red!80', 'PERS':'purple', 'COLL':'green', 'iCOLL':'green!80', 'TOPO':'purple!20', 'RMA':'black',
+#    "PROB":'black', "COM":'black', "GRP":'black', "DATA":'black', "OP":'black'}
+feat_to_color = {'P2P!basic':'viridis0', 'P2P!nonblocking':'viridis1', 'P2P!persistent':'viridis3',
+    'RMA':'viridis10',
+    "COLL!basic":'viridis15', "COLL!nonblocking":'viridis16', "COLL!tools":'viridis17'}
+feat_to_bgcolor = {'P2P!basic':'white', 'P2P!nonblocking':'white', 'P2P!persistent':'white',
+    'RMA':'black',
+    "COLL!basic":'black', "COLL!nonblocking":'black', "COLL!tools":'black'}
+
+def parse_file_features(file):
+    """Takes a filename and returns a tuple (correct, lacking) of lists of features"""
+    correct = []
+    lacking = []
+    with open(file, 'r') as f:
+        line = f.readline()
+
+        # Search for the feature block
+        while line != 'BEGIN_MPI_FEATURES\n':
+            if line == '':
+                raise Exception("Impossible to find the feature block in {}".format(file))
+            line = f.readline()
+
+        while line != 'END_MPI_FEATURES\n':
+            if line == '':
+                raise Exception("Impossible to find the end of the feature block in {}".format(file))
+
+            line = line.strip()
+            matching = re.match("^ *([!a-zA-Z0-9]*): ([a-zA-Z0-9]*)$", line)
+            if matching is not None:
+                (feat, chara) = (matching.group(1), matching.group(2))
+                if feat not in possible_features:
+                    raise Exception("ERROR: file {} contains an unknown feature: '{}'".format(file, feat))
+                if chara not in possible_characterization:
+                    raise Exception("ERROR: file {} have feature {} with unknown characterization: '{}'".format(file, feat, chara))
+                if chara == 'Yes':
+                    correct.append(feat)
+                elif chara == 'Lacking':
+                    lacking.append(feat)
+                else:
+                    raise Exception("Impossible")
+            line = f.readline()
+    if len(correct) > 4:
+        raise Exception(f"ERROR: file {file} has more than one 4 features: {correct}")
+    return (correct, lacking)
+
+def parse_file_expected(file):
+    """Takes a file name, and returns the list of Expect headers (there may be more than one per file)"""
+    res  = list(filter(lambda line: line.startswith("  | ERROR: "), open(file, 'r').readlines()))
+    res += list(filter(lambda line: line.startswith("  | OK"), open(file, 'r').readlines()))
+    if len(res)==0:
+        raise Exception("No 'ERROR' nor 'OK' header in {}".format(file))
+    res = list(map(lambda line: re.sub("[| ]*ERROR: ", "", line.strip()), res))
+    res = list(map(lambda line: re.sub("[| ]*OK *", "OK", line), res))
+    for expected in res:
+        if expected not in possible_details:
+            raise Exception("Unexpected expectation header in {}: '{}'".format(file, expected))
+    res = list(map(lambda line: possible_details[line], res))
+    return res
+
+def get_C_files_from_dir(dir):
+    files = []
+    if dir[-1] != '/': # dir must end with a / for the later separation between path and basename
+        dir = "{}/".format(dir)
+    for filename in os.listdir(dir):
+        if filename.endswith(".c"):
+            files.append("{}/{}".format(dir,filename))
+    return files
+def filename_to_binary(file):
+    return re.sub("_", "\\_", re.sub(".*?//", "", re.sub("\.c","", file)))
+
+def parse_files_per_expected(files):
+    """
+    Reads all C files from the given list and returns a hash [expected -> list_of_lists_of_files_having_that_feature].
+    list_of_lists_files elements are of type [file, test_number_in_that_file]
+    """
+    result = {}
+    for expected in possible_details:
+        result[ possible_details[expected] ] = []
+    for file in files:
+        test = 0
+        for expected in parse_file_expected(file):
+            result[expected].append([file, test])
+            test += 1
+    return result
+
+def generate_features(files, outfile):
+    lineheight = 0.4
+    feat_width = 0.7
+    cell_width = feat_width * 3
+    cell_per_line = 10
+    files_per_expected = parse_files_per_expected(files)
+
+    line = 800
+    with open(outfile, 'w') as output:
+        output.write("\\resizebox{\\linewidth}{!}{\\begin{tikzpicture}\n")
+        categories = []
+        for expected in possible_details:
+            if not possible_details[expected] in categories:
+                categories.append(possible_details[expected])
+        for expected in sorted(categories):
+            output.write(f" \\draw({cell_width*cell_per_line/2},{line*lineheight}) node {{\\large{{{displayed_name[expected]}}}}};\n")
+            line -= 1
+            cell = 0 # Position of this file on the line
+            # Draw the boxes
+            initial_line = line
+            for (file,test) in files_per_expected[expected]:
+                (features, _) = parse_file_features(file)
+                file = f'{filename_to_binary(file)}\\#{test}'
+                output.write(f" \\draw ({cell*cell_width-(0.4*feat_width)}, {(line+0.4)*lineheight}) rectangle ({cell*cell_width+(3.45*feat_width)}, {(line-0.4)*lineheight});\n")
+                xpos = 0
+#                for feat in incorrect_feat:
+#                    output.write(f"  \\draw [fill={feat_to_color[feat]}] ({cell*cell_width + xpos-(0.4*feat_width)}, {(line-0.4)*lineheight}) rectangle ({cell*cell_width + xpos + (0.45*feat_width)}, {(line+0.4)*lineheight});\n")
+#                    xpos += feat_width
+                for feat in features:
+                    output.write(f"  \\draw [fill={feat_to_color[feat]}] ({cell*cell_width + xpos-(0.4*feat_width)}, {(line-0.4)*lineheight}) rectangle ({cell*cell_width + xpos + (0.45*feat_width)}, {(line+0.4)*lineheight});\n")
+                    xpos += feat_width
+                if cell+1 == cell_per_line:
+                    cell = 0
+                    line -= 1
+                    if line < 0:
+                        raise Exception("Too much lines. Please increase the initial value of line")
+                else :
+                    cell += 1
+
+            # Put the texts (must come after all boxes for the tooltip to not be hidden behind)
+            cell = 0
+            line = initial_line
+            for (file,test) in files_per_expected[expected]:
+                (features,  _) = parse_file_features(file)
+                file = f'{filename_to_binary(file)}\\#{test}'
+                xpos = 0
+#                for feat in incorrect_feat:
+#                    output.write(f"  \\draw ({cell*cell_width + xpos}, {line*lineheight}) node {{\\scriptsize{{\\tooltip****[{feat_to_bgcolor[feat]}]{{\\sout{{{feat}}}}}{{{file} -- incorrect: {feat}}}}}}};\n")
+#                    xpos += feat_width
+                for feat in features:
+#                    output.write(f"  \\draw ({cell*cell_width + xpos}, {line*lineheight}) node {{\\scriptsize{{\\tooltip****[{feat_to_bgcolor[feat]}]{{{feat}}}{{{file} -- correct: {feat}}}}}}};\n")
+                    output.write(f"  \\draw ({cell*cell_width + xpos}, {line*lineheight}) node {{\\scriptsize{{\\color{{{feat_to_bgcolor[feat]}}}{{{displayed_name[feat]}}}}}}};\n")
+                    xpos += feat_width
+                if cell+1 == cell_per_line:
+                    cell = 0
+                    line -= 1
+                    if line < 0:
+                        raise Exception("Too much lines. Please increase the initial value of line")
+                else :
+                    cell += 1
+            if cell != 0: # the current line is partially filled: skip past it before the next category
+                line -= 1
+        output.write("\\end{tikzpicture}}\n")
+
+def generate_labels(files, outfile):
+    files_per_expected = parse_files_per_expected(files)
+
+    # Get the data
+    OK = {'total':0}
+    Error = {'total':0}
+    for feat in possible_features:
+        OK[feat] = 0
+        Error[feat] = 0
+    seen = []
+    for detail in possible_details:
+        category = possible_details[detail]
+        for (file,test) in files_per_expected[category]:
+            if not file in seen:
+                seen.append(file)
+                (features,  _) = parse_file_features(file)
+                if detail == 'OK':
+                    OK['total'] += 1
+                    for feat in features:
+                        OK[feat] += 1
+                else:
+                    Error['total'] += 1
+                    for feat in features:
+                        Error[feat] += 1
+            else:
+                print(f"Ignore duplicate {file} while counting files per label.")
+
+    # Produce the output
+    with open(outfile, 'w') as output:
+        output.write('\\begin{tabular}{|l| l | l | c | c |}\\hline\n')
+        output.write('\\multicolumn{2}{|c|}{ \\textbf{MPI}} & \\multirow{2}{*}{\\textbf{Description}} & \\multicolumn{2}{c|}{\\textbf{Number of codes using the label}} \\\\')
+        output.write('\\multicolumn{2}{|c|}{ \\textbf{Feature Label}} &  & \\# Incorrect codes & \\# Correct codes \\\\ \\hline\n')
+
+        output.write("\\parbox[t]{4mm}{\\multirow{3}{*}{\\R{P2P}}} & base calls & Use of blocking point-to-point communication)")
+        output.write(f" & {Error['P2P!basic']} & {OK['P2P!basic']} \\\\ \n")
+        output.write("& nonblocking & Use of  nonblocking point-to-point communication")
+        output.write(f" & {Error['P2P!nonblocking']} & {OK['P2P!nonblocking']} \\\\ \n")
+#        output.write(f" &  116 &  19 \\\\ \n")
+        output.write("& persistent & Use of point-to-point persistent communications")
+        output.write(f" & {Error['P2P!persistent']} & {OK['P2P!persistent']} \\\\ \\hline \n")
+#        output.write(f" &  45 &  8 \\\\ \\hline \n")
+        output.write("\\parbox[t]{2mm}{\\multirow{3}{*}{\\R{COLL}}} & base calls & Use of blocking collective communication")
+        output.write(f" & {Error['COLL!basic']} & {OK['COLL!basic']} \\\\ \n")
+#        output.write(f" &  312 &  202 \\\\ \n")
+        output.write("& nonblocking & Use of nonblocking collective communication")
+        output.write(f" & {Error['COLL!nonblocking']} & {OK['COLL!nonblocking']} \\\\ \n")
+#        output.write(f" &  129 &   114 \\\\ \n")
+        output.write("& tools & Use of resource function (e.g.,  communicators, datatypes)")
+        output.write(f" & {Error['COLL!tools']} & {OK['COLL!tools']} \\\\ \\hline \n")
+#        output.write(f" &  94 &  23 \\\\ \\hline \n")
+        output.write("\\multicolumn{2}{|c|}{RMA} & Use of Remote Memory Access")
+        output.write(f" & {Error['RMA']} & {OK['RMA']} \\\\ \\hline \n")
+#        output.write(f" &  30 &  3 \\\\ \\hline \n")
+        output.write("\\end{tabular}\n")
+
+
+def generate_errors(files, outfile):
+    files_per_expected = parse_files_per_expected(files)
+    def get_counts(categories):
+        count = {'total':0}
+        for feat in possible_features:
+            count[feat] = 0
+        seen = []
+        for category in categories:
+            for (file,test) in files_per_expected[category]:
+                if not file in seen:
+                    seen.append(file)
+                    (features,  _) = parse_file_features(file)
+                    count['total'] += 1
+                    for feat in features:
+                        count[feat] += 1
+                else:
+                    print(f"Ignore duplicate {file} while counting files per feature.")
+        return count
+    def show_counts(categories):
+        count = get_counts(categories)
+        output.write(f"{count['P2P!basic']}&{count['P2P!nonblocking']}&{count['P2P!persistent']}&")
+        output.write(f"{count['COLL!basic']}&{count['COLL!nonblocking']}&{count['COLL!tools']} & {count['RMA']} & {count['total']} \\\\")
+
+    with open(outfile, 'w') as output:
+        output.write('\\begin{tabular}{|l|l|c|c|c| c|c|c |c||c|}\\cline{3-10}\n')
+        output.write('\\multicolumn{2}{c|}{}&\\multicolumn{3}{c|}{Point-to-point}&\\multicolumn{3}{c|}{Collective}&\\multirow{6}{*}{RMA}&\\multirow{6}{*}{Unique files}\\\\\\cline{3-8}\n')
+        output.write('\\multicolumn{2}{c|}{}&\\R{base calls}&\\R{~nonblocking~}&\\R{persistent} & \\R{base calls}&\\R{~nonblocking~}& \\R{tools} &&\\\\\\hline\n')
+
+        output.write('\\multirow{1}{*}{{Single call}} &Invalid Parameter & ');   show_counts(['AInvalidParam']); output.write(' \\hline')
+
+        output.write('\\multirow{3}{*}{{Single process}}&Resource Leak    & ');  show_counts(['BResLeak'])     ; output.write('\\cline{2-10}\n')
+        output.write( '                                 &Request lifecycle& ');  show_counts(['BReqLifecycle']); output.write('\\cline{2-10}\n')
+        output.write( '                                 &Local concurrency& ');  show_counts(['BLocalConcurrency']); output.write('\\hline\n')
+
+        output.write('\\multirow{4}{*}{{Multi-processes}}&Parameter matching& ');  show_counts(['CMatch'])        ; output.write('\\cline{2-10}\n')
+        output.write( '                                  &Message Race      & ');  show_counts(['DRace'])        ; output.write('\\cline{2-10}\n')
+        output.write( '                                  &Call ordering     & ');  show_counts(['DMatch'])       ; output.write('\\cline{2-10}\n')
+        output.write( '                                  &Global concurrency& ');  show_counts(['DGlobalConcurrency']); output.write('\\hline\n')
+
+        output.write( '      System & Buffering Hazard    &') ; show_counts(['EBufferingHazard']);output.write('\\hline\\hline\n')
+        output.write('\\multicolumn{2}{|c|}{Correct codes}&') ; show_counts(['FOK']);output.write('\\hline\\hline\n')
+
+        output.write('\\multicolumn{2}{|c|}{\\textbf{Total}}&')
+        show_counts(['AInvalidParam', 'BResLeak','BReqLifecycle','BLocalConcurrency', 'CMatch', 'DRace','DMatch','DGlobalConcurrency', 'EBufferingHazard', 'FOK'])
+        output.write('\\hline\n')
+
+        output.write('\\end{tabular}\n')
+
+# Obsolete view with all collored boxes
+#generate_features(get_C_files_from_dir("../gencodes"), "../latex/features.tex")
+generate_labels(get_C_files_from_dir("../gencodes"), "../latex/labels.tex")
+generate_errors(get_C_files_from_dir("../gencodes"), "../latex/errors.tex")
\ No newline at end of file
diff --git a/scripts/tools/gen_plots_radar.py b/scripts/tools/gen_plots_radar.py
new file mode 100644
index 0000000000000000000000000000000000000000..c6a4dcf0cf52ad3423b665435edd538de52908ff
--- /dev/null
+++ b/scripts/tools/gen_plots_radar.py
@@ -0,0 +1,106 @@
+# Source of code:
+#   https://matplotlib.org/stable/gallery/specialty_plots/radar_chart.html
+#
+# Accessed: 15 April 2022
+
+import numpy as np
+
+import matplotlib.pyplot as plt
+from matplotlib.patches import Circle, RegularPolygon
+from matplotlib.path import Path
+from matplotlib.projections.polar import PolarAxes
+from matplotlib.projections import register_projection
+from matplotlib.spines import Spine
+from matplotlib.transforms import Affine2D
+
+
+def radar_factory(num_vars, frame='circle'):
+    """
+    Create a radar chart with `num_vars` axes.
+
+    This function creates a RadarAxes projection and registers it.
+
+    Parameters
+    ----------
+    num_vars : int
+        Number of variables for radar chart.
+    frame : {'circle', 'polygon'}
+        Shape of frame surrounding axes.
+
+    """
+    # calculate evenly-spaced axis angles
+    theta = np.linspace(0, 2*np.pi, num_vars, endpoint=False)
+
+    class RadarTransform(PolarAxes.PolarTransform):
+
+        def transform_path_non_affine(self, path):
+            # Paths with non-unit interpolation steps correspond to gridlines,
+            # in which case we force interpolation (to defeat PolarTransform's
+            # autoconversion to circular arcs).
+            if path._interpolation_steps > 1:
+                path = path.interpolated(num_vars)
+            return Path(self.transform(path.vertices), path.codes)
+
+    class RadarAxes(PolarAxes):
+
+        name = 'radar'
+        # use 1 line segment to connect specified points
+        RESOLUTION = 1
+        PolarTransform = RadarTransform
+
+        def __init__(self, *args, **kwargs):
+            super().__init__(*args, **kwargs)
+            # rotate plot such that the first axis is at the top
+            self.set_theta_zero_location('N')
+
+        def fill(self, *args, closed=True, **kwargs):
+            """Override fill so that line is closed by default"""
+            return super().fill(closed=closed, *args, **kwargs)
+
+        def plot(self, *args, **kwargs):
+            """Override plot so that line is closed by default"""
+            lines = super().plot(*args, **kwargs)
+            for line in lines:
+                self._close_line(line)
+
+        def _close_line(self, line):
+            x, y = line.get_data()
+            # FIXME: markers at x[0], y[0] get doubled-up
+            if x[0] != x[-1]:
+                x = np.append(x, x[0])
+                y = np.append(y, y[0])
+                line.set_data(x, y)
+
+        def set_varlabels(self, labels):
+            self.set_thetagrids(np.degrees(theta), labels)
+
+        def _gen_axes_patch(self):
+            # The Axes patch must be centered at (0.5, 0.5) and of radius 0.5
+            # in axes coordinates.
+            if frame == 'circle':
+                return Circle((0.5, 0.5), 0.5)
+            elif frame == 'polygon':
+                return RegularPolygon((0.5, 0.5), num_vars,
+                                      radius=.5, edgecolor="k")
+            else:
+                raise ValueError("Unknown value for 'frame': %s" % frame)
+
+        def _gen_axes_spines(self):
+            if frame == 'circle':
+                return super()._gen_axes_spines()
+            elif frame == 'polygon':
+                # spine_type must be 'left'/'right'/'top'/'bottom'/'circle'.
+                spine = Spine(axes=self,
+                              spine_type='circle',
+                              path=Path.unit_regular_polygon(num_vars))
+                # unit_regular_polygon gives a polygon of radius 1 centered at
+                # (0, 0) but we want a polygon of radius 0.5 centered at (0.5,
+                # 0.5) in axes coordinates.
+                spine.set_transform(Affine2D().scale(.5).translate(.5, .5)
+                                    + self.transAxes)
+                return {'polar': spine}
+            else:
+                raise ValueError("Unknown value for 'frame': %s" % frame)
+
+    register_projection(RadarAxes)
+    return theta
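+
+# Minimal usage sketch (adapted from the matplotlib gallery example this file comes from):
+#   theta = radar_factory(5, frame='polygon')
+#   fig, ax = plt.subplots(subplot_kw=dict(projection='radar'))
+#   ax.plot(theta, [1, 4, 2, 3, 5])
+#   ax.fill(theta, [1, 4, 2, 3, 5], alpha=0.25)
+#   ax.set_varlabels(['A', 'B', 'C', 'D', 'E'])
+#   plt.show()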
diff --git a/scripts/tools/hermes.py b/scripts/tools/hermes.py
new file mode 100644
index 0000000000000000000000000000000000000000..9e08a099670b48a728e5f1f9fb0c2574bba50913
--- /dev/null
+++ b/scripts/tools/hermes.py
@@ -0,0 +1,150 @@
+import re
+import os
+import sys
+import tempfile
+import shutil
+from MBIutils import *
+
+class Tool(AbstractTool):
+    def identify(self):
+        return "Hermes wrapper"
+
+    def ensure_image(self):
+        AbstractTool.ensure_image(self, "-x hermes")
+
+    def build(self, rootdir, cached=True):
+        if cached and os.path.exists('/MBI-builds/hermes/bin/ispcc') and os.path.exists('/MBI-builds/hermes/clangTool/clangTool'):
+            return
+        print(f"Building Hermes. Files exist: {os.path.exists('/MBI-builds/hermes/bin/ispcc')}, {os.path.exists('/MBI-builds/hermes/clangTool/clangTool')}")
+
+        # Get a GIT checkout. Either create it, or refresh it
+        subprocess.run(f"rm -rf /MBI-builds/hermes && mkdir -p /MBI-builds && git clone --depth=1 https://github.com/DhritiKhanna/Hermes.git /tmp/hermes", shell=True, check=True)
+
+        # Build it
+        here = os.getcwd() # Save where we were
+        os.chdir(f"/tmp/hermes")
+        subprocess.run("cd clangTool/ && make -j$(nproc) clangTool", shell=True, check=True)
+        subprocess.run("cp -r clangTool/ /MBI-builds/hermes", shell=True, check=True)
+        subprocess.run("autoreconf --install", shell=True, check=True)
+        subprocess.run(f"./configure --disable-gui --prefix='/MBI-builds/hermes/' --enable-optional-ample-set-fix --with-mpi-inc-dir=/usr/lib/x86_64-linux-gnu/mpich/include CXXFLAGS='-fPIC' LDFLAGS='-lz3'", shell=True, check=True)
+        subprocess.run("make -j$(nproc)", shell=True, check=True)
+        subprocess.run("make -j$(nproc) install", shell=True, check=True)
+
+        # Back to our previous directory
+        os.chdir(here)
+
+    def setup(self):
+        os.environ['PATH'] = f"{os.environ['PATH']}:/MBI-builds/hermes/bin/"
+        if os.path.exists("compile_commands.json"):
+            return
+        with open('compile_commands.json', 'w') as outfile:
+            outfile.write("[{")
+            outfile.write(f'  "command": "/usr/bin/cxx -c -I/usr/lib/x86_64-linux-gnu/openmpi/include/openmpi -I/MBI-builds/hermes/clangTool/ -I/usr/lib/x86_64-linux-gnu/openmpi/include -I/usr/lib/x86_64-linux-gnu/mpich/include -I/MBI-builds/hermes/clangTool/clang+llvm-3.8.0-x86_64-linux-gnu-debian8/lib/clang/3.8.0/include/ -I/usr/include/linux/ -fpermissive -pthread source.c",\n')
+            outfile.write(f'          "directory": "{self.rootdir}/logs/hermes",\n')
+            outfile.write(f'          "file": "{self.rootdir}/logs/hermes/source.c"\n')
+            outfile.write('}]')
+        subprocess.run("update-alternatives --set mpi /usr/bin/mpicc.mpich", shell=True, check=True)
+        subprocess.run("update-alternatives --set mpirun /usr/bin/mpirun.mpich", shell=True, check=True)
+
+    def run(self, execcmd, filename, binary, id, timeout, batchinfo):
+        os.environ['PATH'] = f"{os.environ['PATH']}:/MBI-builds/hermes/bin/"
+        cachefile = f'{binary}_{id}'
+
+        execcmd = re.sub("mpirun", "isp.exe", execcmd)
+        execcmd = re.sub('-np', '-n', execcmd)
+        execcmd = re.sub('\${EXE}', f"./{binary}", execcmd)
+        execcmd = re.sub('\$zero_buffer', "-b", execcmd)
+        execcmd = re.sub('\$infty_buffer', "-g", execcmd)
+
+        with tempfile.TemporaryDirectory() as tmpdirname:
+            self.run_cmd(
+               buildcmd=f"cp {filename} source.c &&"
+                       +"/MBI-builds/hermes/clangTool/clangTool source.c &&"
+                       +f"ispcxx -I/MBI-builds/hermes/clangTool/ -o {tmpdirname}/{binary} i_source.c /MBI-builds/hermes/clangTool/GenerateAssumes.cpp /MBI-builds/hermes/clangTool/IfInfo.cpp /MBI-builds/hermes/profiler/Client.c",
+               execcmd=execcmd,
+               cachefile=cachefile,
+               filename=filename,
+               binary=binary,
+               timeout=timeout,
+               cwd=tmpdirname,
+               batchinfo=batchinfo)
+
+            if os.path.exists(f"{tmpdirname}/report.html"):
+               os.rename(f"{tmpdirname}/report.html", f"{binary}_{id}.html")
+
+#        subprocess.run("rm -f core vgcore.*", shell=True, check=True) # Save disk space ASAP
+#        subprocess.run("find -type f -a -executable | xargs rm -f", shell=True, check=True)
+
+    def parse(self, cachefile):
+        if os.path.exists(f'{cachefile}.timeout') or os.path.exists(f'logs/hermes/{cachefile}.timeout'):
+            return 'timeout'
+        if not (os.path.exists(f'{cachefile}.txt') or os.path.exists(f'logs/hermes/{cachefile}.txt')):
+            return 'failure'
+
+        with open(f'{cachefile}.txt' if os.path.exists(f'{cachefile}.txt') else f'logs/hermes/{cachefile}.txt', 'r') as infile:
+            output = infile.read()
+
+        if re.search('MBI_MSG_RACE', output):
+            return 'MBI_MSG_RACE'
+
+        # Hermes-specific
+        if re.search('Detected a DEADLOCK in interleaving', output):
+            return 'deadlock'
+
+        # Inherited from ISP
+        if (re.search('resource leaks detected', output) and
+            not re.search('No resource leaks detected', output)):
+            return 'resleak'
+
+        if re.search('Rank [0-9]: WARNING: Waited on non-existant request in', output):
+            return 'mpierr'
+        if re.search('Rank [0-9]: Invalid rank in MPI_.*? at ',output):
+            return 'invalid rank'
+        # Invalid argument/tag/datatype/etc.
+        if re.search('Fatal error in P?MPI_.*?: Invalid', output):
+            if re.search('Invalid argument', output):
+                return 'invalid argument'
+            if re.search('Invalid communicator', output):
+                return 'invalid communicator'
+            if re.search('Invalid datatype', output):
+                return 'invalid datatype'
+            if re.search('Invalid MPI_Op', output):
+                return 'invalid mpi operator'
+            if re.search('Invalid tag', output):
+                return 'invalid tag'
+            else:
+                return 'mpierr'
+        if re.search('Fatal error in PMPI', output):
+            return 'mpierr'
+        if re.search('Fatal error in MPI', output):
+            return 'mpierr'
+
+        # https://github.com/DhritiKhanna/Hermes/issues/2
+        if re.search("isp.exe: ServerSocket.cpp:220: int ServerSocket::Receive.*?: Assertion `iter != _cli_socks.end", output):
+            return 'failure'
+        if re.search('Command killed by signal 15, elapsed time: 300', output):
+            return 'timeout'
+
+        if re.search('Assertion failed', output):
+            return 'failure'
+
+        if re.search('Segmentation fault', output):
+            return 'segfault'
+
+        if re.search('1 warning generated', output):
+            if not re.search('implicitly declaring', output):
+                return 'other'
+
+        if re.search('Command return code: 22', output):
+            return 'failure'
+
+        if re.search('Command return code: 0', output):
+            return 'OK'
+
+        if re.search('No resource leaks detected', output):
+            return 'OK'
+
+        print (f">>>>[ INCONCLUSIVE ]>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> (hermes/{cachefile})")
+        print(output)
+        print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
+        return 'other'
diff --git a/scripts/tools/isp.py b/scripts/tools/isp.py
new file mode 100644
index 0000000000000000000000000000000000000000..0025c18c9619af1f892dc2f44fa8ff0a86bdaf3c
--- /dev/null
+++ b/scripts/tools/isp.py
@@ -0,0 +1,126 @@
+import re
+import os
+from MBIutils import *
+
+class Tool(AbstractTool):
+    def identify(self):
+        return "ISP 0.3.1 wrapper"
+
+    def ensure_image(self):
+        AbstractTool.ensure_image(self, "-x isp")
+
+    def build(self, rootdir, cached=True):
+        if cached and os.path.exists("/MBI-builds/ISP/bin/ispcc"):
+            return
+
+        # Build it
+        here = os.getcwd() # Save where we were
+        subprocess.run(f"mkdir -p {rootdir}/tmp-isp", shell=True, check=True)
+        os.chdir(f"{rootdir}/tmp-isp")
+        subprocess.run(f"wget http://www.cs.utah.edu/formal_verification/ISP-release/tarball/isp-0.3.1.tar.gz", shell=True, check=True)
+        subprocess.run(f"tar xf isp-*tar.gz", shell=True, check=True)
+        os.chdir(f"{rootdir}/tmp-isp/isp-0.3.1")
+        subprocess.run(f"./configure --prefix=/MBI-builds/ISP --with-mpi-inc-dir=/usr/lib/x86_64-linux-gnu/mpich/include --enable-optional-ample-set-fix", shell=True, check=True)
+        subprocess.run(f'sed -i "s/-source 1.5 -target 1.5 -classpath/-source 1.7 -target 1.7 -classpath/" UI/Makefile*', shell=True, check=True)
+        subprocess.run("make -j$(nproc) install", shell=True, check=True)
+
+        # Back to our previous directory
+        os.chdir(here)
+        subprocess.run(f"rm -rf {rootdir}/tmp-isp", shell=True, check=True)
+
+    def setup(self):
+        os.environ['PATH'] = f"{os.environ['PATH']}:/MBI-builds/ISP/bin/"
+        subprocess.run("update-alternatives --set mpi /usr/bin/mpicc.mpich", shell=True, check=True)
+        subprocess.run("update-alternatives --set mpirun /usr/bin/mpirun.mpich", shell=True, check=True)
+
+    def run(self, execcmd, filename, binary, id, timeout, batchinfo):
+        cachefile = f'{binary}_{id}'
+
+        execcmd = re.sub("mpirun", "isp.exe", execcmd)
+        execcmd = re.sub('-np', '-n', execcmd)
+        execcmd = re.sub('\${EXE}', f"./{binary}", execcmd)
+        execcmd = re.sub('\$zero_buffer', "-b", execcmd)
+        execcmd = re.sub('\$infty_buffer', "-g", execcmd)
+
+
+        if self.run_cmd(buildcmd=f"ispcc -o {binary} {filename}",
+                   execcmd=execcmd,
+                   cachefile=cachefile,
+                   filename=filename,
+                   binary=binary,
+                   timeout=timeout,
+                   batchinfo=batchinfo):
+
+            # The test was actually run
+            print("\nClearing port after executing ISP\n")
+            subprocess.run("kill -9 $(lsof -t -i:9999) 2>/dev/null", shell=True)
+
+    def teardown(self): # Remove generated cruft (binary files)
+        subprocess.run("find -type f -a -executable | xargs rm -f", shell=True, check=True)
+
+    def parse(self, cachefile):
+        if os.path.exists(f'{cachefile}.timeout') or os.path.exists(f'logs/isp/{cachefile}.timeout'):
+            return 'timeout'
+        if not (os.path.exists(f'{cachefile}.txt') or os.path.exists(f'logs/isp/{cachefile}.txt')):
+            return 'failure'
+
+        with open(f'{cachefile}.txt' if os.path.exists(f'{cachefile}.txt') else f'logs/isp/{cachefile}.txt', 'r') as infile:
+            output = infile.read()
+
+        if re.search('Compilation of .*? raised an error \(retcode: ', output):
+            return 'UNIMPLEMENTED'
+
+        if re.search('ISP detected deadlock!!!', output):
+            return 'deadlock'
+        if re.search('Detected a DEADLOCK in interleaving', output):
+            return 'deadlock'
+
+        if re.search('MBI_MSG_RACE', output):
+            return 'MBI_MSG_RACE'
+
+        if (re.search('resource leaks detected', output) and
+            not re.search('No resource leaks detected', output)):
+            return 'resleak'
+
+        if re.search("Attempting to use an MPI routine after finalizing MPI", output):
+            return 'mpierr'
+
+        if re.search('Rank [0-9]: WARNING: Waited on non-existant request in', output):
+            return 'mpierr'
+        if re.search('Rank [0-9]: Invalid rank in MPI_.*? at ',output):
+            return 'mpierr'
+        # Invalid argument/tag/datatype/etc.
+        if re.search('Fatal error in P?MPI_.*?: Invalid', output):
+            if re.search('Invalid argument', output):
+                return 'invalid argument'
+            if re.search('Invalid communicator', output):
+                return 'invalid communicator'
+            if re.search('Invalid datatype', output):
+                return 'invalid datatype'
+            if re.search('Invalid MPI_Op', output):
+                return 'invalid mpi operator'
+            if re.search('Invalid tag', output):
+                return 'invalid tag'
+            else:
+                return 'mpierr'
+        if re.search('Fatal error in PMPI', output):
+            return 'mpierr'
+        if re.search('Fatal error in MPI', output):
+            return 'mpierr'
+
+        if re.search('Assertion failed', output):
+            return 'failure'
+
+        if re.search('ISP detected no deadlocks', output):
+            return 'OK'
+
+        if re.search('BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES', output):
+            return 'failure'
+
+        if re.search('Command killed by signal 15, elapsed time: 300', output):
+            return 'timeout'
+
+        print (f">>>>[ INCONCLUSIVE ]>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> (isp/{cachefile})")
+        print(output)
+        print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
+        return 'other'
diff --git a/scripts/tools/itac.py b/scripts/tools/itac.py
new file mode 100644
index 0000000000000000000000000000000000000000..b79ccab39b12de83da6ce95f01ab1ce786e3eccb
--- /dev/null
+++ b/scripts/tools/itac.py
@@ -0,0 +1,99 @@
+import re
+import os
+import distutils.spawn
+
+from MBIutils import *
+
+class Tool(AbstractTool):
+    def identify(self):
+        return "Intel TAC"
+
+    def ensure_image(self):
+        AbstractTool.ensure_image(self, dockerparams="--shm-size=512m ", params="-x itac")
+
+    def setup(self):
+        if not os.path.exists("environment.txt"):
+            print("Installing ITAC...\n")
+            subprocess.run("apt update && apt install wget", shell=True, check=True)
+            subprocess.run("wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB -O- | apt-key add -", shell=True, check=True)
+            subprocess.run("echo 'deb https://apt.repos.intel.com/oneapi all main' > /etc/apt/sources.list.d/oneAPI.list", shell=True, check=True)
+            subprocess.run("apt update && apt install -y intel-oneapi-itac intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic intel-oneapi-mpi-devel", shell=True, check=True)
+            # Capture the environment set by the /opt/intel/oneapi/setvars.sh shell script so that we can reuse it from this python script. Gosh, Intel...
+            subprocess.run('bash -c "source /opt/intel/oneapi/setvars.sh && printenv" > environment.txt', shell=True, check=True)
+        with open('environment.txt', "r") as input:
+            for line in input:
+                if re.search('=', line) is not None:
+                    m = re.match('([^=]*)=(.*)', line)
+                    if m is None:
+                        raise Exception(f"Parse error while trying to integrating the Intel environment: {line}")
+                    # print(f"os.environ[{m.group(1)}]={m.group(2)}")
+                    os.environ[m.group(1)] = m.group(2)
+
+        if (not distutils.spawn.find_executable("mpiicc")):
+            # mpiicc still not usable. Maybe that's the environment.txt from a previous run. Try again to install the tools
+            os.unlink("environment.txt")
+            self.setup()
+
+    def run(self, execcmd, filename, binary, id, timeout, batchinfo):
+        cachefile = f'{binary}_{id}'
+
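+        # "-check_mpi" runs the program under Intel's MPI correctness checker; setting
+        # VT_CHECK_TRACING also records a trace of the detected issues.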
+        execcmd = re.sub("mpirun", "mpirun -check_mpi -genv VT_CHECK_TRACING on", execcmd)
+        execcmd = re.sub('\${EXE}', f'./{binary}', execcmd)
+        execcmd = re.sub('\$zero_buffer', "", execcmd)
+        execcmd = re.sub('\$infty_buffer', "", execcmd)
+
+        ran = self.run_cmd(
+            buildcmd=f"mpiicc {filename} -O0 -g -o {binary}",
+            execcmd=execcmd,
+            cachefile=cachefile,
+            filename=filename,
+            binary=binary,
+            timeout=timeout,
+            batchinfo=batchinfo)
+
+        if ran:
+            subprocess.run(f"rm -f core vgcore.* {binary}", shell=True, check=True) # Save disk space ASAP
+
+    def teardown(self):
+        subprocess.run("find -type f -a -executable | xargs rm -f", shell=True, check=True) # Remove generated cruft (binary files)
+        subprocess.run("rm -f core", shell=True, check=True)
+
+    def parse(self, cachefile):
+        if os.path.exists(f'{cachefile}.timeout') or os.path.exists(f'logs/itac/{cachefile}.timeout'):
+            return 'timeout'
+        if not (os.path.exists(f'{cachefile}.txt') or os.path.exists(f'logs/itac/{cachefile}.txt')):
+            return 'failure'
+
+        with open(f'{cachefile}.txt' if os.path.exists(f'{cachefile}.txt') else f'logs/itac/{cachefile}.txt', 'r') as infile:
+            output = infile.read()
+
+        if re.search('mpiicc: not found', output):
+            return 'failure'
+
+        if re.search('Compilation of .*? raised an error \(retcode: ', output):
+            return 'UNIMPLEMENTED'
+
+        if re.search('MBI_MSG_RACE', output):
+            return 'MBI_MSG_RACE'
+
+        match = re.search('ERROR: (.*?): (fatal )?error', output)
+        if match:
+#            print ('<Match: %r, groups=%r>' % (match.group(), match.groups()))
+            return match.group(1)
+
+        match = re.search('WARNING: (.*?): (fatal )?warning', output)
+        if match:
+            return match.group(1)
+        if re.search('Command return code: 0,', output):
+            return 'OK'
+
+        if re.search('Command killed by signal 15, elapsed time: 300', output):
+            return 'timeout'
+
+        if re.search('ERROR: Giving up now...', output):
+            return 'failure'
+
+        print (f">>>>[ INCONCLUSIVE ]>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> (itac/{cachefile})")
+        print(output)
+        print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
+        return 'other'
diff --git a/scripts/tools/mpi_checker.py b/scripts/tools/mpi_checker.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6fbb4207349005487f5d63cc54fa002c3be808b
--- /dev/null
+++ b/scripts/tools/mpi_checker.py
@@ -0,0 +1,100 @@
+import re
+import os
+from MBIutils import *
+
+class Tool(AbstractTool):
+    def identify(self):
+        return "MPI-Checker wrapper"
+
+    def ensure_image(self):
+        AbstractTool.ensure_image(self, "-x mpi-checker")
+
+    def build(self, rootdir, cached=True):
+        here = os.getcwd()
+        os.chdir(rootdir)
+
+        subprocess.run("clang-tidy-11 --list-checks", shell=True, check=True)
+
+        os.chdir(here)
+
+
+    def run(self, execcmd, filename, binary, id, timeout, batchinfo):
+        cachefile = f'{binary}_{id}'
+
+        with open("/MBI/compile_commands.json", "w") as out:
+            out.write(f'[{{"directory": "/MBI/", "command": "mpicc {filename} -I/usr/include/x86_64-linux-gnu/mpich/", "file": "{filename}"}}]')
+
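+        # Two analyses are chained: clang-tidy's MPI checks (mpi-type-mismatch, mpi-buffer-deref)
+        # as the 'build' step, then the Clang Static Analyzer's optin.mpi.MPI-Checker through
+        # analyze-build as the 'run' step.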
+        ran = self.run_cmd(
+            buildcmd=f"run-clang-tidy-11.py -export-fixes='/MBI/logs/mpi-checker/{binary}_{id}.yaml' -checks='-*,mpi-type-mismatch,mpi-buffer-deref'",
+            execcmd=f"analyze-build --output /MBI/logs/mpi-checker/{cachefile} --cdb /MBI/compile_commands.json --enable-checker optin.mpi.MPI-Checker",
+            cachefile=cachefile,
+            filename=filename,
+            binary=binary,
+            timeout=timeout,
+            batchinfo=batchinfo)
+
+        subprocess.run(f"chmod -R +r /MBI/logs/mpi-checker/{cachefile}", shell=True, check=True)
+        subprocess.run("rm /MBI/compile_commands.json", shell=True, check=True)
+
+    def parse(self, cachefile):
+        if os.path.exists(f'{cachefile}.timeout') or os.path.exists(f'logs/mpi-checker/{cachefile}.timeout'):
+            return 'timeout'
+        if not (os.path.exists(f'{cachefile}.txt') or os.path.exists(f'logs/mpi-checker/{cachefile}.txt')):
+            return 'failure'
+
+        # Read text report
+        with open(f'{cachefile}.txt' if os.path.exists(f'{cachefile}.txt') else f'logs/mpi-checker/{cachefile}.txt', 'r') as infile:
+            output = infile.read()
+
+        # with open(f'{cachefile}.yaml' if os.path.exists(f'{cachefile}.yaml') else f'logs/mpi-checker/{cachefile}.yaml', 'r') as infile:
+        #     output = infile.read()
+
+        if re.search('no matching wait', output):
+            return 'Missing wait'
+
+        if re.search('no matching nonblocking call', output):
+            return 'Unmatched wait'
+
+        # Errors not caught by the specific patterns above
+        if (re.search('error:', output)
+            or re.search('warning:', output)):
+            return 'failure'
+
+        # Read HTML report
+        report = []
+        for root, dirs, files in os.walk(f"logs/mpi-checker/{cachefile}"):
+            if "index.html" in files:
+                report.append(os.path.join(root, "index.html"))
+
+        for html in report:
+            with open(html, 'r') as infile:
+                output = infile.read()
+
+            if re.search('Missing wait', output):
+                return 'Missing wait'
+
+            if re.search('Unmatched wait', output):
+                return 'Unmatched wait'
+
+            if re.search('MPI Error', output):
+                return 'mpierror'
+
+        # if re.search('warning', output):
+        #     return 'SUPPRESSED_WARNING'
+
+        return 'OK'
+
+    def is_correct_diagnostic(self, test_id, res_category, expected, detail):
+        if res_category != 'TRUE_POS':
+            return True
+
+        out = self.parse(test_id)
+
+        if out == 'Missing wait' and detail != 'MissingWait':
+            return False
+
+        if (out in ['Missing wait', 'Unmatched wait'] and
+            possible_details[detail] != "BReqLifecycle"):
+            return False
+
+        return True
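
The run() method above writes a one-entry Clang compilation database by hand. Its layout (a JSON list of objects with "directory", "command" and "file" keys) is the standard compile_commands.json format consumed by run-clang-tidy and analyze-build; below is a sketch that builds it with the json module instead, which avoids quoting problems if filename ever contains characters needing escaping (paths copied from the wrapper above):

import json

def write_compile_commands(filename, path="/MBI/compile_commands.json"):
    # One-entry Clang compilation database for the test source being analyzed.
    entry = {
        "directory": "/MBI/",
        "command": f"mpicc {filename} -I/usr/include/x86_64-linux-gnu/mpich/",
        "file": filename,
    }
    with open(path, "w") as out:
        json.dump([entry], out)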
diff --git a/scripts/tools/mpisv.py b/scripts/tools/mpisv.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ace8ccf739c922cdd42bddbad3a6f56a6a9cb44
--- /dev/null
+++ b/scripts/tools/mpisv.py
@@ -0,0 +1,80 @@
+import re
+import os
+import sys
+from MBIutils import *
+
+class Tool(AbstractTool):
+    def identify(self):
+        return "MPI-SV wrapper"
+
+    def ensure_image(self):
+        if os.path.exists("/root/mpi-sv/mpisv"):
+            print("This is the docker image of MPI-SV. Good.")
+        else:
+            print("Please run this script in a mpisv/mpi-sv image. Run these commands:")
+            print("  docker image pull mpisv/mpi-sv")
+            print("  docker run -it --rm --name MIB --shm-size=512m --volume $(pwd):/MBI mpisv/mpi-sv  /MBI/scripts/ensure_python3 /MBI/MBI.py -x mpisv")
+            sys.exit(1)
+
+    def run(self, execcmd, filename, binary, id, timeout, batchinfo):
+        cachefile = f'{binary}_{id}'
+
+        execcmd = re.sub("mpirun", "mpisv", execcmd)
+        execcmd = re.sub('-np ', "", execcmd)
+        execcmd = re.sub('\${EXE}', f'{binary}.bc', execcmd)
+        execcmd = re.sub('\$zero_buffer', "", execcmd)
+        execcmd = re.sub('\$infty_buffer', "", execcmd)
+
+        ran = self.run_cmd(
+            buildcmd=f"mpisvcc {filename} -o {binary}.bc",
+            execcmd=execcmd,
+            cachefile=cachefile,
+            filename=filename,
+            binary=binary,
+            timeout=timeout,
+            batchinfo=batchinfo)
+
+        if os.path.exists('klee-last') and not os.path.exists(f"{binary}_{id}-klee-out"):
+            os.rename(os.readlink('klee-last'), f"{binary}_{id}-klee-out")
+            os.remove('klee-last')
+
+        # save disk space ASAP if ran
+        if ran:
+            subprocess.run("find -type f -a -executable | xargs rm -f", shell=True, check=True) # Remove generated cruft (binary files)
+            subprocess.run("find -name '*.bin' -o -name '*.istats' -o -name 'pid' -o -name '*.ll' | xargs rm -f", shell=True, check=True) # Remove cruft generated by Klee
+            subprocess.run("rm -rf klee-last core", shell=True, check=True)
+
+    def parse(self, cachefile):
+        if os.path.exists(f'{cachefile}.timeout') or os.path.exists(f'logs/mpisv/{cachefile}.timeout'):
+            return 'timeout'
+        if not (os.path.exists(f'{cachefile}.txt') or os.path.exists(f'logs/mpisv/{cachefile}.txt')):
+            return 'failure'
+        if not (os.path.exists(f'{cachefile}-klee-out/info') or os.path.exists(f'logs/mpisv/{cachefile}-klee-out/info')):
+            return 'failure'
+
+        with open(f'{cachefile}.txt' if os.path.exists(f'{cachefile}.txt') else f'logs/mpisv/{cachefile}.txt', 'r') as infile:
+            output = infile.read()
+        with open(f'{cachefile}-klee-out/info' if os.path.exists(f'{cachefile}-klee-out/info') else f'logs/mpisv/{cachefile}-klee-out/info', 'r') as infile:
+            info = infile.read()
+
+        if re.search(r'Compilation of .*? raised an error \(retcode: ', output) or re.search('failed external call', output):
+            return 'UNIMPLEMENTED'
+
+        if re.search('found deadlock', output):
+            return 'deadlock'
+
+        if re.search('MBI_MSG_RACE', output):
+            return 'MBI_MSG_RACE'
+
+        if re.search('klee: .*? Assertion `.*? failed.', output):
+            return 'failure'
+
+        if re.search('No Violation detected by MPI-SV', info) or re.search('No Violation detected by MPI-SV', output):
+            return 'OK'
+        if re.search('/root/mpi-sv/mpisv.*?Illegal instruction', output):
+            return 'failure'
+
+        print (f">>>>[ INCONCLUSIVE ]>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> (mpisv/{cachefile})")
+        print(output)
+        print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
+        return 'other'
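
run() rewrites the mpirun command taken from a test's MBI header into an MPI-SV invocation. A small sketch of what this substitution chain produces for an example command (the input string and binary name are illustrative, not taken from a real test):

import re

execcmd = "mpirun -np 4 ${EXE} $zero_buffer"  # example command from a test header
binary = "example_test"

execcmd = re.sub("mpirun", "mpisv", execcmd)
execcmd = re.sub("-np ", "", execcmd)
execcmd = re.sub(r"\${EXE}", f"{binary}.bc", execcmd)
execcmd = re.sub(r"\$zero_buffer", "", execcmd)
execcmd = re.sub(r"\$infty_buffer", "", execcmd)

print(execcmd)  # "mpisv 4 example_test.bc " (note the harmless leftover space)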
diff --git a/scripts/tools/must.py b/scripts/tools/must.py
new file mode 100644
index 0000000000000000000000000000000000000000..724329f596a580b0daf9c5ccdd2780c5a7309139
--- /dev/null
+++ b/scripts/tools/must.py
@@ -0,0 +1,174 @@
+import re
+import os
+import tempfile
+import shutil
+from MBIutils import *
+
+def must_filter(line, process):
+    if re.search("ERROR: MUST detected a deadlock", line):
+        pid = process.pid
+        pgid = os.getpgid(pid)
+        try:
+            process.terminate()
+            # Send the signal to every process in the group (the command and everything it forked)
+            os.killpg(pgid, signal.SIGTERM)
+        except ProcessLookupError:
+            pass  # Ok, it's gone now
+
+class V18(AbstractTool):
+    def identify(self):
+        return "MUST v1.9.0 wrapper"
+
+    def ensure_image(self):
+        AbstractTool.ensure_image(self, "-x must")
+
+    def build(self, rootdir, cached=True):
+        if cached and os.path.exists("/MBI-builds/MUST19/bin/mustrun"):
+            os.environ['PATH'] = os.environ['PATH'] + ":/MBI-builds/MUST19/bin/"
+            return
+
+        subprocess.run(f"rm -rf /MBI-builds/MUST19", shell=True, check=True) # MUST sometimes fails when reinstalling over the same dir
+
+        # Build it
+        here = os.getcwd() # Save where we were
+        subprocess.run(f"rm -rf /tmp/build-must ; mkdir /tmp/build-must", shell=True, check=True)
+        os.chdir("/tmp/build-must")
+        subprocess.run(f"wget https://hpc.rwth-aachen.de/must/files/MUST-v1.9.0.tar.gz", shell=True, check=True)
+        subprocess.run(f"tar xfz MUST-*.tar.gz", shell=True, check=True)
+        subprocess.run(f"mkdir -p /tmp/build-must/build", shell=True, check=True)
+        os.chdir("/tmp/build-must/build")
+
+        subprocess.run(f"CC=$(which gcc) CXX=$(which gcc++) FC=$(which gfortran) cmake ../MUST-v1.9.0 -DCMAKE_INSTALL_PREFIX=/MBI-builds/MUST19 -DCMAKE_BUILD_TYPE=Release  -DENABLE_FORTRAN=OFF", shell=True, check=True)
+        subprocess.run(f"make -j$(nproc) install VERBOSE=1", shell=True, check=True)
+        subprocess.run(f"make -j$(nproc) install-prebuilds VERBOSE=1", shell=True, check=True)
+        subprocess.run(f"rm -rf /tmp/build-must", shell=True, check=True)
+
+        # Back to our previous directory
+        os.chdir(here)
+
+    def setup(self):
+        os.environ['PATH'] = os.environ['PATH'] + ":/MBI-builds/MUST19/bin/"
+        os.environ['OMPI_ALLOW_RUN_AS_ROOT'] = "1"
+        os.environ['OMPI_ALLOW_RUN_AS_ROOT_CONFIRM'] = "1"
+        subprocess.run("update-alternatives --set mpi /usr/bin/mpicc.openmpi", shell=True, check=True)
+        subprocess.run("update-alternatives --set mpirun /usr/bin/mpirun.openmpi", shell=True, check=True)
+
+    def run(self, execcmd, filename, binary, id, timeout, batchinfo):
+        cachefile = f'{binary}_{id}'
+
+        subprocess.run("killall -9 mpirun 2>/dev/null", shell=True)
+
+        execcmd = re.sub("mpirun", "mustrun --must:distributed", execcmd)
+        execcmd = re.sub(r'\${EXE}', f'./{binary}', execcmd)
+        execcmd = re.sub(r'\$zero_buffer', "", execcmd)
+        execcmd = re.sub(r'\$infty_buffer', "", execcmd)
+
+        with tempfile.TemporaryDirectory() as tmpdirname:
+            ran = self.run_cmd(
+                    buildcmd=f"mpicc {filename} -L/MBI-builds/MUST19/lib -lpnmpi -o {tmpdirname}/{binary}",
+                    execcmd=execcmd,
+                    cachefile=cachefile,
+                    filename=filename,
+                    binary=binary,
+                    timeout=timeout,
+                    batchinfo=batchinfo,
+                    cwd=tmpdirname,
+                    read_line_lambda=must_filter)
+
+            if os.path.isfile(f"{tmpdirname}/MUST_Output.html"):
+                shutil.copyfile(f"{tmpdirname}/MUST_Output.html", f"{cachefile}.html")
+
+    def teardown(self):
+        subprocess.run("find -type f -a -executable | xargs rm -f", shell=True, check=True) # Remove generated (binary files)
+        subprocess.run("rm -rf must_temp core", shell=True, check=True)
+
+    def parse(self, cachefile):
+        # do not report timeouts ASAP, as MUST still deadlocks when it detects a root mismatch
+        if not (os.path.exists(f'{cachefile}.txt') or os.path.exists(f'logs/must/{cachefile}.txt')):
+            return 'failure'
+        if not (os.path.exists(f'{cachefile}.html') or os.path.exists(f'logs/must/{cachefile}.html')):
+            return 'failure'
+
+        with open(f'{cachefile}.html' if os.path.exists(f'{cachefile}.html') else f'logs/must/{cachefile}.html', 'r') as infile:
+            html = infile.read()
+
+        if re.search('deadlock', html):
+            return 'deadlock'
+
+        if re.search('not freed', html):
+            return 'resleak'
+
+        if re.search('conflicting roots', html):
+            return 'various'
+
+        if re.search('unknown datatype', html) or re.search('has to be a non-negative integer', html) or re.search('must use equal type signatures', html):
+            return 'conflicting roots'
+
+        with open(f'{cachefile}.txt' if os.path.exists(f'{cachefile}.txt') else f'logs/must/{cachefile}.txt', 'r') as infile:
+            output = infile.read()
+
+        if re.search('MBI_MSG_RACE', output):
+            return 'MBI_MSG_RACE'
+
+        if re.search(r'Compilation of .*? raised an error \(retcode: ', output):
+            return 'UNIMPLEMENTED'
+
+        if re.search('caught MPI error', output):
+            return 'mpierr'
+
+        if re.search('Error', html):
+            return 'mpierr'
+
+        if re.search('MUST detected no MPI usage errors nor any suspicious behavior during this application run', html):
+            return 'OK'
+
+        if re.search('YOUR APPLICATION TERMINATED WITH THE EXIT STRING: Segmentation fault', output):
+            return 'segfault'
+        if re.search('caught signal nr 11', output) or re.search('caught signal nr 6', output):
+            return 'segfault'
+
+        if re.search('internal ABORT - process ', output):
+            return 'failure'
+
+        # No interesting output found, so return the timeout as is if it exists
+        if os.path.exists(f'{cachefile}.timeout') or os.path.exists(f'logs/must/{cachefile}.timeout'):
+            return 'timeout'
+
+        print (f">>>>[ INCONCLUSIVE ]>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> ({self.identify()}/{cachefile})")
+        print(output)
+        print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
+        return 'other'
+
+# No beta release of MUST for now
+#
+#class V19(V18):
+#    def identify(self):
+#        return "MUST v1.8.0 wrapper"
+#
+#    def ensure_image(self):
+#        AbstractTool.ensure_image(self, "-x must18")
+#
+#    def build(self, rootdir, cached=True):
+#        if cached and os.path.exists("/MBI-builds/MUST18/bin/mustrun"):
+#            return
+#
+#        subprocess.run(f"rm -rf /MBI-builds/MUST18", shell=True, check=True) # MUST sometimes fails when reinstalling over the same dir
+#
+#        # Build it
+#        here = os.getcwd() # Save where we were
+#        if not os.path.exists((f"{rootdir}/tools/MUST-v1.8.0.tar.gz")):
+#            subprocess.run(f"cd {rootdir}/tools; wget https://hpc.rwth-aachen.de/must/files/MUST-v1.8.0.tar.gz", shell=True, check=True)
+#        subprocess.run(f"rm -rf /tmp/build-must ; mkdir /tmp/build-must", shell=True, check=True)
+#        os.chdir("/tmp/build-must")
+#        subprocess.run(f"tar xfz {rootdir}/tools/MUST-v1.8.0.tar.gz", shell=True, check=True)
+#
+#        subprocess.run(f"CC=$(which clang) CXX=$(which clang++) OMPI_CC=$(which clang) OMPI_CXX=$(which clang++) FC=$(which gfortran) cmake MUST-v1.8.0 -DCMAKE_INSTALL_PREFIX=/MBI-builds/MUST18 -DCMAKE_BUILD_TYPE=Release", shell=True, check=True)
+#        subprocess.run(f"make -j$(nproc) install VERBOSE=1", shell=True, check=True)
+#        subprocess.run(f"make -j$(nproc) install-prebuilds VERBOSE=1", shell=True, check=True)
+#
+#        # Back to our previous directory
+#        os.chdir(here)
+#
+#    def setup(self):
+#        os.environ['PATH'] = os.environ['PATH'] + ":/MBI-builds/MUST18/bin/"
+#
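
must_filter() above stops mustrun and everything it forked by signalling the whole process group as soon as a deadlock report appears on the output. A standalone sketch of that pattern, assuming the child was started in its own session (as the run_cmd helper of MBIutils presumably arranges) so that killpg does not reach the caller itself:

import os
import signal
import subprocess

# Start the command as the leader of a fresh process group.
process = subprocess.Popen(["sleep", "300"], start_new_session=True)

pgid = os.getpgid(process.pid)
try:
    process.terminate()
    os.killpg(pgid, signal.SIGTERM)  # also reaches any children it forked
except ProcessLookupError:
    pass  # the whole group is already gone
process.wait()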
diff --git a/scripts/tools/parcoach.py b/scripts/tools/parcoach.py
new file mode 100644
index 0000000000000000000000000000000000000000..ab3b40e2c928e293f13f1bd5a1de52d864ac678c
--- /dev/null
+++ b/scripts/tools/parcoach.py
@@ -0,0 +1,111 @@
+import re
+import os
+from MBIutils import *
+
+class Tool(AbstractTool):
+    def identify(self):
+        return "PARCOACH wrapper"
+
+    def ensure_image(self):
+        AbstractTool.ensure_image(self, "-x parcoach")
+
+    def build(self, rootdir, cached=True):
+        if cached and os.path.exists(f"/MBI-builds/parcoach/bin/parcoach"):
+            print("No need to rebuild PARCOACH.")
+            os.environ['PATH'] = os.environ['PATH'] + ":/MBI-builds/parcoach/bin/"
+            subprocess.run("export OMPI_CC=clang-15", shell=True, check=True)
+            return
+        if not os.path.exists("/usr/lib/llvm-15/bin/clang"):
+            subprocess.run("ln -s $(which clang) /usr/lib/llvm-15/bin/clang", shell=True, check=True)
+
+        here = os.getcwd() # Save where we were
+        # Get a GIT checkout.
+        ##subprocess.run("rm -rf /tmp/parcoach && git clone --depth=1 https://github.com/parcoach/parcoach.git /tmp/parcoach", shell=True, check=True)
+        #subprocess.run("rm -rf /tmp/parcoach ; mkdir /tmp/parcoach", shell=True, check=True)
+        # Go to where we want to install it, and build it out-of-tree (we're in the docker)
+        #subprocess.run("mkdir -p /MBI-builds/parcoach", shell=True, check=True)
+        subprocess.run(f"wget https://gitlab.inria.fr/api/v4/projects/12320/packages/generic/parcoach/2.3.0/parcoach-2.3.0-shared-Linux.tar.gz", shell=True, check=True)
+        subprocess.run(f"tar xfz parcoach-*.tar.gz", shell=True, check=True)
+        if not os.path.exists('/MBI-builds'):
+            subprocess.run(f"mkdir /MBI-builds", shell=True, check=True)
+        #os.chdir('/MBI-builds/parcoach')
+        # We use the prebuilt PARCOACH binaries
+        subprocess.run("mv parcoach-*/ /MBI-builds/parcoach/", shell=True, check=True)
+
+        #subprocess.run(f"cmake /tmp/parcoach -DPARCOACH_ENABLE_TESTS=OFF -DCMAKE_C_COMPILER=clang-15 -DCMAKE_CXX_COMPILER=clang++-15 -DLLVM_DIR={rootdir}/tools/Parcoach/llvm-project/build", shell=True, check=True)
+        #subprocess.run("make -j$(nproc) VERBOSE=1", shell=True, check=True)
+        #subprocess.run("rm -rf /tmp/parcoach", shell=True, check=True)
+
+        os.environ['PATH'] = os.environ['PATH'] + ":/MBI-builds/parcoach/bin/"
+        os.environ['OMPI_CC'] = 'clang-15' # an 'export' run in a subshell would not persist
+
+        # Back to our previous directory
+        os.chdir(here)
+
+    def run(self, execcmd, filename, binary, id, timeout, batchinfo):
+        cachefile = f'{binary}_{id}'
+
+        if 'Win' in filename: # RMA tests (using MPI_Win) need the dedicated RMA checks
+            self.run_cmd(
+                #buildcmd=f"clang -c -g -emit-llvm {filename} -I/usr/lib/x86_64-linux-gnu/openmpi/include/ -o {binary}.bc",
+                #buildcmd=f"parcoachcc clang {filename} -check-rma -c -o {binary}.o",
+                buildcmd=f"parcoachcc -check=rma --args clang -I/usr/lib/x86_64-linux-gnu/openmpi/include -I/usr/lib/x86_64-linux-gnu/openmpi/include/openmpi {filename} -c -o {binary}.o",
+                execcmd=f"parcoachcc -check=rma --args clang -I/usr/lib/x86_64-linux-gnu/openmpi/include -I/usr/lib/x86_64-linux-gnu/openmpi/include/openmpi {filename} -c -o {binary}.o",
+                cachefile=cachefile,
+                filename=filename,
+                binary=binary,
+                timeout=timeout,
+                batchinfo=batchinfo)
+        else:
+            self.run_cmd(
+                #buildcmd=f"clang -c -g -emit-llvm {filename} -I/usr/lib/x86_64-linux-gnu/openmpi/include/ -o {binary}.bc",
+                #buildcmd=f"parcoachcc clang {filename} -check-rma -c -o {binary}.o",
+                buildcmd=f"parcoachcc clang -I/usr/lib/x86_64-linux-gnu/openmpi/include -I/usr/lib/x86_64-linux-gnu/openmpi/include/openmpi {filename} -c -o {binary}.o",
+                execcmd=f"parcoachcc clang -I/usr/lib/x86_64-linux-gnu/openmpi/include -I/usr/lib/x86_64-linux-gnu/openmpi/include/openmpi {filename} -c -o {binary}.o",
+                cachefile=cachefile,
+                filename=filename,
+                binary=binary,
+                timeout=timeout,
+                batchinfo=batchinfo)
+
+
+        subprocess.run("rm -f *.bc core", shell=True, check=True)
+
+    def parse(self, cachefile):
+        if os.path.exists(f'{cachefile}.timeout') or os.path.exists(f'logs/parcoach/{cachefile}.timeout'):
+            return 'timeout'
+        if not (os.path.exists(f'{cachefile}.txt') or os.path.exists(f'logs/parcoach/{cachefile}.txt')):
+            return 'failure'
+
+        with open(f'{cachefile}.txt' if os.path.exists(f'{cachefile}.txt') else f'logs/parcoach/{cachefile}.txt', 'r') as infile:
+            output = infile.read()
+
+        if re.search('MBI_MSG_RACE', output):
+            return 'MBI_MSG_RACE'
+
+        if re.search('warning', output):
+            return 'deadlock'
+
+        if re.search('Local concurrency', output):
+            return 'local concurrency'
+
+        if re.search(r'Compilation of .*? raised an error \(retcode: ', output):
+            return 'UNIMPLEMENTED'
+
+        if re.search('No issues found', output):
+            return 'OK'
+
+        return 'other'
+
+    def is_correct_diagnostic(self, test_id, res_category, expected, detail):
+        # PARCOACH only detects call ordering errors
+        if res_category != 'TRUE_POS':
+            return True
+
+        if possible_details[detail] == "DMatch":
+            return True
+
+        if possible_details[detail] == "InputHazard":
+            return True
+
+        return False
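
build() sets OMPI_CC through os.environ rather than through a shell export, because an export executed with subprocess.run only lives inside that short-lived shell. A minimal sketch of the difference:

import os
import subprocess

# An 'export' inside a child shell does not survive the call:
subprocess.run("export OMPI_CC=clang-15", shell=True, check=True)
print(os.environ.get("OMPI_CC"))  # None, unless it was already set by the parent

# Setting os.environ in the Python process does persist, and is inherited
# by every subprocess started afterwards:
os.environ["OMPI_CC"] = "clang-15"
subprocess.run("echo $OMPI_CC", shell=True, check=True)  # prints clang-15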
diff --git a/scripts/tools/simgrid.py b/scripts/tools/simgrid.py
new file mode 100644
index 0000000000000000000000000000000000000000..3617407f35088436d2f4e520b7329d81a65e50b5
--- /dev/null
+++ b/scripts/tools/simgrid.py
@@ -0,0 +1,214 @@
+# Copyright 2021-2022. The MBI project. All rights reserved.
+# This program is free software; you can redistribute it and/or modify it under the terms of the license (GNU GPL).
+
+import re
+import os
+import tempfile
+from MBIutils import *
+
+class Tool(AbstractTool):
+    name_ext = ""
+    version = ""
+    install_path = "/usr"
+    exec_cfg = "--cfg=smpi/finalization-barrier:on --cfg=smpi/list-leaks:10 --cfg=model-check/max-depth:10000 --cfg=smpi/pedantic:true"
+
+    def identify(self):
+        return "SimGrid wrapper"
+
+    def build(self, rootdir, cached=True):
+        if cached and os.path.exists('/usr/bin/simgrid-mc'):
+            return
+
+        here = os.getcwd() # Save where we were
+        os.chdir(rootdir)
+
+        # Install the dependencies
+        subprocess.run("apt-get -y install cmake gfortran libboost-dev libunwind-dev git   openmpi-bin libunwind8 libopenmpi-dev libdw-dev", shell=True, check=True)
+
+        # Get a GIT checkout
+        subprocess.run("rm -rf /tmp/simgrid && git clone --depth=1 https://github.com/simgrid/simgrid.git /tmp/simgrid", shell=True, check=True)
+
+        # Build and install it
+        os.chdir("/tmp/simgrid")
+        subprocess.run(f"cmake -DCMAKE_INSTALL_PREFIX=/usr -Denable_compile_optimizations=ON -Denable_model-checking=ON .", shell=True, check=True)
+        subprocess.run("make -j$(nproc) install VERBOSE=1", shell=True, check=True)
+
+        # Back to our previous directory
+        os.chdir(here)
+        subprocess.run("rm -rf /tmp/simgrid", shell=True, check=True)
+
+        # Remove the build-deps
+        subprocess.run("apt-get -y remove cmake libboost-dev libunwind-dev git", shell=True, check=True)
+        subprocess.run("apt-get autoremove -yq && apt-get clean -yq", shell=True, check=True)
+
+
+    def ensure_image(self):
+        AbstractTool.ensure_image(self, "-x simgrid")
+
+    def setup(self):
+        os.environ['VERBOSE'] = '1'
+        if not os.path.exists("/MBI/cluster.xml"):
+            with open('/MBI/cluster.xml', 'w') as outfile:
+                outfile.write("<?xml version='1.0'?>\n")
+                outfile.write("<!DOCTYPE platform SYSTEM \"https://simgrid.org/simgrid.dtd\">\n")
+                outfile.write('<platform version="4.1">\n')
+                outfile.write(' <cluster id="acme" prefix="node-" radical="0-99" suffix="" speed="1Gf" bw="125MBps" lat="50us"/>\n')
+                outfile.write('</platform>\n')
+
+
+    def run(self, execcmd, filename, binary, id, timeout, batchinfo):
+        cachefile = f'{binary}_{id}'
+
+        execcmd = execcmd.replace(f"mpirun", f"{self.install_path}/bin/smpirun -wrapper {self.install_path}/bin/simgrid-mc -platform /MBI/cluster.xml -analyze {self.exec_cfg}")
+        execcmd = execcmd.replace('${EXE}', binary)
+        execcmd = execcmd.replace('$zero_buffer', "--cfg=smpi/buffering:zero")
+        execcmd = execcmd.replace('$infty_buffer', "--cfg=smpi/buffering:infty")
+
+        with tempfile.TemporaryDirectory() as tmpdirname:
+            self.run_cmd(
+                buildcmd=f"{self.install_path}/bin/smpicc {filename} -trace-call-location -g -Wl,-znorelro -Wl,-znoseparate-code -o {tmpdirname}/{binary}",
+                execcmd=execcmd,
+                cachefile=cachefile,
+                filename=filename,
+                binary=binary,
+                timeout=timeout,
+                cwd=tmpdirname,
+                batchinfo=batchinfo)
+
+    def teardown(self):
+        subprocess.run("find -type f -a -executable | xargs rm -f", shell=True, check=True) # Remove generated cruft (binary files)
+        subprocess.run("rm -f smpitmp-* core", shell=True, check=True)
+
+    def parse(self, cachefile):
+        if os.path.exists(f'{cachefile}.timeout') or os.path.exists(f'logs/simgrid{self.name_ext}/{cachefile}.timeout'):
+            return 'timeout'
+        if not (os.path.exists(f'{cachefile}.txt') or os.path.exists(f'logs/simgrid{self.name_ext}/{cachefile}.txt')):
+            return 'failure'
+
+        with open(f'{cachefile}.txt' if os.path.exists(f'{cachefile}.txt') else f'logs/simgrid{self.name_ext}/{cachefile}.txt', 'r') as infile:
+            output = infile.read()
+
+        if re.search(r'Compilation of .*? raised an error \(retcode: ', output):
+            return 'UNIMPLEMENTED'
+
+        if re.search('MBI_MSG_RACE', output):
+            return 'MBI_MSG_RACE'
+
+        if re.search('MC is currently not supported here', output):
+            return 'failure'
+
+        if re.search('Collective communication mismatch', output):
+            return 'Collective mismatch'
+
+        if re.search('DEADLOCK DETECTED', output):
+            return 'deadlock'
+        if re.search('returned MPI_ERR', output):
+            return 'mpierr'
+        if re.search('Not yet implemented', output):
+            return 'UNIMPLEMENTED'
+        if re.search('CRASH IN THE PROGRAM', output):
+            return 'failure'
+        if re.search('Probable memory leaks in your code: SMPI detected', output):
+            return 'resleak'
+        if re.search('DFS exploration ended.', output) or re.search('No property violation found.', output):
+            return 'OK'
+
+        print (f">>>>[ INCONCLUSIVE ]>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> (simgrid{self.name_ext}/{cachefile})")
+        print(output)
+        print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
+        return 'other'
+
+    def is_correct_diagnostic(self, test_id, res_category, expected, detail):
+        if res_category != 'TRUE_POS':
+            return True
+
+        possible_output = {
+            'AInvalidParam' :    ['mpierr', 'deadlock'],
+            'BResLeak' :         ['mpierr', 'deadlock', 'resleak'],
+            'BReqLifecycle' :    ['mpierr', 'deadlock', 'resleak'],
+            'BEpochLifecycle' :  ['mpierr', 'deadlock', 'resleak'],
+            # 'BLocalConcurrency' :  ['mpierr'],
+            'CMatch' :           ['mpierr', 'deadlock'],
+            'DRace' :            ['mpierr', 'deadlock', 'MBI_MSG_RACE'],
+            'DMatch' :           ['mpierr', 'deadlock', 'Collective mismatch'],
+            # 'DGlobalConcurrency' : ['mpierr'],
+            'EBufferingHazard' : ['mpierr', 'deadlock'],
+            # 'InputHazard' : [],
+            # 'FOK' : []
+        }
+
+        if possible_details[detail] not in possible_output:
+            return True
+
+        out = self.parse(test_id)
+
+        if out not in possible_output[possible_details[detail]]:
+            print(f'{test_id} : {possible_details[detail]} ({detail}) : {out}')
+            return False
+
+        return True
+
+
+class v3_27(Tool):
+    name_ext = "-3.27"
+    version = "v3.27"
+    install_path = f"/sg-v3_27"
+    exec_cfg = "--cfg=smpi/list-leaks:10 --cfg=model-check/max-depth:10000"
+
+    def identify(self):
+        return f"SimGrid {self.version} wrapper"
+
+    def build(self, rootdir, cached=True):
+        if cached and os.path.exists(f'{self.install_path}/bin/simgrid-mc'):
+            return
+
+        here = os.getcwd() # Save where we were
+        os.chdir(rootdir)
+
+        # Install the dependencies
+        subprocess.run("apt-get -y install cmake gfortran libboost-dev libunwind-dev git   openmpi-bin libunwind8 libopenmpi-dev libdw-dev", shell=True, check=True)
+        
+        # Get a GIT checkout
+        subprocess.run(f"rm -rf /tmp/simgrid-{self.version} && git clone --depth=1 https://github.com/simgrid/simgrid.git /tmp/simgrid-{self.version} --branch {self.version}", shell=True, check=True)
+
+        # Build and install it
+        os.chdir(f"/tmp/simgrid-{self.version}")
+        subprocess.run(f"cmake -DCMAKE_INSTALL_PREFIX={self.install_path} -Denable_compile_optimizations=ON -Denable_model-checking=ON .", shell=True, check=True)
+        subprocess.run("make -j$(nproc) install VERBOSE=1", shell=True, check=True)
+
+        # Back to our previous directory
+        os.chdir(here)
+
+        # Remove the build-deps
+        subprocess.run("apt-get -y remove cmake libboost-dev libunwind-dev git", shell=True, check=True)
+        subprocess.run("apt-get autoremove -yq && apt-get clean -yq", shell=True, check=True)
+
+class v3_28(v3_27):
+    name_ext = "-3.28"
+    version = "v3.28"
+    install_path = f"/sg-v3_28"
+    exec_cfg = "--cfg=smpi/finalization-barrier:on --cfg=smpi/list-leaks:10 --cfg=model-check/max-depth:10000 --cfg=smpi/pedantic:true"
+
+class v3_29(v3_27):
+    name_ext = "-3.29"
+    version = "v3.29"
+    install_path = f"/sg-v3_29"
+    exec_cfg = "--cfg=smpi/finalization-barrier:on  --cfg=smpi/list-leaks:10 --cfg=model-check/max-depth:10000 --cfg=smpi/pedantic:true"
+
+class v3_30(v3_27):
+    name_ext = "-3.30"
+    version = "v3.30"
+    install_path = f"/sg-v3_30"
+    exec_cfg = "--cfg=smpi/finalization-barrier:on --cfg=smpi/list-leaks:10 --cfg=model-check/max-depth:10000 --cfg=smpi/pedantic:true"
+
+class v3_31(v3_27):
+    name_ext = "-3.31"
+    version = "v3.31"
+    install_path = f"/sg-v3_31"
+    exec_cfg = "--cfg=smpi/finalization-barrier:on --cfg=smpi/list-leaks:10 --cfg=model-check/max-depth:10000 --cfg=smpi/pedantic:true"
+
+class v3_32(v3_27):
+    name_ext = "-3.32"
+    version = "v3.32"
+    install_path = f"/sg-v3_32"
+    exec_cfg = "--cfg=smpi/finalization-barrier:on --cfg=smpi/list-leaks:10 --cfg=model-check/max-depth:10000 --cfg=smpi/pedantic:true"
diff --git a/scripts/tools/smpi.py b/scripts/tools/smpi.py
new file mode 100644
index 0000000000000000000000000000000000000000..794428891c83ec864df2c2c0b56399a794dc9a44
--- /dev/null
+++ b/scripts/tools/smpi.py
@@ -0,0 +1,83 @@
+import re
+import os
+import tools.simgrid
+from MBIutils import *
+
+class Tool(tools.simgrid.Tool):
+    def identify(self):
+        return "SimGrid MPI"
+
+    def ensure_image(self):
+        AbstractTool.ensure_image(self, "-x smpi")
+
+    def run(self, execcmd, filename, binary, id, timeout, batchinfo, extraargs=""):
+        cachefile = f'{binary}_{id}'
+
+        if not os.path.exists("cluster.xml"):
+            with open('cluster.xml', 'w') as outfile:
+                outfile.write("<?xml version='1.0'?>\n")
+                outfile.write("<!DOCTYPE platform SYSTEM \"https://simgrid.org/simgrid.dtd\">\n")
+                outfile.write('<platform version="4.1">\n')
+                outfile.write(' <cluster id="acme" prefix="node-" radical="0-99" suffix="" speed="1Gf" bw="125MBps" lat="50us"/>\n')
+                outfile.write('</platform>\n')
+
+        execcmd = re.sub("mpirun", f"smpirun {extraargs} -platform ./cluster.xml -analyze --cfg=smpi/finalization-barrier:on --cfg=smpi/list-leaks:10 --cfg=smpi/pedantic:true", execcmd)
+        execcmd = re.sub(r'\${EXE}', binary, execcmd)
+        execcmd = re.sub(r'\$zero_buffer', "", execcmd)
+        execcmd = re.sub(r'\$infty_buffer', "", execcmd)
+
+        self.run_cmd(
+            buildcmd=f"smpicc {filename} -trace-call-location -g -Wl,-znorelro -Wl,-znoseparate-code -o {binary}",
+            execcmd=execcmd,
+            cachefile=cachefile,
+            filename=filename,
+            binary=binary,
+            timeout=timeout,
+            batchinfo=batchinfo)
+
+        subprocess.run("find -type f -a -executable | xargs rm -f", shell=True, check=True) # Remove generated cruft (binary files)
+        subprocess.run("rm -f smpitmp-* core", shell=True, check=True)
+
+    def parse(self, cachefile):
+        if os.path.exists(f'{cachefile}.timeout') or os.path.exists(f'logs/smpi/{cachefile}.timeout'):
+            return 'timeout'
+        if not (os.path.exists(f'{cachefile}.txt') or os.path.exists(f'logs/smpi/{cachefile}.txt')):
+            return 'failure'
+
+        with open(f'{cachefile}.txt' if os.path.exists(f'{cachefile}.txt') else f'logs/smpi/{cachefile}.txt', 'r') as infile:
+            output = infile.read()
+
+        if re.search(r'Compilation of .*? raised an error \(retcode: ', output):
+            return 'UNIMPLEMENTED'
+
+        if re.search('MC is currently not supported here', output):
+            return 'failure'
+        if re.search('Segmentation fault.', output):
+            return 'segfault'
+
+        if re.search('MBI_MSG_RACE', output):
+            return 'MBI_MSG_RACE'
+
+        if re.search('DEADLOCK DETECTED', output):
+            return 'deadlock'
+        if re.search('returned MPI_ERR', output):
+            return 'mpierr'
+        if re.search('Not yet implemented', output):
+            return 'UNIMPLEMENTED'
+        if re.search('CRASH IN THE PROGRAM', output):
+            return 'segfault'
+        if re.search('Probable memory leaks in your code: SMPI detected', output):
+            return 'resleak'
+        if re.search('No property violation found', output):
+            return 'OK'
+        if re.search('Command return code: 0,', output):
+            return 'OK'
+        if re.search('Command killed by signal 15, elapsed time: 300', output):
+            return 'timeout'
+        if re.search('Execution failed with code 134.', output):
+            return 'segfault'
+
+        print (f">>>>[ INCONCLUSIVE ]>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> (smpi/{cachefile})")
+        print(output)
+        print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
+        return 'other'
diff --git a/scripts/tools/smpivg.py b/scripts/tools/smpivg.py
new file mode 100644
index 0000000000000000000000000000000000000000..18064d7648de727f86c93d586b8d5f046a2a93e7
--- /dev/null
+++ b/scripts/tools/smpivg.py
@@ -0,0 +1,69 @@
+import re
+import os
+import tools.smpi
+import subprocess
+from MBIutils import *
+
+class Tool(tools.smpi.Tool):
+    def identify(self):
+        return "SimGrid MPI with Valgrind wrapper"
+
+    def ensure_image(self):
+        AbstractTool.ensure_image(self, "-x smpivg")
+
+    def run(self, execcmd, filename, binary, id, timeout, batchinfo):
+        if not os.path.exists('simgrid.supp'):
+            if os.path.exists('../../simgrid.supp'):
+                print(f"\nCopying simgrid.supp from {os.getcwd()}/../.. to {os.getcwd()}.")
+                subprocess.run("cp ../../simgrid.supp .", shell=True, check=True)
+            else:
+                print(f"\nDownloading simgrid.supp in {os.getcwd()}.")
+                subprocess.run("apt-get update", shell=True, check=True)
+                subprocess.run("apt-get install -y wget", shell=True, check=True)
+                subprocess.run("wget 'https://framagit.org/simgrid/simgrid/-/raw/master/tools/simgrid.supp?inline=false' -O simgrid.supp", shell=True, check=True)
+
+        tools.smpi.Tool.run(self, execcmd, filename, binary, id, timeout, batchinfo, extraargs="-wrapper 'valgrind --leak-check=no --suppressions=simgrid.supp'")
+        subprocess.run("rm -f vgcore.*", shell=True, check=True) # Save disk space ASAP
+
+    def parse(self, cachefile):
+        if os.path.exists(f'{cachefile}.timeout') or os.path.exists(f'logs/smpivg/{cachefile}.timeout'):
+            return 'timeout'
+        if not (os.path.exists(f'{cachefile}.txt') or os.path.exists(f'logs/smpivg/{cachefile}.txt')):
+            return 'failure'
+
+        with open(f'{cachefile}.txt' if os.path.exists(f'{cachefile}.txt') else f'logs/smpivg/{cachefile}.txt', 'r') as infile:
+            output = infile.read()
+
+        if re.search(r'Compilation of .*? raised an error \(retcode: ', output):
+            return 'UNIMPLEMENTED'
+
+        if re.search('ERROR SUMMARY: [^0]', output):
+            return 'failure'
+
+        if re.search('MC is currently not supported here', output):
+            return 'failure'
+
+        if re.search('MBI_MSG_RACE', output):
+            return 'MBI_MSG_RACE'
+
+        if re.search('DEADLOCK DETECTED', output):
+            return 'deadlock'
+        if re.search('returned MPI_ERR', output):
+            return 'mpierr'
+        if re.search('Not yet implemented', output):
+            return 'UNIMPLEMENTED'
+        if re.search('CRASH IN THE PROGRAM', output):
+            return 'segfault'
+        if re.search('Probable memory leaks in your code: SMPI detected', output):
+            return 'resleak'
+        if re.search('No property violation found', output):
+            return 'OK'
+        if re.search('Command return code: 0,', output):
+            return 'OK'
+        if re.search('Command killed by signal 15, elapsed time: ', output):
+            return 'timeout'
+
+        print (f">>>>[ INCONCLUSIVE ]>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> (smpivg/{cachefile})")
+        print(output)
+        print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
+        return 'other'
diff --git a/scripts/workInProgress/InputHazardGenerator.py b/scripts/workInProgress/InputHazardGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..899e810f969afdd72ed1d4cf57825fb6aa46853b
--- /dev/null
+++ b/scripts/workInProgress/InputHazardGenerator.py
@@ -0,0 +1,188 @@
+#! /usr/bin/python3
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: MBI
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+  Version of MPI: Conforms to MPI 1.1, does not require MPI 2 implementation
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: Lacking
+  COLL!basic: @{collfeature}@
+  COLL!nonblocking: @{icollfeature}@
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 2 ${EXE} 1
+  | @{outcome}@
+  | @{errormsg}@
+  $ mpirun -np 2 ${EXE} 2
+  | @{outcome}@
+  | @{errormsg}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define N 10
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  MPI_Status sta;
+  int i=0;
+  int root = 0;
+  int stag=0;
+  int rtag=0;
+  int buff_size = N;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 2)
+    printf("MBI ERROR: This test needs at least 2 processes to produce a bug!\\n");
+
+  if (argc < 2)
+    printf("MBI ERROR: This test needs at least 1 argument to produce a bug!\\n");
+
+  int dbs = sizeof(int)*nprocs; /* Size of the dynamic buffers for alltoall and friends */
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+  MPI_Op op = MPI_SUM;
+
+  int n = atoi(argv[1]);
+  int buffer[N] = {42};
+
+  @{init1}@
+  @{init2}@
+
+  if (rank == 0) {
+    if ((n % 2) == 0) { @{errorcond}@
+      @{operation1b}@
+      @{fini1b}@
+    } else {
+      @{operation1a}@
+      @{fini1a}@
+    }
+  } else @{addcond}@ {
+    @{operation2}@
+    @{fini2}@
+  }
+
+  @{free1}@
+  @{free2}@
+
+  MPI_Finalize();
+
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+# P2P
+for s in gen.send + gen.isend:
+    for r in gen.recv + gen.irecv:
+        patterns = {}
+        patterns = {'s': s, 'r': r}
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['p2pfeature'] = 'Yes' if s in gen.send or r in gen.recv else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv else 'Lacking'
+        patterns['collfeature'] = 'Lacking'
+        patterns['icollfeature'] = 'Lacking'
+        patterns['s'] = s
+        patterns['r'] = r
+
+        patterns['init1'] = gen.init[s]("1")
+        patterns['operation1a'] = gen.operation[s]("1").replace("buf1", "buffer").replace("dest", "1")
+        patterns['operation1b'] = gen.operation[s]("1").replace("buf1", "buffer").replace("dest", "1")
+        patterns['fini1a'] = gen.fini[s]("1")
+        patterns['fini1b'] = gen.fini[s]("1")
+        patterns['free1'] = gen.free[s]("1")
+
+        patterns['init2'] = gen.init[r]("2")
+        patterns['operation2'] = gen.operation[r]("2").replace("buf2", "buffer").replace("src", "0")
+        patterns['fini2'] = gen.fini[r]("2")
+        patterns['free2'] = gen.free[r]("2")
+
+        patterns['errorcond'] = ''
+        patterns['addcond'] = 'if (rank == 1)'
+
+        # Generate a correct matching
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Correct call ordering.'
+        replace['longdesc'] = 'Correct call ordering.'
+        replace['outcome'] = 'OK'
+        replace['errormsg'] = 'OK'
+        gen.make_file(template, f'InputHazardCallOrdering_{r}_{s}_ok.c', replace)
+
+        # Generate the incorrect matching
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Missing Send function.'
+        replace['longdesc'] = 'The Send call is missing on an input-dependent execution path, which creates a deadlock.'
+        replace['outcome'] = 'ERROR: IHCallMatching'
+        replace['errormsg'] = 'P2P mismatch. Missing @{s}@ at @{filename}@:@{line:MBIERROR}@.'
+        replace['errorcond'] = '/* MBIERROR */'
+        replace['operation1b'] = ''
+        replace['fini1b'] = ''
+        gen.make_file(template, f'InputHazardCallOrdering_{r}_{s}_nok.c', replace)
+
+# COLLECTIVE
+for c in gen.coll:
+    patterns = {}
+    patterns = {'c': c}
+    patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+    patterns['p2pfeature'] = 'Lacking'
+    patterns['ip2pfeature'] = 'Lacking'
+    patterns['collfeature'] = 'Yes' if c in gen.coll else 'Lacking'
+    patterns['icollfeature'] = 'Yes' if c in gen.icoll else 'Lacking'
+    patterns['c'] = c
+
+    patterns['init1'] = gen.init[c]("1")
+    patterns['operation1a'] = gen.operation[c]("1")
+    patterns['operation1b'] = gen.operation[c]("1")
+    patterns['fini1a'] = gen.fini[c]("1")
+    patterns['fini1b'] = gen.fini[c]("1")
+    patterns['free1'] = gen.free[c]("1")
+
+    patterns['init2'] = gen.init[c]("2")
+    patterns['operation2'] = gen.operation[c]("2")
+    patterns['fini2'] = gen.fini[c]("2")
+    patterns['free2'] = gen.free[c]("2")
+
+    patterns['errorcond'] = ''
+    patterns['addcond'] = ''
+
+    # Generate a correct matching
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Correct call ordering.'
+    replace['longdesc'] = 'Correct call ordering.'
+    replace['outcome'] = 'OK'
+    replace['errormsg'] = 'OK'
+    gen.make_file(template, f'InputHazardCallOrdering_{c}_ok.c', replace)
+
+    # Generate the incorrect matching
+    replace = patterns.copy()
+    replace['shortdesc'] = 'Missing collective function call.'
+    replace['longdesc'] = 'The collective call is missing on an input-dependent execution path, which creates a deadlock.'
+    replace['outcome'] = 'ERROR: IHCallMatching'
+    replace['errormsg'] = 'Collective mismatch. Missing @{c}@ at @{filename}@:@{line:MBIERROR}@.'
+    replace['errorcond'] = '/* MBIERROR */'
+    replace['operation1b'] = ''
+    replace['fini1b'] = ''
+    gen.make_file(template, f'InputHazardCallOrdering_{c}_nok.c', replace)
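
Both generators delegate the actual file emission to gen.make_file from generator_utils, which is not part of this diff. A minimal sketch of the @{key}@ substitution it presumably performs (the real helper may do more, such as locating the MBIERROR line numbers):

def make_file(template, filename, replace):
    """Fill every @{key}@ placeholder of template with replace[key] and write
    the result to filename (sketch of what generator_utils.make_file is assumed
    to do; the actual implementation may differ)."""
    output = template
    # Two passes, so that placeholders introduced by a substitution
    # (e.g. @{r}@ inside an error message) are resolved as well.
    for _ in range(2):
        for key, value in replace.items():
            output = output.replace(f'@{{{key}}}@', str(value))
    with open(filename, 'w') as out:
        out.write(output)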
diff --git a/scripts/workInProgress/P2PBufferingGenerator.py b/scripts/workInProgress/P2PBufferingGenerator.py
new file mode 100755
index 0000000000000000000000000000000000000000..05253b87380cb19bc9ec3a39ccd08116d2c643ef
--- /dev/null
+++ b/scripts/workInProgress/P2PBufferingGenerator.py
@@ -0,0 +1,188 @@
+#! /usr/bin/python3
+
+import os
+import sys
+import generator_utils as gen
+
+template = """// @{generatedby}@
+/* ///////////////////////// The MPI Bugs Initiative ////////////////////////
+
+  Origin: @{origin}@
+
+  Description: @{shortdesc}@
+    @{longdesc}@
+
+BEGIN_MPI_FEATURES
+  P2P!basic: @{p2pfeature}@
+  P2P!nonblocking: @{ip2pfeature}@
+  P2P!persistent: Lacking
+  COLL!basic: Lacking
+  COLL!nonblocking: Lacking
+  COLL!persistent: Lacking
+  COLL!tools: Lacking
+  RMA: Lacking
+END_MPI_FEATURES
+
+BEGIN_MBI_TESTS
+  $ mpirun -np 4 $zero_buffer ${EXE}
+  | @{outcome1}@
+  | @{errormsg1}@
+  $ mpirun -np 4 $infty_buffer ${EXE}
+  | @{outcome1}@
+  | @{errormsg1}@
+END_MBI_TESTS
+//////////////////////       End of MBI headers        /////////////////// */
+
+#include <mpi.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+
+int main(int argc, char **argv) {
+  int nprocs = -1;
+  int rank = -1;
+  int dest, src;
+  int stag = 0, rtag = 0;
+  int buff_size = 1;
+
+  MPI_Init(&argc, &argv);
+  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  printf("Hello from rank %d \\n", rank);
+
+  if (nprocs < 4)
+    printf("MBI ERROR: This test needs at least 4 processes to produce a bug!\\n");
+
+  MPI_Comm newcom = MPI_COMM_WORLD;
+  MPI_Datatype type = MPI_INT;
+
+  @{init1}@
+  @{init2}@
+  if (rank == 0) {
+    src=@{src1}@,dest=@{dest1}@;
+    @{operation1a}@ /* MBIERROR1 */
+    @{fini1a}@
+    @{operation2a}@
+    @{fini2a}@
+  }else if (rank == 1) {
+    src=@{src2}@,dest=@{dest2}@;
+    @{operation1b}@ /* MBIERROR2 */
+    @{fini1b}@
+    @{operation2b}@
+    @{fini2b}@
+  }else{
+    src=@{src3}@,dest=@{dest3}@;
+    @{operation1c}@
+    @{fini1c}@
+    @{operation2c}@
+    @{fini2c}@
+  }
+  @{free1}@
+  @{free2}@
+
+  MPI_Finalize();
+  printf("Rank %d finished normally\\n", rank);
+  return 0;
+}
+"""
+
+for s in gen.send + gen.isend:
+    for r in gen.recv + gen.irecv:
+        patterns = {}
+        patterns = {'s': s, 'r': r}
+        patterns['origin'] = 'MBI'
+        patterns['generatedby'] = f'DO NOT EDIT: this file was generated by {os.path.basename(sys.argv[0])}. DO NOT EDIT.'
+        patterns['p2pfeature'] = 'Yes' if s in gen.send or r in gen.recv  else 'Lacking'
+        patterns['ip2pfeature'] = 'Yes' if s in gen.isend or r in gen.irecv  else 'Lacking'
+        patterns['s'] = s
+        patterns['r'] = r
+        patterns['src1'] = '1'
+        patterns['dest1'] = '1'
+        patterns['src2'] = '0'
+        patterns['dest2'] = '0'
+        patterns['src3'] = '0'
+        patterns['dest3'] = '0'
+        patterns['init1'] = gen.init[s]("1")
+        patterns['init2'] = gen.init[r]("2")
+        patterns['fini1a'] = gen.fini[s]("1")
+        patterns['fini1b'] = gen.fini[s]("1")
+        patterns['fini1c'] = ''
+        patterns['fini2a'] = gen.fini[r]("2")
+        patterns['fini2b'] = gen.fini[r]("2")
+        patterns['fini2c'] = ''
+        patterns['free1'] = gen.free[s]("1")
+        patterns['free2'] = gen.free[r]("2")
+        patterns['operation1a'] = gen.operation[s]("1")
+        patterns['operation2a'] = gen.operation[r]("2")
+        patterns['operation1b'] = gen.operation[s]("1")
+        patterns['operation2b'] = gen.operation[r]("2")
+        patterns['operation1c'] = ''
+        patterns['operation2c'] = ''
+
+        # Generate the incorrect matching depending on the buffering mode (send + recv)
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Point to point @{s}@ and @{r}@ may not be matched'
+        replace['longdesc'] = 'Processes 0 and 1 both call @{s}@ and @{r}@. This results in a deadlock depending on the buffering mode'
+        replace['outcome1'] = 'ERROR: BufferingHazard'
+        replace['errormsg1'] = f'Buffering Hazard. Possible deadlock depending on the buffer size of the MPI implementation and the system environment, caused by both processes calling {s} before {r}.'
+        gen.make_file(template, f'P2PBuffering_{s}_{r}_{s}_{r}_nok.c', replace)
+
+        # Generate the incorrect matching with send message to the same process depending on the buffering mode (send + recv)
+        replace = patterns.copy()
+        replace['origin'] = 'RTED'
+        replace['src1'] = '0'
+        replace['dest1'] = '0'
+        replace['src2'] = '1'
+        replace['dest2'] = '1'
+        replace['shortdesc'] = 'Point to point @{s}@ and @{r}@ may not be matched'
+        replace['longdesc'] = 'Processes 0 and 1 each send a message to themselves with @{s}@ before posting the matching @{r}@. This results in a deadlock depending on the buffering mode'
+        replace['outcome1'] = 'ERROR: BufferingHazard'
+        replace['errormsg1'] = f'Buffering Hazard. Possible deadlock depending on the buffer size of the MPI implementation and the system environment, caused by each process calling {s} towards itself before the matching {r}.'
+        gen.make_file(template, f'P2PBuffering_SameProcess_{s}_{r}_nok.c', replace)
+
+        # Generate the incorrect matching with circular send message depending on the buffering mode (send + recv)
+        replace = patterns.copy()
+        replace['origin'] = 'RTED'
+        replace['src1'] = '(nprocs - 1)'
+        replace['dest1'] = '1'
+        replace['src2'] = '0'
+        replace['dest2'] = '2'
+        replace['src3'] = '(rank - 1)'
+        replace['dest3'] = '((rank + 1) % nprocs)'
+        replace['fini1c'] = gen.fini[s]("1")
+        replace['fini2c'] = gen.fini[r]("2")
+        replace['operation1c'] = gen.operation[s]("1") + ' /* MBIERROR3 */'
+        replace['operation2c'] = gen.operation[r]("2")
+        replace['shortdesc'] = 'Point to point @{s}@ and @{r}@ may not be matched'
+        replace['longdesc'] = 'Every process calls @{s}@ towards its neighbour before posting @{r}@, forming a circular dependency. This results in a deadlock depending on the buffering mode'
+        replace['outcome1'] = 'ERROR: BufferingHazard'
+        replace['errormsg1'] = f'Buffering Hazard. Possible deadlock depending on the buffer size of the MPI implementation and the system environment, caused by a circular chain of {s} calls issued before the matching {r}.'
+        gen.make_file(template, f'P2PBuffering_Circular_{s}_{r}_nok.c', replace)
+
+        # Generate the incorrect matching depending on the buffering mode (recv + send)
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Point to point @{s}@ and @{r}@ are not matched'
+        replace['longdesc'] = 'Processes 0 and 1 both call @{r}@ and @{s}@. This results in a deadlock'
+        replace['outcome1'] = 'ERROR: CallMatching'
+        replace['errormsg1'] = 'ERROR: CallMatching'
+        replace['operation1a'] = gen.operation[r]("2")
+        replace['fini1a'] = gen.fini[r]("2")
+        replace['operation2a'] = gen.operation[s]("1")
+        replace['fini2a'] = gen.fini[s]("1")
+        replace['operation1b'] = gen.operation[r]("2")
+        replace['fini1b'] = gen.fini[r]("2")
+        replace['operation2b'] = gen.operation[s]("1")
+        replace['fini2b'] = gen.fini[s]("1")
+        gen.make_file(template, f'P2PCallMatching_{r}_{s}_{r}_{s}_nok.c', replace)
+
+        # Generate the correct matching
+        replace = patterns.copy()
+        replace['shortdesc'] = 'Point to point @{s}@ and @{r}@ are correctly matched'
+        replace['longdesc'] = 'Process 0 calls @{s}@ and process 1 calls @{r}@.'
+        replace['outcome1'] = 'OK'
+        replace['errormsg1'] = 'OK'
+        replace['fini1a'] = gen.fini[s]("1")
+        replace['fini2a'] = gen.fini[r]("2")
+        replace['operation1a'] = gen.operation[s]("1")
+        replace['operation2a'] = gen.operation[r]("2")
+        gen.make_file(template, f'P2PCallMatching_{s}_{r}_{r}_{s}_ok.c', replace)
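
The BEGIN_MBI_TESTS header emitted by these templates lists, for every run, the command line, the expected outcome and the expected error message. A sketch of how such a header can be read back from a generated test (MBIutils presumably ships its own parser; read_mbi_tests is a hypothetical name):

import re

def read_mbi_tests(source_path):
    """Return (command, expected outcome, expected error message) triples
    extracted from the BEGIN_MBI_TESTS header of a generated test file."""
    with open(source_path, 'r') as infile:
        text = infile.read()
    block = re.search(r'BEGIN_MBI_TESTS(.*?)END_MBI_TESTS', text, re.S).group(1)
    return [(cmd.strip(), outcome.strip(), msg.strip())
            for cmd, outcome, msg
            in re.findall(r'\$ (.*?)\n\s*\| (.*?)\n\s*\| (.*?)\n', block)]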