diff --git a/real_world_data.csv.tar.gz b/real_world_data.csv.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..e3af7e27b018e804caad3786ed785e22044a166f
Binary files /dev/null and b/real_world_data.csv.tar.gz differ
diff --git a/scripts/Infrastructure/CorrectParameter.py b/scripts/Infrastructure/CorrectParameter.py
index 461202cf4f8379e847720a8daac713fd7f57e32d..b42d9cb8fad3be30527a5765b6ff120c8722209f 100644
--- a/scripts/Infrastructure/CorrectParameter.py
+++ b/scripts/Infrastructure/CorrectParameter.py
@@ -1,6 +1,7 @@
 #! /usr/bin/python3
 from scripts.Infrastructure import MPICall
 from scripts.Infrastructure.Instruction import Instruction
+# from scripts.Infrastructure.MPICallFactory import MPICallFactory
 from scripts.Infrastructure.AllocCall import AllocCall, get_free
 
 
@@ -27,13 +28,13 @@ class CorrectParameterFactory:
             return self.buf_var_name
         if param in ["COUNT", "count", "sendcount", "recvcount", "origin_count", "target_count", "result_count"]:
             return str(self.buf_size)
+        if param in ["sendcounts", "recvcounts"]:
+            return str(self.buf_size)
         if param in ["DATATYPE", "datatype", "sendtype", "recvtype", "origin_datatype", "target_datatype",
                      "result_datatype"]:
             return self.dtype[1]
-        if param in ["DEST", "dest", "rank"]:
+        if param in ["DEST", "dest", "rank", "target_rank"]:
             return "0"
-        if param in ["target_rank"]:
-            return "1"
         if param in ["SRC", "source"]:
             return "1"
         if param in ["RANK", "root"]:
@@ -59,7 +60,7 @@ class CorrectParameterFactory:
         if param in ["REQUEST", "request"]:
             return "&mpi_request_0"
         if param in ["GROUP", "group"]:
-            return "mpi_group_0"
+            return "&mpi_group_0"
         if param in ["color"]:
             return "1"
         if param in ["message"]:
@@ -96,12 +97,28 @@ class CorrectParameterFactory:
             return self.buf_size_bytes
         if param in ["disp_unit"]:
             return "sizeof(int)"
+        if param in ["displs", "sdispls", "rdispls"]:
+            return "(int*)malloc(sizeof(int)*10)"
         if param in ["info"]:
             return "MPI_INFO_NULL"
         if param in ["result_addr"]:
             return "resultbuf"
         if param in ["compare_addr"]:
             return "comparebuf"
+        if param in ["comm_cart"]:
+            return "&mpi_comm_0"
+        if param in ["comm_old"]:
+            return "MPI_COMM_WORLD"
+        if param in ["ndims", "maxdims"]:
+            return "2"
+        if param in ["dims"]:
+            return "dims"
+        if param in ["coords"]:
+            return "coords"
+        if param in ["periods"]:
+            return "periods"
+        if param in ["reorder"]:
+            return "0"
         print("Not Implemented: " + param)
         assert False, "Param not known"
 
@@ -114,6 +131,26 @@ class CorrectParameterFactory:
             return "MPI_COMM_NULL"
         if variable_type == "MPI_Message":
             return "MPI_MESSAGE_NULL"
+        if variable_type == "MPI_Group":
+            return "MPI_GROUP_NULL"
+        if variable_type == "MPI_Datatype":
+            return "MPI_DATATYPE_NULL"
         # TODO implement other types
         print("Not Implemented: " + variable_type)
         assert False, "Param not known"
+
+
+# TODO: also provide this for send calls and for non-default arguments
+def get_matching_recv(call: MPICall) -> MPICall:
+    # imported locally to avoid a circular import with MPICallFactory (see the commented-out module-level import above)
+    from scripts.Infrastructure.MPICallFactory import MPICallFactory
+    correct_params = CorrectParameterFactory()
+    recv = MPICallFactory().mpi_recv(
+        correct_params.get("BUFFER"),
+        correct_params.get("COUNT"),
+        correct_params.get("DATATYPE"),
+        correct_params.get("SRC"),
+        correct_params.get("TAG"),
+        correct_params.get("COMM"),
+        correct_params.get("STATUS", "MPI_Recv"),
+    )
+
+    return recv
diff --git a/scripts/Infrastructure/ErrorGenerator.py b/scripts/Infrastructure/ErrorGenerator.py
index 2b4824245f6c23796185604b09e153a6d33a771e..a1fb947b9c1bb9bfa1d8921e6d9e37428e59ab7e 100644
--- a/scripts/Infrastructure/ErrorGenerator.py
+++ b/scripts/Infrastructure/ErrorGenerator.py
@@ -3,6 +3,8 @@
 # ABC in python is abstract Base Class
 from abc import ABC, abstractmethod
 
+import pandas as pd
+
 
 class ErrorGenerator(ABC):
     """
@@ -33,13 +35,14 @@ class ErrorGenerator(ABC):
         pass
 
     @abstractmethod
-    def generate(self, generation_level: int):
+    def generate(self, generation_level: int, real_world_score_table: pd.DataFrame):
         """
         Abstract method to generate error cases. Implemented as a python generator.
         Meaning that the cases should be yielded instead of returned
 
         Parameters:
-            - generate_full_set: if the generator should generate the extended full set of cases.
+            - generation_level: how extensive the generated set of cases should be (see the test levels in Variables.py).
+            - real_world_score_table: scoring table derived from real-world usage data, used to decide which cases to generate
 
         yield:
             TemplateManager: An instantiated and set up TemplateManager with the error case.
diff --git a/scripts/Infrastructure/GeneratorManager.py b/scripts/Infrastructure/GeneratorManager.py
index 35198acfc5c5e3988b22709876973256e8e6f21c..45830ec74dfd19329e99308536e047b8099969d6 100644
--- a/scripts/Infrastructure/GeneratorManager.py
+++ b/scripts/Infrastructure/GeneratorManager.py
@@ -14,7 +14,7 @@ import tqdm
 
 from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
 from scripts.Infrastructure.Template import TemplateManager
-from scripts.Infrastructure.Variables import featurelist
+from scripts.Infrastructure.Variables import *
 
 # number of digits to use numbering filenames
 digits_to_use = 3
@@ -33,7 +33,7 @@ def import_module(root, file):
 
 
 class GeneratorManager:
-    def __init__(self, path, print_discovery=True, skip_invalid=False):
+    def __init__(self, path, score_table=None, print_discovery=True, skip_invalid=False):
         """
         Instantiates an GeneratorManager and discovers ErrorGenerator classes from Python files in the specified path.
 
@@ -52,6 +52,7 @@ class GeneratorManager:
             - Discovers the generators in Python files with the '.py' extension in the specified path and its subdirectories.
         """
         self.generators = []
+        self.score_table = score_table
         self.discover_generators(path, print_discovery, skip_invalid)
         self.case_names = {}
         # discover all Error Generators
@@ -70,7 +71,7 @@ class GeneratorManager:
         return case_name + "-" + str(num).zfill(digits_to_use) + suffix
 
     def generate(self, outpath: str | Path | os.PathLike[str], filterlist_: typing.Sequence[str] = None,
-                 print_progress_bar: bool = True, overwrite: bool = True, generation_level: int = 1,
+                 print_progress_bar: bool = True, overwrite: bool = True, generation_level: int = BASIC_TEST_LEVEL,
                  try_compile: bool = False, max_mpi_version: str = "4.0", use_clang_format: bool = True):
         """
         Generates test cases based on the specified parameters.
@@ -120,7 +121,7 @@ class GeneratorManager:
             category_path = os.path.join(outpath, generator.get_feature()[0])
             os.makedirs(category_path, exist_ok=True)
 
-            for result_error in generator.generate(generation_level):
+            for result_error in generator.generate(generation_level, self.score_table):
                 assert isinstance(result_error, TemplateManager)
 
                 if not float(result_error.get_version()) > float(max_mpi_version):
diff --git a/scripts/Infrastructure/Instruction.py b/scripts/Infrastructure/Instruction.py
index 39de122549749e810d1f33b5d6971d15956c6b23..866627f301c42ce3a9ce0f5d3b6ffee8f4b88113 100644
--- a/scripts/Infrastructure/Instruction.py
+++ b/scripts/Infrastructure/Instruction.py
@@ -21,6 +21,9 @@ class Instruction(object):
     def set_has_error(self, has_error: bool = True):
         self._has_error = has_error
 
+    def has_error(self):
+        return self._has_error
+
     def get_identifier(self) -> str:
         return self._identifier
 
diff --git a/scripts/Infrastructure/MPICall.py b/scripts/Infrastructure/MPICall.py
index 1ad16ec32b8c92b13d4f71562e3f7f0c994de522..30c8c9e8ad01e696ee0ec847923b43f43083cb94 100644
--- a/scripts/Infrastructure/MPICall.py
+++ b/scripts/Infrastructure/MPICall.py
@@ -32,7 +32,9 @@ class MPICall(Instruction):
         return s
 
     def set_arg(self, arg: str, value: str):
-        assert self.has_arg(arg)
+        assert self.has_arg(arg), "Unknown arg " + arg + " in " + self._function
         self._args[arg] = value
 
     def get_arg(self, arg: str) -> str:
diff --git a/scripts/Infrastructure/MPICallFactory.py b/scripts/Infrastructure/MPICallFactory.py
index a243cb2be718f75283b2d09c75e7e018fb599863..b89d4ec7edd0e24fde44acd8bc885fff3905575d 100644
--- a/scripts/Infrastructure/MPICallFactory.py
+++ b/scripts/Infrastructure/MPICallFactory.py
@@ -149,7 +149,7 @@ class MPICallFactory:
 
     @staticmethod
     def mpi_cart_get(*args):
-        return MPICall("MPI_Cart_get", OrderedDict([("comm", args[0]), ("maxdims", args[1]), ("dims", args[2]), ("periods", args[3]), ("coords", args[4]), ]), "1.0")
+        return MPICall("MPI_Cart_get", OrderedDict([("comm_cart", args[0]), ("maxdims", args[1]), ("dims", args[2]), ("periods", args[3]), ("coords", args[4]), ]), "1.0")
 
     @staticmethod
     def mpi_cart_map(*args):
@@ -2255,7 +2255,7 @@ class CorrectMPICallFactory:
     @staticmethod
     def mpi_cart_get():
         correct_params = CorrectParameterFactory()
-        return MPICallFactory().mpi_cart_get(correct_params.get("comm"),correct_params.get("maxdims"),correct_params.get("dims"),correct_params.get("periods"),correct_params.get("coords"))
+        return MPICallFactory().mpi_cart_get(correct_params.get("comm_cart"),correct_params.get("maxdims"),correct_params.get("dims"),correct_params.get("periods"),correct_params.get("coords"))
 
     @staticmethod
     def mpi_cart_map():
diff --git a/scripts/Infrastructure/ScoingModule/ScoringTable.py b/scripts/Infrastructure/ScoingModule/ScoringTable.py
new file mode 100644
index 0000000000000000000000000000000000000000..d6eb3ec3f2aab1f1fe3f0f58da5eaf0e8c550ae0
--- /dev/null
+++ b/scripts/Infrastructure/ScoingModule/ScoringTable.py
@@ -0,0 +1,146 @@
+import pandas as pd
+import numpy as np
+
+from tqdm.auto import tqdm
+
+tqdm.pandas()
+
+from scripts.Infrastructure.MPIAPIInfo.MPIAPICategories import mpi_all_mpi
+
+# Settings
+categories_to_score = ["RANK", "TAG", "POLYXFER_NUM_ELEM_NNI", "DATATYPE", "COMMUNICATOR", "OPERATION"]
+
+columns_to_lowercase = ["call", "DATATYPE", "COMMUNICATOR"]
+
+# maximum total score
+maximum_score = 100
+# maximum score per call
+maximum_call_score = 1
+
+# calls to completely ignore while scoring
+# every program must call MPI_Init and related setup calls, so using them for scoring is not that useful
+calls_to_exclude = ["MPI_Comm_rank", "MPI_Comm_size", "MPI_Init", "MPI_Finalize", "MPI_Init_thread"]
+
+# end settings
+max_call_weight = float(maximum_score) / float(maximum_call_score)
+
+
+# replace concrete values with "other", which is treated as a wildcard when scoring
+def clean_data_to_score(row, categories_to_score):
+    for c in categories_to_score:
+        cc = c + "_CATEGORY"
+        if not pd.isna(row[c]):
+            if row[cc] not in ["MPI_constant", "literal_constant", "handle"]:
+                row[c] = "other"
+            if row[c] == "inconclusive":
+                row[c] = "other"
+            if c == "TAG" and row[c] != "MPI_ANY_TAG":
+                # discard all tag values besides the one with special meanings
+                row[c] = "other"
+    return row
+
+
+def get_scoring_table(df, c_only=False):
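+    """
+    Build a weighted scoring table from a DataFrame of real-world MPI call sites.
+
+    Each row holds a call name, one combination of parameter values (per
+    categories_to_score, with "other" acting as a wildcard) and a score that is
+    proportional to how often that combination occurs in the data. Calls in
+    calls_to_exclude are ignored; with c_only=True, Fortran call sites are dropped.
+    """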
+    df = df[~df["call"].isin(calls_to_exclude)]
+    if c_only:
+        # remove fortran
+        df = df[~df['src_location'].str.contains(".f", regex=False)]
+        df = df[~df['src_location'].str.contains(".F", regex=False)]
+    columns = ["call"] + categories_to_score + ["score"]
+    num_categories = len(categories_to_score)
+    types = [pd.StringDtype] + [pd.StringDtype for _ in range(num_categories)] + [np.float64]
+
+    scoring_table = pd.DataFrame(columns=columns)
+
+    call_weights = {}
+    # "other" is a wildcard that matches everything
+
+    # there may also be openmp, cuda etc. in the data
+    df_only_mpi = df[df["call"].isin(mpi_all_mpi)].apply(clean_data_to_score, args=(categories_to_score,), axis=1)
+    total_calls = float(len(df_only_mpi))
+
+    for call, count in df_only_mpi["call"].value_counts().items():
+        call_weights[call] = (float(count) / total_calls) * max_call_weight
+
+    for call, this_call_weight in tqdm(call_weights.items()):
+        if num_categories == 0:
+            scoring_table.loc[len(scoring_table)] = [call, this_call_weight]
+        else:
+            this_call_only = df_only_mpi[df_only_mpi["call"] == call][categories_to_score]
+            unique_calls_to_this = (
+                # drop rows with NA values, but ignore columns that are entirely NA (i.e. the parameter
+                # is not used by this call); NA values elsewhere stem from mis-read calls and are discarded
+                this_call_only.dropna(subset=this_call_only.columns[~this_call_only.isnull().all()], how='any')
+                .groupby(categories_to_score, as_index=False, dropna=False).size()
+            )
+            total_calls_to_this_mpi_func = unique_calls_to_this['size'].sum()
+
+            for _, row in unique_calls_to_this.iterrows():
+                score = this_call_weight * float(maximum_call_score) * float(row['size']) / float(
+                    total_calls_to_this_mpi_func)
+                scoring_table.loc[len(scoring_table)] = [call] + row[categories_to_score].tolist() + [score]
+    for c in columns_to_lowercase:
+        scoring_table[c] = scoring_table[c].str.lower()
+
+    scoring_table.rename(columns={"POLYXFER_NUM_ELEM_NNI": "COUNT"}, inplace=True)
+
+    return scoring_table
+
+def is_combination_important(real_world_score_table, call,
+                             rank=None, tag=None, count=None, datatype=None, communicator=None, op=None):
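+    """
+    Check whether the given call/parameter combination occurs in the real-world score table.
+
+    Table entries with the value "other" act as wildcards; the standard default values
+    (rank 0, tag 0, count 1, MPI_INT, MPI_COMM_WORLD, MPI_SUM) match them as well.
+    """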
+    # the scoreboard has other values == wildcard, we treat the given standard values as matching
+    standard_rank = 0
+    standard_tag = 0
+    standard_count = 1
+    standard_datatype = "MPI_INT".lower()
+    standard_communicator = "MPI_COMM_WORLD".lower()
+    standard_op = "MPI_SUM"
+
+    # Filter based on the 'call' column
+    relevant = real_world_score_table[real_world_score_table["call"] == call]
+
+    # Filter by 'rank'
+    if rank is not None:
+        if rank == standard_rank:
+            relevant = relevant[(relevant["RANK"] == rank) | (relevant["RANK"] == "other")]
+        else:
+            relevant = relevant[relevant["RANK"] == rank]
+
+    # Filter by 'tag'
+    if tag is not None:
+        if tag == standard_tag:
+            relevant = relevant[(relevant["TAG"] == tag) | (relevant["TAG"] == "other")]
+        else:
+            relevant = relevant[relevant["TAG"] == tag]
+
+    # Filter by 'count'
+    if count is not None:
+        if count == standard_count:
+            relevant = relevant[(relevant["COUNT"] == count) | (relevant["COUNT"] == "other")]
+        else:
+            relevant = relevant[relevant["COUNT"] == count]
+
+    # Filter by 'datatype'
+    if datatype is not None:
+        if datatype.lower() == standard_datatype:
+            relevant = relevant[
+                (relevant["DATATYPE"] == datatype) | (relevant["DATATYPE"] == "other")]
+        else:
+            relevant = relevant[relevant["DATATYPE"] == datatype]
+
+    # Filter by 'communicator'
+    if communicator is not None:
+        if communicator.lower() == standard_communicator:
+            relevant = relevant[
+                (relevant["COMMUNICATOR"] == communicator) | (relevant["COMMUNICATOR"] == "other")]
+        else:
+            relevant = relevant[relevant["COMMUNICATOR"] == communicator]
+
+    # Filter by 'op'
+    if op is not None:
+        if op == standard_op:
+            relevant = relevant[(relevant["OPERATION"] == op) | (relevant["OPERATION"] == "other")]
+        else:
+            relevant = relevant[relevant["OPERATION"] == op]
+
+    return len(relevant) > 0
diff --git a/scripts/Infrastructure/Template.py b/scripts/Infrastructure/Template.py
index da756f52d8cf2956d312982b79038a8335b681c5..d836d084dc884bfdbeb4e56e9b036723ffb10864 100644
--- a/scripts/Infrastructure/Template.py
+++ b/scripts/Infrastructure/Template.py
@@ -7,13 +7,7 @@ from scripts.Infrastructure.Instruction import Instruction
 from scripts.Infrastructure.MPICall import MPICall
 from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory
 
-template = """// @{generatedby}@
-/* ///////////////////////// The MPI Bug Bench ////////////////////////
-
-  Description: @{desc}@
-
-  Version of MPI: @{version}@
-
+"""
 THIS BLOCK IS CURRENTLY NOT USED:
 BEGIN_MPI_FEATURES
   P2P!basic: @{p2pfeature}@
@@ -25,6 +19,13 @@ BEGIN_MPI_FEATURES
   COLL!tools: Yes
   RMA: Lacking
 END_MPI_FEATURES
+"""
+
+template = """/* ///////////////////////// The MPI Bug Bench ////////////////////////
+
+  Description: @{desc}@
+
+  Version of MPI: @{version}@
 
 BEGIN_MBB_TESTS
   $ mpirun -np @{min_num_ranks}@ ${EXE}
@@ -36,6 +37,8 @@ END_MBB_TESTS
 #include <mpi.h>
 #include <stdio.h>
 #include <stdlib.h>
+#include <stddef.h>
+#include <stdint.h>
 
 int main(int argc, char **argv) {
   int nprocs = -1;
@@ -121,6 +124,16 @@ class TemplateManager:
         code_string = ""
         current_rank = 'all'
 
+        has_error = any(i.has_error() for i in self._instructions)
+
+        short_short_descr = self._descr_short.split("-")[0]
+        outcome_str = "OK"
+        if has_error:
+            outcome_str = "ERROR " + short_short_descr
+
         instr_copy = []
         if self._allow_reorder:
             # "bucket-sort" the instructions to group those from different ranks together
@@ -140,6 +153,10 @@ class TemplateManager:
                         buckets = [[] for _ in range(self._min_ranks)]
                         used_buckets = False
                     instr_copy.append(instr)  # current inst
+
+            if used_buckets:
+                for bucket in buckets:
+                    instr_copy = instr_copy + bucket
         else:
             # no re-ordering
             instr_copy = self._instructions.copy()
@@ -180,10 +197,12 @@ class TemplateManager:
 
         return (template
                 .replace("@{min_num_ranks}@", str(self._min_ranks))
+                .replace("@{outcome}@", outcome_str)
                 .replace("@{stack_vars}@", stack_vars_str)
                 .replace("@{mpi_init}@", init_string)
                 .replace("@{mpi_finalize}@", finalize_string)
                 .replace("@{desc}@", self._descr_full)
+                .replace("@{errormsg}@", self._descr_short)
                 .replace("@{version}@", version)
                 .replace("@{test_code}@", code_string))
 
diff --git a/scripts/Infrastructure/TemplateFactory.py b/scripts/Infrastructure/TemplateFactory.py
index 8a5befca535ad3bb646d23344981ea776af70035..2d69d5bc8a06aef06fb0c5464f6789c9a72a2974 100644
--- a/scripts/Infrastructure/TemplateFactory.py
+++ b/scripts/Infrastructure/TemplateFactory.py
@@ -11,6 +11,231 @@ from scripts.Infrastructure.MPICall import MPICall
 from scripts.Infrastructure.MPICallFactory import CorrectMPICallFactory, MPICallFactory
 from scripts.Infrastructure.Template import TemplateManager
 
+list_of_not_implemented_types = [
+    # TODO add wrong usage of
+    'MPI_PACKED',
+    # min/maxloc reduction types
+]
+
+predefined_mpi_dtype_consants = {
+    'MPI_INT': 'signed int',
+    'MPI_DOUBLE': 'double',
+
+    'MPI_CHAR': 'char',
+    'MPI_SHORT': 'signed short int',
+    'MPI_LONG': 'signed long int',
+    'MPI_LONG_LONG_INT': 'signed long long int',
+    'MPI_LONG_LONG': 'signed long long int',
+    'MPI_SIGNED_CHAR': 'signed char',
+    'MPI_UNSIGNED_CHAR': 'unsigned char',
+    'MPI_UNSIGNED_SHORT': 'unsigned short int',
+    'MPI_UNSIGNED': 'unsigned int',
+    'MPI_UNSIGNED_LONG': 'unsigned long int',
+    'MPI_UNSIGNED_LONG_LONG': 'unsigned long long int',
+    'MPI_FLOAT': 'float',
+    'MPI_LONG_DOUBLE': 'long double',
+    'MPI_WCHAR': 'wchar_t',
+    'MPI_C_BOOL': '_Bool',
+    'MPI_INT8_T': 'int8_t',
+    'MPI_INT16_T': 'int16_t',
+    'MPI_INT32_T': 'int32_t',
+    'MPI_INT64_T': 'int64_t',
+    'MPI_UINT8_T': 'uint8_t',
+    'MPI_UINT16_T': 'uint16_t',
+    'MPI_UINT32_T': 'uint32_t',
+    'MPI_UINT64_T': 'uint64_t',
+    'MPI_C_COMPLEX': 'float _Complex',
+    'MPI_C_FLOAT_COMPLEX': 'float _Complex',
+    'MPI_C_DOUBLE_COMPLEX': 'double _Complex',
+    'MPI_C_LONG_DOUBLE_COMPLEX': 'long double _Complex',
+    'MPI_BYTE': 'char',
+    'MPI_PACKED': 'char',
+    # MPI 4
+    'MPI_AINT': 'MPI_Aint',
+    'MPI_COUNT': 'MPI_Count',
+    'MPI_OFFSET': 'MPI_Offset',
+    # predefined struct types
+    'MPI_2INT': "struct {int a; int b;}",
+    'MPI_FLOAT_INT': "struct {float a; int b;}",
+    'MPI_DOUBLE_INT': "struct {double a; int b;}",
+    'MPI_LONG_INT': "struct {long a; int b;}",
+    'MPI_SHORT_INT': "struct {short a; int b;}",
+    'MPI_LONG_DOUBLE_INT': "struct {long double a; int b;}"
+}
+predefined_types = list(predefined_mpi_dtype_consants.keys())
+
+# TODO can you use MPI_Type_create_darray for p2p communication? it is only meant for file IO
+
+user_defined_types = [
+    "mpi_type_contiguous",
+    "mpi_type_vector",
+    "mpi_type_create_hvector",
+    "mpi_type_indexed",
+    "mpi_type_create_hindexed",
+    "mpi_type_create_indexed_block",
+    "mpi_type_create_hindexed_block",
+    "mpi_type_create_struct",
+    "mpi_type_create_subarray",
+    "mpi_type_create_subarray"
+]
+
+
+def get_buffer_for_usertype(mpi_type_creator, tm, insert_before, size=100):
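+    """
+    Insert the instructions that create and commit the given user-defined MPI datatype,
+    together with a matching buffer allocation, before the given instruction, and
+    register the corresponding MPI_Type_free.
+    Returns a tuple (buffer name, name of the datatype variable).
+    """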
+    if mpi_type_creator == "mpi_type_contiguous":
+        result_type = tm.add_stack_variable("MPI_Datatype")
+        call = MPICallFactory.mpi_type_contiguous(2, "MPI_INT", "&" + result_type)
+        tm.insert_instruction(call, before_instruction=insert_before, before_first_of_list=True)
+        alloc = AllocCall("int", str(size) + " * 2", "buf_" + mpi_type_creator.lower())
+        tm.insert_instruction(alloc, before_instruction=insert_before, before_first_of_list=True)
+    elif mpi_type_creator == "mpi_type_vector":
+        result_type = tm.add_stack_variable("MPI_Datatype")
+        call = MPICallFactory.mpi_type_vector(2, 2, 3, "MPI_INT", "&" + result_type)
+        tm.insert_instruction(call, before_instruction=insert_before, before_first_of_list=True)
+        alloc = AllocCall("int", str(size) + " * 2*3", "buf_" + mpi_type_creator.lower())
+        tm.insert_instruction(alloc, before_instruction=insert_before, before_first_of_list=True)
+    elif mpi_type_creator == "mpi_type_create_hvector":
+        result_type = tm.add_stack_variable("MPI_Datatype")
+        call = MPICallFactory.mpi_type_create_hvector(2, 2, str(4) + "* sizeof(int)", "MPI_INT", "&" + result_type)
+        tm.insert_instruction(call, before_instruction=insert_before, before_first_of_list=True)
+        alloc = AllocCall("int", str(size) + " * 2*4", "buf_" + mpi_type_creator.lower())
+        tm.insert_instruction(alloc, before_instruction=insert_before, before_first_of_list=True)
+    elif mpi_type_creator == "mpi_type_indexed":
+        result_type = tm.add_stack_variable("MPI_Datatype")
+        tm.insert_instruction(Instruction("int block_length_indexed[] = {3,2};"),
+                              before_instruction=insert_before, before_first_of_list=True)
+        tm.insert_instruction(Instruction("int displacements_indexed[] = {0,1};"),
+                              before_instruction=insert_before, before_first_of_list=True)
+        call = MPICallFactory.mpi_type_indexed(2, "block_length_indexed", "displacements_indexed", "MPI_INT",
+                                               "&" + result_type)
+        tm.insert_instruction(call, before_instruction=insert_before, before_first_of_list=True)
+        alloc = AllocCall("int", str(size) + " * 6", "buf_" + mpi_type_creator.lower())
+        tm.insert_instruction(alloc, before_instruction=insert_before, before_first_of_list=True)
+    elif mpi_type_creator == "mpi_type_create_hindexed":
+        result_type = tm.add_stack_variable("MPI_Datatype")
+        tm.insert_instruction(Instruction("int block_length_hindexed[] = {3,2};"),
+                              before_instruction=insert_before, before_first_of_list=True)
+        tm.insert_instruction(Instruction("MPI_Aint displacements_hindexed[] = {0,2*sizeof(int)};"),
+                              before_instruction=insert_before, before_first_of_list=True)
+        call = MPICallFactory.mpi_type_create_hindexed(2, "block_length_hindexed", "displacements_hindexed", "MPI_INT",
+                                                       "&" + result_type)
+        tm.insert_instruction(call, before_instruction=insert_before, before_first_of_list=True)
+        alloc = AllocCall("int", str(size) + " * 7", "buf_" + mpi_type_creator.lower())
+        tm.insert_instruction(alloc, before_instruction=insert_before, before_first_of_list=True)
+    elif mpi_type_creator == "mpi_type_create_indexed_block":
+        result_type = tm.add_stack_variable("MPI_Datatype")
+        tm.insert_instruction(Instruction("int displacements_indexed_block[] = {0,1};"),
+                              before_instruction=insert_before, before_first_of_list=True)
+        call = MPICallFactory.mpi_type_create_indexed_block(2, "3", "displacements_indexed_block", "MPI_INT",
+                                                            "&" + result_type)
+        tm.insert_instruction(call, before_instruction=insert_before, before_first_of_list=True)
+        alloc = AllocCall("int", str(size) + " * 7", "buf_" + mpi_type_creator.lower())
+        tm.insert_instruction(alloc, before_instruction=insert_before, before_first_of_list=True)
+    elif mpi_type_creator == "mpi_type_create_hindexed_block":
+        result_type = tm.add_stack_variable("MPI_Datatype")
+        tm.insert_instruction(Instruction("MPI_Aint displacements_hindexed_block[] = {0,6*sizeof(int)};"),
+                              before_instruction=insert_before, before_first_of_list=True)
+        call = MPICallFactory.mpi_type_create_hindexed_block(2, "3", "displacements_hindexed_block", "MPI_INT",
+                                                             "&" + result_type)
+        tm.insert_instruction(call, before_instruction=insert_before, before_first_of_list=True)
+        alloc = AllocCall("int", str(size) + " * 12", "buf_" + mpi_type_creator.lower())
+        tm.insert_instruction(alloc, before_instruction=insert_before, before_first_of_list=True)
+    elif mpi_type_creator == "mpi_type_create_struct":
+        result_type = tm.add_stack_variable("MPI_Datatype")
+        tm.insert_instruction(Instruction("int block_length_struct[] = {2,4};"),
+                              before_instruction=insert_before, before_first_of_list=True)
+        tm.insert_instruction(Instruction("MPI_Aint displacements_struct[] = {0,1*sizeof(int)};"),
+                              before_instruction=insert_before, before_first_of_list=True)
+        tm.insert_instruction(Instruction("MPI_Datatype dtypes_struct[] = {MPI_INT,MPI_INT};"),
+                              before_instruction=insert_before, before_first_of_list=True)
+        call = MPICallFactory.mpi_type_create_struct(2, "block_length_struct", "displacements_struct", "dtypes_struct",
+                                                     "&" + result_type)
+        tm.insert_instruction(call, before_instruction=insert_before, before_first_of_list=True)
+        alloc = AllocCall("int", str(size) + " * 7", "buf_" + mpi_type_creator.lower())
+        tm.insert_instruction(alloc, before_instruction=insert_before, before_first_of_list=True)
+    elif mpi_type_creator == "mpi_type_create_subarray":
+        result_type = tm.add_stack_variable("MPI_Datatype")
+        tm.insert_instruction(Instruction("int sizes_subarray[] = {3,3};"),
+                              before_instruction=insert_before, before_first_of_list=True)
+        tm.insert_instruction(Instruction("int sub_sizes_subarray[] = {3,1};"),
+                              before_instruction=insert_before, before_first_of_list=True)
+        tm.insert_instruction(Instruction("int starts_subarray[] = {0,1};"),
+                              before_instruction=insert_before, before_first_of_list=True)
+
+        call = MPICallFactory.mpi_type_create_subarray(2, "sizes_subarray", "sub_sizes_subarray", "starts_subarray",
+                                                       "MPI_ORDER_C", "MPI_INT",
+                                                       "&" + result_type)
+        tm.insert_instruction(call, before_instruction=insert_before, before_first_of_list=True)
+        alloc = AllocCall("int", str(size) + " * 3*3", "buf_" + mpi_type_creator.lower())
+        tm.insert_instruction(alloc, before_instruction=insert_before, before_first_of_list=True)
+    else:
+        # print(mpi_type_creator)
+        assert False, "Not implemented"
+
+    tm.insert_instruction(
+        MPICallFactory.mpi_type_commit("&" + result_type), before_instruction=insert_before, before_first_of_list=True)
+
+    tm.register_instruction(MPICallFactory.mpi_type_free("&" + result_type))
+
+    return alloc.get_name(), result_type
+
+
+def get_buffer_for_type(mpi_type, size=100):
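+    """Return an AllocCall for a buffer of `size` elements of the C type matching the given predefined MPI datatype."""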
+    ctype = predefined_mpi_dtype_consants[mpi_type]
+    return AllocCall(ctype, size, "buf_" + mpi_type.lower())
+
+
+def get_bytes_size_for_type(mpi_type):
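+    """Return a C expression (as a string) for the size in bytes of one element of the given MPI datatype."""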
+    if mpi_type in predefined_mpi_dtype_consants:
+        return "sizeof(" + predefined_mpi_dtype_consants[mpi_type] + ")"
+
+    if mpi_type == "mpi_type_contiguous":
+        return "(2* sizeof(int))"
+    if mpi_type == "mpi_type_vector":
+        return "(2*3* sizeof(int))"
+    if mpi_type == "mpi_type_create_hvector":
+        return "(2*4* sizeof(int))"
+    if mpi_type == "mpi_type_indexed":
+        return "(6* sizeof(int))"
+    if mpi_type == "mpi_type_create_hindexed":
+        return "(7* sizeof(int))"
+    if mpi_type == "mpi_type_create_indexed_block":
+        return "(7* sizeof(int))"
+    if mpi_type == "mpi_type_create_hindexed_block":
+        return "(12* sizeof(int))"
+    if mpi_type == "mpi_type_create_struct":
+        return "(7* sizeof(int))"
+    if mpi_type == "mpi_type_create_subarray":
+        return "(3*3* sizeof(int))"
+
+    print(mpi_type)
+    assert False, "Not implemented"
+
+
+def get_type_buffers(tm, type_1, type_2, size_1, size_2):
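+    """
+    Set up buffers (and, for user-defined types, the datatype creation calls) for the two
+    given MPI datatypes before the MPICALL instruction and register the matching frees.
+    Returns (type_1 variable, buffer 1 name, type_2 variable, buffer 2 name).
+    """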
+    if type_1 in predefined_types:
+        type_1_variable = type_1
+        buf_alloc_call = get_buffer_for_type(type_1, size_1)
+        tm.insert_instruction(buf_alloc_call, before_instruction="MPICALL",
+                              before_first_of_list=True)
+        buf_name_1 = buf_alloc_call.get_name()
+    else:
+        assert type_1 in user_defined_types
+        buf_name_1, type_1_variable = get_buffer_for_usertype(type_1, tm, "MPICALL", size_1)
+    tm.register_instruction("free(" + buf_name_1 + ");")
+    if type_2 in predefined_types:
+        type_2_variable = type_2
+        buf_alloc_call = get_buffer_for_type(type_2, size_2)
+        tm.insert_instruction(buf_alloc_call, before_instruction="MPICALL",
+                              before_first_of_list=True)
+        buf_name_2 = buf_alloc_call.get_name()
+    else:
+        assert type_2 in user_defined_types
+        buf_name_2, type_2_variable = get_buffer_for_usertype(type_2, tm, "MPICALL", size_2)
+    tm.register_instruction("free(" + buf_name_2 + ");")
+    return type_1_variable, buf_name_1, type_2_variable, buf_name_2
+
+
+
 
 def get_default_template(mpi_func):
     """
@@ -113,11 +338,12 @@ def get_send_recv_template(send_func: str = "mpi_isend", recv_func: str | typing
     if recv_func in probe_pairs:
         if recv_func in [["mpi_improbe", "mpi_mrecv"],
                          ["mpi_improbe", "mpi_imrecv"]]:
-            tm.insert_instruction(Instruction("while (!" + flag_name + "){", rank=0), before_instruction=r)
+            tm.insert_instruction(Instruction("while (!" + flag_name + "){", rank=0, identifier="PROBELOOP"),
+                                  before_instruction=r)
             # insertion before the improbe call
-            tm.register_instruction("}", rank_to_execute=0)  # end while
+            tm.register_instruction("}", rank_to_execute=0, identifier="PROBELOOP")  # end while
         # the matched recv
-        tm.register_instruction(CorrectMPICallFactory.get(recv_func[1]), rank_to_execute=0)
+        tm.register_instruction(CorrectMPICallFactory.get(recv_func[1]), rank_to_execute=0, identifier="MATCHEDRECEIVE")
 
     if send_func in persistent_send_funcs:
         tm.register_instruction(CorrectMPICallFactory.mpi_start(), rank_to_execute=1, identifier="START")
@@ -223,8 +449,6 @@ def get_invalid_param_p2p_case(param, value, check_receive, send_func, recv_func
             call.set_arg(param, value)
 
     return tm
-
-
 def get_invalid_param_rma_case(param, rmaop_func, value):
     tm = get_rma_template(rmaop_func=rmaop_func)
 
@@ -235,45 +459,209 @@ def get_invalid_param_rma_case(param, rmaop_func, value):
 
     return tm
 
-def get_collective_template(collective_func, seperate=True):
+def insert_probe(tm, probe_to_use, recv_call):
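+    """
+    Insert the given probe call before recv_call, executing on the same rank;
+    mpi_iprobe is wrapped in a busy-wait loop on its flag. Returns the probe call.
+    """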
+    probe_call = CorrectMPICallFactory.get(probe_to_use)
+    probe_call.set_rank_executing(recv_call.get_rank_executing())
+
+    if probe_to_use == "mpi_iprobe":
+        flag_name = tm.add_stack_variable("int")  # the flag
+        tm.insert_instruction(Instruction(flag_name + "=0;", rank=recv_call.get_rank_executing()),
+                              before_instruction=recv_call)
+        tm.insert_instruction(Instruction("while (!" + flag_name + "){", rank=recv_call.get_rank_executing()),
+                              before_instruction=recv_call)
+        probe_call.set_arg("flag", "&"+flag_name)
+        tm.insert_instruction(probe_call, before_instruction=recv_call)
+        tm.insert_instruction(Instruction("}", rank=recv_call.get_rank_executing()),
+                              before_instruction=recv_call)  # end while
+    else:
+        tm.insert_instruction(probe_call, before_instruction=recv_call)
+
+    return probe_call
+
+
+def replace_wait(wait_call, tm, wait_func_to_use):
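+    """
+    Replace the given mpi_wait call in the template with the requested completion call
+    (test/waitall/testall/waitany/...); the test variants are wrapped in a busy-wait
+    loop until their flag is set.
+    """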
+    assert wait_func_to_use in ["mpi_wait", "mpi_test", "mpi_waitall", "mpi_testall", "mpi_waitany", "mpi_testany",
+                                "mpi_waitsome", "mpi_testsome"]
+    if wait_func_to_use == "mpi_wait":
+        return  # nothing to do
+    if wait_func_to_use == "mpi_test":
+        flag_name = tm.add_stack_variable("int")
+        test_call = MPICallFactory.mpi_test(wait_call.get_arg("request"), "&" + flag_name, wait_call.get_arg("status"))
+        test_call.set_rank_executing(wait_call.get_rank_executing())
+        test_call.set_identifier(wait_call.get_identifier())
+        tm.insert_instruction(Instruction("while (!" + flag_name + "){", rank=wait_call.get_rank_executing()),
+                              before_instruction=wait_call)
+        tm.insert_instruction(test_call, before_instruction=wait_call)  # insertion before the improbe call
+        tm.insert_instruction(Instruction("}", rank=wait_call.get_rank_executing()), before_instruction="FREE",
+                              )  # end while
+        tm.remove_instruction(wait_call)
+        return
+    status_to_use = wait_call.get_arg("status")
+    if status_to_use == "MPI_STATUS_IGNORE":
+        status_to_use = "MPI_STATUSES_IGNORE"
+    if wait_func_to_use == "mpi_waitall":
+        test_call = MPICallFactory.mpi_waitall("1", wait_call.get_arg("request"), status_to_use)
+        test_call.set_rank_executing(wait_call.get_rank_executing())
+        test_call.set_identifier(wait_call.get_identifier())
+        tm.insert_instruction(test_call, before_instruction=wait_call)  # insertion before the improbe call
+        tm.remove_instruction(wait_call)
+        return
+    if wait_func_to_use == "mpi_testall":
+        flag_name = tm.add_stack_variable("int")
+        test_call = MPICallFactory.mpi_testall("1", wait_call.get_arg("request"), "&" + flag_name,
+                                               status_to_use)
+        test_call.set_rank_executing(wait_call.get_rank_executing())
+        test_call.set_identifier(wait_call.get_identifier())
+        tm.insert_instruction(Instruction("while (!" + flag_name + "){", rank=wait_call.get_rank_executing()),
+                              before_instruction=wait_call)
+        tm.insert_instruction(test_call, before_instruction=wait_call)  # insertion before the improbe call
+        tm.insert_instruction(Instruction("}", rank=wait_call.get_rank_executing()),
+                              before_instruction=wait_call)  # end while
+        tm.remove_instruction(wait_call)
+        return
+    if wait_func_to_use == "mpi_waitany":
+        idx_name = tm.add_stack_variable("int")
+        test_call = MPICallFactory.mpi_waitany("1", wait_call.get_arg("request"), "&" + idx_name,
+                                               wait_call.get_arg("status"))
+        test_call.set_rank_executing(wait_call.get_rank_executing())
+        test_call.set_identifier(wait_call.get_identifier())
+        tm.insert_instruction(test_call, before_instruction=wait_call)  # insertion before the improbe call
+        tm.remove_instruction(wait_call)
+        return
+    if wait_func_to_use == "mpi_testany":
+        flag_name = tm.add_stack_variable("int")
+        idx_name = tm.add_stack_variable("int")
+        test_call = MPICallFactory.mpi_testany("1", wait_call.get_arg("request"), "&" + idx_name, "&" + flag_name,
+                                               wait_call.get_arg("status"))
+        test_call.set_rank_executing(wait_call.get_rank_executing())
+        test_call.set_identifier(wait_call.get_identifier())
+        tm.insert_instruction(Instruction("while (!" + flag_name + "){", rank=wait_call.get_rank_executing()),
+                              before_instruction=wait_call)
+        tm.insert_instruction(test_call, before_instruction=wait_call)  # insertion before the improbe call
+        tm.insert_instruction(Instruction("}", rank=wait_call.get_rank_executing()),
+                              before_instruction=wait_call)  # end while
+        tm.remove_instruction(wait_call)
+        return
+    if wait_func_to_use == "mpi_waitsome":
+        idx_name = tm.add_stack_variable("int")
+        idx_array = tm.add_stack_variable("int")
+        test_call = MPICallFactory.mpi_waitsome("1", wait_call.get_arg("request"), "&" + idx_name,
+                                                "&" + idx_array, status_to_use)
+        test_call.set_rank_executing(wait_call.get_rank_executing())
+        test_call.set_identifier(wait_call.get_identifier())
+        tm.insert_instruction(test_call, before_instruction=wait_call)  # insertion before the improbe call
+        tm.remove_instruction(wait_call)
+        return
+    if wait_func_to_use == "mpi_testsome":
+        flag_name = tm.add_stack_variable("int")
+        idx_array = tm.add_stack_variable("int")
+        test_call = MPICallFactory.mpi_testsome("1", wait_call.get_arg("request"), "&" + flag_name, "&" + idx_array,
+                                                status_to_use)
+        test_call.set_rank_executing(wait_call.get_rank_executing())
+        test_call.set_identifier(wait_call.get_identifier())
+        tm.insert_instruction(Instruction("while (!" + flag_name + "){", rank=wait_call.get_rank_executing()),
+                              before_instruction=wait_call)
+        tm.insert_instruction(test_call, before_instruction=wait_call)  # insertion before the improbe call
+        tm.insert_instruction(Instruction("}", rank=wait_call.get_rank_executing()),
+                              before_instruction=wait_call)  # end while
+        tm.remove_instruction(wait_call)
+        return
+    assert False, "Not implemented"
+
+
+def get_collective_template(collective_func):
     """
-    Contructs a default template for the given mpi collecive
+    Constructs a default template for the given MPI collective
     Returns:
         TemplateManager Initialized with a default template
         The function is contained in a block named MPICALL
         with seperate calls for rank 1 and 2 if seperate ==True
     """
+
     tm = TemplateManager()
     cf = CorrectParameterFactory()
 
-    alloc_block = InstructionBlock("alloc")
-    alloc_block.register_instruction(cf.get_buffer_alloc())
-    if False:
-        # spilt send and recv buf
-        alloc = cf.get_buffer_alloc()
-        alloc.set_name("recv_buf")
-        alloc_block.register_instruction(alloc)
-    tm.register_instruction_block(alloc_block)
-
     cmpicf = CorrectMPICallFactory()
     call_creator_function = getattr(cmpicf, collective_func)
     c = call_creator_function()
 
-    b = InstructionBlock("MPICALL")
-    if seperate:
-        b.register_instruction(c, 1)
-        b.register_instruction(c, 0)
-    else:
-        b.register_instruction(c, 'all')
+    if c.has_arg("buffer") or c.has_arg("sendbuf"): 
+        alloc = cf.get_buffer_alloc()
+        alloc.set_identifier("ALLOC")
+        alloc.set_name("buf")
+        tm.register_instruction(alloc)
+
+    if c.has_arg("comm_cart"):
+        tm.add_stack_variable("MPI_Comm")
+        # TODO: create proper instructions
+        tm.register_instruction(Instruction("int periods[2]={1,1};"), identifier="ALLOC")
+        tm.register_instruction(Instruction("int dims[2]={0,0};"), identifier="ALLOC")  # zero-initialized; MPI_Dims_create fills in the actual dimensions
+        tm.register_instruction(Instruction("int coords[2]={0,0};"), identifier="ALLOC")
+        # let MPI choose the dimensions of the cartesian topology: MPI_Dims_create(nprocs, 2, dims)
+        dims_create = MPICallFactory.mpi_dims_create("nprocs", "2", "dims")
+        tm.register_instruction(dims_create)
+    
+    # add request for nonblocking collectives
+    if collective_func.startswith("mpi_i"): 
+        tm.add_stack_variable("MPI_Request")
+
 
-    tm.register_instruction_block(b)
+    coll = CorrectMPICallFactory.get(collective_func)
+    coll.set_identifier("MPICALL")
+    tm.register_instruction(coll)
 
-    free_block = InstructionBlock("buf_free")
-    free_block.register_instruction(cf.get_buffer_free())
-    tm.register_instruction_block(free_block)
+    # add wait function for nonblocking collectives
+    if collective_func.startswith("mpi_i"):
+        tm.register_instruction(CorrectMPICallFactory.mpi_wait(), rank_to_execute='all', identifier="WAIT")
+
+    if c.has_arg("buffer") or c.has_arg("sendbuf"):
+        tm.register_instruction(cf.get_buffer_free(), identifier="FREE")
 
     return tm
 
+def get_two_collective_template(collective_func1, collective_func2):
+    """
+    Constructs a default template for the two given MPI collectives
+    Returns:
+        TemplateManager Initialized with a default template
+        The function is contained in a block named MPICALL
+    """
+
+    tm = TemplateManager()
+    cf = CorrectParameterFactory()
+
+    # TODO: split send and recv buffers
+    alloc = cf.get_buffer_alloc()
+    alloc.set_identifier("ALLOC")
+    alloc.set_name("buf")
+    tm.register_instruction(alloc)
+
+    cmpicf = CorrectMPICallFactory()
+    call_creator_function = getattr(cmpicf, collective_func1)
+    c = call_creator_function()
+
+    # add request for nonblocking collectives
+    if collective_func1.startswith("mpi_i") or collective_func2.startswith("mpi_i"): 
+        tm.add_stack_variable("MPI_Request")
+
+
+    coll1 = CorrectMPICallFactory.get(collective_func1)
+    coll1.set_identifier("MPICALL")
+    tm.register_instruction(coll1)
+    coll1.set_rank_executing(0)
+
+    coll2 = CorrectMPICallFactory.get(collective_func2)
+    coll2.set_identifier("MPICALL")
+    tm.register_instruction(coll2)
+    coll2.set_rank_executing('not0')
+
+    # add wait function for nonblocking collectives
+    if collective_func1.startswith("mpi_i") or collective_func2.startswith("mpi_i"):
+        tm.register_instruction(CorrectMPICallFactory.mpi_wait(), rank_to_execute='all', identifier="WAIT")
+
+    tm.register_instruction(cf.get_buffer_free(), identifier="FREE")
+
+    return tm
 
 def get_allocated_window(win_alloc_func, name, bufname, ctype, num_elements):
     """
@@ -360,125 +748,140 @@ def get_rma_call(tm: TemplateManager, rma_func, rank, identifier="RMACall") -> T
     return (additional_alloc_list, rma_call, inst_rma_req_wait)
 
 
-def get_communicator(comm_create_func, name, identifier="COMM"):
+
+def get_communicator(comm_create_func: str, tm: TemplateManager, before_idx: int = 0, identifier: str = "COMM"):
     """
     :param comm_create_func: teh function used to create the new communicator
     :param name: name of the communicator variable
-    :return: instruction block with name "comm_create" that will initialize the communicator with the given initialization function, does include the allocation of a stack variable with the procided name for the communicator
+    Inserts operations with the given identifier before the given index
+    :return: name of result variable
     """
     assert comm_create_func in ["mpi_comm_dup", "mpi_comm_dup_with_info", "mpi_comm_idup",
                                 "mpi_comm_idup_with_info", "mpi_comm_create", "mpi_comm_create_group", "mpi_comm_split",
                                 "mpi_comm_split_type", "mpi_comm_create_from_group"]
-    inst_list = []
-    inst_list.append(Instruction("MPI_Comm " + name + ";", identifier=identifier))
+    newcomm = tm.add_stack_variable("MPI_Comm")
+    instr_list=[]
     if comm_create_func.startswith("mpi_comm_i"):
-        inst_list.append(Instruction("MPI_Request comm_create_req;", identifier=identifier))
+        req_name = tm.add_stack_variable("MPI_Request")
     if comm_create_func in ["mpi_comm_create", "mpi_comm_create_group"]:
-        inst_list.append(Instruction("MPI_Group group;", identifier=identifier))
+        group_variable = tm.add_stack_variable("MPI_Group")
         group = CorrectMPICallFactory.mpi_comm_group()
         group.set_identifier(identifier)
-        inst_list.append(group)
+        group.set_arg("group", "&" + group_variable)
+        tm.insert_instruction(group, before_instruction=before_idx)
 
     call = CorrectMPICallFactory.get(comm_create_func)
-    call.set_arg("newcomm", "&" + name)
+    call.set_arg("newcomm", "&" + newcomm)
     if comm_create_func.startswith("mpi_comm_i"):
-        call.set_arg("request", "&comm_create_req")
+        call.set_arg("request", "&" + req_name)
     if comm_create_func in ["mpi_comm_create", "mpi_comm_create_group"]:
-        call.set_arg("group", "group")  # not &group
+        call.set_arg("group", group_variable)  # not &group
     call.set_identifier(identifier)
-    inst_list.append(call)
+    tm.insert_instruction(call, before_instruction=before_idx)
     if comm_create_func.startswith("mpi_comm_i"):
-        wait = MPICallFactory.mpi_wait("&comm_create_req", "MPI_STATUS_IGNORE")
+        wait = MPICallFactory.mpi_wait("&" + req_name, "MPI_STATUS_IGNORE")
         wait.set_identifier(identifier)
-        inst_list.append(wait)
+        tm.insert_instruction(wait, before_instruction=before_idx)
     if comm_create_func in ["mpi_comm_create", "mpi_comm_create_group"]:
         group_free = CorrectMPICallFactory.mpi_group_free()
+        group_free.set_arg("group", "&" + group_variable)
         group_free.set_identifier(identifier)
-        inst_list.append(group_free)
-    return inst_list
+        tm.insert_instruction(group_free, before_instruction=before_idx)
+
+    comm_free = MPICallFactory.mpi_comm_free("&"+newcomm)
+    comm_free.set_identifier(identifier)
+    tm.register_instruction(comm_free)
+    return newcomm
 
 
-def get_intercomm(comm_create_func, name, identifier="COMM"):
+def get_intercomm(comm_create_func: str, tm: TemplateManager, before_idx: int = 0, identifier: str = "COMM"):
     """
-    :param comm_create_func: the function used to create the new communicator
-    :param name: name of the communicator variable
-    :return Tuple InstructionBlock, InstructionBlock: instruction block with name "comm_create" that will initialize the communicator with the given initialization function,
-    does include the allocation of a stack variable with the provided name for the communicator
-    may also contain other stack variables as needed
-    and the block containing all the necessary frees
+    Inserts operations with the given identifier before the given index
+    :return: name of result variable
     """
     assert comm_create_func in ["mpi_intercomm_create", "mpi_intercomm_create_from_groups", "mpi_intercomm_merge"]
-    assert name != "intercomm_base_comm"
 
     if comm_create_func == "mpi_intercomm_create":
-        inst_list = []
-        inst_list.append(Instruction("MPI_Comm intercomm_base_comm;", identifier=identifier))
-        call = MPICallFactory.mpi_comm_split("MPI_COMM_WORLD", "rank % 2", "rank", "&intercomm_base_comm")
+        base_comm = tm.add_stack_variable("MPI_Comm")
+        intercomm = tm.add_stack_variable("MPI_Comm")
+        call = MPICallFactory.mpi_comm_split("MPI_COMM_WORLD", "rank % 2", "rank", "&" + base_comm)
         call.set_identifier(identifier)
-        inst_list.append(call)
-        inst_list.append(Instruction("MPI_Comm " + name + ";", identifier=identifier))
-        call = MPICallFactory.mpi_intercomm_create("intercomm_base_comm", "0", "MPI_COMM_WORLD", "!(rank %2)",
-                                                   CorrectParameterFactory().get("tag"), "&" + name)
+        tm.insert_instruction(call, before_instruction=before_idx)
+        call = MPICallFactory.mpi_intercomm_create(base_comm, "0", "MPI_COMM_WORLD", "!(rank %2)",
+                                                   CorrectParameterFactory().get("tag"), "&" + intercomm)
         call.set_identifier(identifier)
-        inst_list.append(call)
-        call = MPICallFactory.mpi_comm_free("&intercomm_base_comm")
+        tm.insert_instruction(call, before_instruction=before_idx)
+        call = MPICallFactory.mpi_comm_free("&" + base_comm)
         call.set_identifier(identifier)
-        inst_list.append(call)
-        return inst_list
+        tm.insert_instruction(call, before_instruction=before_idx)
+        comm_free = MPICallFactory.mpi_comm_free("&" + intercomm)
+        comm_free.set_identifier(identifier)
+        tm.register_instruction(comm_free)
+        return intercomm
     if comm_create_func == "mpi_intercomm_create_from_groups":
-        inst_list = []
-        inst_list.append(Instruction("MPI_Group world_group,even_group,odd_group;", identifier=identifier))
-        call = MPICallFactory.mpi_comm_group("MPI_COMM_WORLD", "&world_group")
+        intercomm = tm.add_stack_variable("MPI_Comm")
+        world_group = tm.add_stack_variable("MPI_Group")
+        even_group = tm.add_stack_variable("MPI_Group")
+        odd_group = tm.add_stack_variable("MPI_Group")
+        call = MPICallFactory.mpi_comm_group("MPI_COMM_WORLD", "&" + world_group)
         call.set_identifier(identifier)
-        inst_list.append(call)
+        tm.insert_instruction(call, before_instruction=before_idx)
         call = MPICallFactory.mpi_comm_group("intercomm_base_comm", "&intercomm_base_comm_group")
         call.set_identifier(identifier)
-        inst_list.append(call)
-        Instruction("int[3] triplet;"
-                    "triplet[0] =0;"
-                    "triplet[1] =size;"
-                    "triplet[2] =2;", identifier=identifier)
-
-        call = MPICallFactory.mpi_group_incl("world_group", "1", "&triplet", "even_group")
+        tm.insert_instruction(call, before_instruction=before_idx)
+        inst = Instruction("int triplet[3];"
+                           "triplet[0] =0;"
+                           "triplet[1] =size;"
+                           "triplet[2] =2;", identifier=identifier)
+        tm.insert_instruction(inst, before_instruction=before_idx)
+        call = MPICallFactory.mpi_group_incl(world_group, "1", "&triplet", even_group)
         call.set_identifier(identifier)
-        inst_list.append(call)
-        inst_list.append(Instruction("triplet[0] =1;", identifier=identifier))
-        call = MPICallFactory.mpi_group_incl("world_group", "1", "&triplet", "odd_group")
+        tm.insert_instruction(call, before_instruction=before_idx)
+        inst = Instruction("triplet[0] =1;", identifier=identifier)
+        tm.insert_instruction(inst, before_instruction=before_idx)
+        call = MPICallFactory.mpi_group_incl(world_group, "1", "&triplet", odd_group)
         call.set_identifier(identifier)
-        inst_list.append(call)
-        inst_list.append(Instruction("MPI_Comm " + name + ";", identifier=identifier))
-        call = MPICallFactory.mpi_intercomm_create_from_groups("(rank % 2 ? even_group:odd_group)", "0",
-                                                               "(!(rank % 2) ? even_group:odd_group)", "0",
+        tm.insert_instruction(call, before_instruction=before_idx)
+
+        call = MPICallFactory.mpi_intercomm_create_from_groups("(rank % 2 ? " + even_group + ":" + odd_group + ")",
+                                                               "0",
+                                                               "(!(rank % 2) ? " + even_group + ":" + odd_group + ")",
+                                                               "0",
                                                                CorrectParameterFactory().get("stringtag"),
                                                                CorrectParameterFactory().get("INFO"),
                                                                CorrectParameterFactory().get("errhandler"),
-                                                               "&" + name)
+                                                               "&" + intercomm)
         call.set_identifier(identifier)
-        inst_list.append(call)
-        return inst_list
+        tm.insert_instruction(call, before_instruction=before_idx)
+        comm_free = MPICallFactory.mpi_comm_free("&" + intercomm)
+        comm_free.set_identifier(identifier)
+        tm.register_instruction(comm_free)
+        return intercomm
 
     if comm_create_func == "mpi_intercomm_merge":
-        inst_list = []
-        inst_list.append(Instruction("MPI_Comm intercomm_base_comm;", identifier=identifier))
-        inst_list.append(Instruction("MPI_Comm to_merge_intercomm_comm;", identifier=identifier))
-        call = MPICallFactory.mpi_comm_split("MPI_COMM_WORLD", "rank % 2", "rank", "&intercomm_base_comm")
+        intercomm_base_comm = tm.add_stack_variable("MPI_Comm")
+        to_merge_intercomm = tm.add_stack_variable("MPI_Comm")
+        result_comm = tm.add_stack_variable("MPI_Comm")
+        call = MPICallFactory.mpi_comm_split("MPI_COMM_WORLD", "rank % 2", "rank", "&" + intercomm_base_comm)
         call.set_identifier(identifier)
-        inst_list.append(call)
-        inst_list.append(Instruction("MPI_Comm " + name + ";", identifier=identifier))
-        call = MPICallFactory.mpi_intercomm_create("intercomm_base_comm", "0", "MPI_COMM_WORLD", "!(rank %2)",
-                                                   CorrectParameterFactory().get("tag"), "&to_merge_intercomm_comm")
+        tm.insert_instruction(call, before_instruction=before_idx)
+        call = MPICallFactory.mpi_intercomm_create(intercomm_base_comm, "0", "MPI_COMM_WORLD", "!(rank %2)",
+                                                   CorrectParameterFactory().get("tag"), "&" + to_merge_intercomm)
         call.set_identifier(identifier)
-        inst_list.append(call)
-        call = MPICallFactory.mpi_intercomm_merge("to_merge_intercomm_comm", "rank %2", "&" + name)
+        tm.insert_instruction(call, before_instruction=before_idx)
+        call = MPICallFactory.mpi_intercomm_merge(to_merge_intercomm, "rank %2", "&" + result_comm)
         call.set_identifier(identifier)
-        inst_list.append(call)
-        call = MPICallFactory.mpi_comm_free("&to_merge_intercomm_comm")
+        tm.insert_instruction(call, before_instruction=before_idx)
+        call = MPICallFactory.mpi_comm_free("&" + to_merge_intercomm)
         call.set_identifier(identifier)
-        inst_list.append(call)
-        call = MPICallFactory.mpi_comm_free("&intercomm_base_comm")
+        tm.insert_instruction(call, before_instruction=before_idx)
+        call = MPICallFactory.mpi_comm_free("&" + intercomm_base_comm)
         call.set_identifier(identifier)
-        inst_list.append(call)
-        return inst_list
+        tm.insert_instruction(call, before_instruction=before_idx)
+        comm_free = MPICallFactory.mpi_comm_free("&" + result_comm)
+        comm_free.set_identifier(identifier)
+        tm.register_instruction(comm_free)
+        return result_comm
 
     return None
 
diff --git a/scripts/Infrastructure/Variables.py b/scripts/Infrastructure/Variables.py
index 03bd33895b8585c3d1ede9f9315f13475d1b45fc..96ad36ffae50552d6e9c9bd163c2a9b3d555ad25 100644
--- a/scripts/Infrastructure/Variables.py
+++ b/scripts/Infrastructure/Variables.py
@@ -5,9 +5,12 @@ ERROR_MARKER_COMMENT_BEGIN = "/*MBBERROR_BEGIN*/"
 ERROR_MARKER_COMMENT_END = "/*MBBERROR_END*/"
 
 # The List of  Features considered
-featurelist = ["P2P", "COLL", "RMA", "TOOL"]
+featurelist = ["P2P", "COLL", "RMA", "TOOL", "other"]
 
-BASIC_TEST_LEVEL=1
-SUFFICIENT_TEST_LEVEL=2
-FULL_TEST_LEVEL=3
+BASIC_TEST_LEVEL = 1
+SUFFICIENT_TEST_LEVEL = 2
+SUFFICIENT_REAL_WORLD_TEST_LEVEL = 3
+FULL_REAL_WORLD_TEST_LEVEL = 4
+FULL_TEST_LEVEL = 5
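+# Note (assumed from usage in the error generators): the two REAL_WORLD levels restrict
+# generation to call/parameter combinations that occur in the real-world score table
+# (see is_combination_important).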
 
+REAL_WORLD_FILTERING_LEVELS = [SUFFICIENT_REAL_WORLD_TEST_LEVEL, FULL_REAL_WORLD_TEST_LEVEL]
diff --git a/scripts/README.md b/scripts/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..a75a4889d78e17455aa8f2b24aa8c865997e654d
--- /dev/null
+++ b/scripts/README.md
@@ -0,0 +1,2 @@
+Command to run the script:
+PYTHONPATH=../ python main.py
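+(`PYTHONPATH=../` makes the `scripts.Infrastructure` package importable; the command assumes it is run from inside the `scripts/` directory.)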
diff --git a/scripts/errors/coll/CallOrdering.py b/scripts/errors/coll/CallOrdering.py
new file mode 100644
index 0000000000000000000000000000000000000000..ca5bba209d0cb5a8b5a65e53a154695ede5a2b58
--- /dev/null
+++ b/scripts/errors/coll/CallOrdering.py
@@ -0,0 +1,58 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
+from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
+from scripts.Infrastructure.Template import TemplateManager
+from scripts.Infrastructure.TemplateFactory import get_send_recv_template, get_collective_template, \
+    get_two_collective_template
+
+
+class CallOrderingErrorColl(ErrorGenerator):
+    functions_to_use = ["mpi_allgather", "mpi_allreduce", "mpi_alltoall", "mpi_barrier", "mpi_bcast", "mpi_reduce",
+                        "mpi_scatter", "mpi_exscan", "mpi_gather", "mpi_reduce_scatter_block", "mpi_scan",
+                        "mpi_ibarrier", "mpi_iallreduce", "mpi_ialltoall", "mpi_ibcast", "mpi_ireduce", "mpi_iscatter",
+                        "mpi_igather", "mpi_iscan"]
+    functions_not_supported_yet = ["mpi_allgatherv", "mpi_alltoallv", "mpi_alltoallw", "mpi_gatherv",
+                                   "mpi_reduce_scatter", "mpi_scatterv"]
+
+    # need_buf_funcs = ["mpi_bcast", "mpi_ibcast", "mpi_reduce", "mpi_ireduce", "mpi_exscan", "mpi_scan", "mpi_iscan", "mpi_gather", "mpi_igather", "mpi_allgather", "mpi_iallgather", "mpi_allreduce", "mpi_iallreduce", "mpi_alltoall", "mpi_ialltoall", "mpi_scatter", "mpi_iscatter", "mpi_reduce_scatter_block"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["COLL"]
+
+    def generate(self, generate_level, real_world_score_table):
+
+        for func_to_use in self.functions_to_use:
+            tm = get_collective_template(func_to_use)
+
+            tm.set_description("CallOrdering-unmatched-" + func_to_use, func_to_use + " is not called by all processes")
+
+            for call in tm.get_instruction("MPICALL", return_list=True):
+                call.set_rank_executing(0)  # do the same for wait function
+                call.set_has_error()
+
+            yield tm
+            if generate_level <= BASIC_TEST_LEVEL:
+                # basic test level only tests each error once, no need to loop over all functions
+                break
+
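+        # pair each collective with every later one ([i:] avoids duplicate unordered pairs);
+        # identical pairs are filtered out below, since matching collectives are not a mismatch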
+        for i, func1 in enumerate(self.functions_to_use):
+            for func2 in self.functions_to_use[i:]:
+                tm = get_two_collective_template(func1, func2)
+
+                tm.set_description("CallOrdering-unmatched-" + func1 + "-" + func2,
+                                   "Collective mismatch: " + func1 + " is matched with " + func2)
+
+                for call in tm.get_instruction("MPICALL", return_list=True):
+                    call.set_has_error()
+                if func1 != func2:  # we want different functions
+                    yield tm
+
+            if generate_level <= BASIC_TEST_LEVEL:
+                return
diff --git a/scripts/errors/coll/Correct.py b/scripts/errors/coll/Correct.py
new file mode 100644
index 0000000000000000000000000000000000000000..57fce928abe1a72a2b656ea1d20b13608ba6ceff
--- /dev/null
+++ b/scripts/errors/coll/Correct.py
@@ -0,0 +1,55 @@
+#! /usr/bin/python3
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
+from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
+from scripts.Infrastructure.Template import TemplateManager
+from scripts.Infrastructure.TemplateFactory import get_collective_template, get_two_collective_template
+from scripts.Infrastructure.Variables import *
+
+
+class CorrectColl(ErrorGenerator):
+    functions_to_use = ["mpi_allgather", "mpi_allreduce", "mpi_alltoall", "mpi_barrier", "mpi_bcast", "mpi_reduce",
+                        "mpi_scatter", "mpi_exscan", "mpi_gather", "mpi_reduce_scatter_block", "mpi_scan",
+                        "mpi_ibarrier", "mpi_iallreduce", "mpi_ialltoall", "mpi_ibcast", "mpi_ireduce", "mpi_iscatter",
+                        "mpi_igather", "mpi_iscan"]
+    functions_not_supported_yet = ["mpi_gatherv", "mpi_scatterv", "mpi_igatherv", "mpi_iscatterv"]
+    topology_functions = ["mpi_cart_create"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["COLL"]
+
+    def generate(self, generate_level, real_world_score_table):
+
+        # Only one function called by all processes
+        for func_to_use in self.functions_to_use:
+            tm = get_collective_template(func_to_use)
+            tm.set_description("Correct-" + func_to_use, "Correct code")
+            yield tm
+            if generate_level <= BASIC_TEST_LEVEL:
+                break
+
+        # Separate function called depending on process ID
+        for func_to_use in self.functions_to_use:
+            tm = get_two_collective_template(func_to_use, func_to_use)
+            tm.set_description("Correct-" + func_to_use + "-" + func_to_use, "Correct code")
+            yield tm
+            if generate_level <= BASIC_TEST_LEVEL:
+                break
+
+        # Generate scenarios with topology functions
+        for func_to_use in self.topology_functions:
+            tm = get_collective_template(func_to_use)
+            tm.set_description("Correct-" + func_to_use, "Correct code")
+            yield tm
+            tm.set_description("Correct-" + func_to_use + "-mpi_cart_get", "Correct code")
+            cart_get = CorrectMPICallFactory().mpi_cart_get()
+            cart_get.set_arg("comm_cart", "mpi_comm_0")
+            tm.register_instruction(cart_get)
+            yield tm
+            if generate_level <= BASIC_TEST_LEVEL:
+                break
diff --git a/scripts/errors/coll/InvalidComm.py b/scripts/errors/coll/InvalidComm.py
new file mode 100644
index 0000000000000000000000000000000000000000..618fd1f16be784d6f292acfab62b586aa886bc75
--- /dev/null
+++ b/scripts/errors/coll/InvalidComm.py
@@ -0,0 +1,55 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.MPICallFactory import CorrectMPICallFactory
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.TemplateFactory import get_collective_template
+
+
+class InvalidComErrorColl(ErrorGenerator):
+    invalid_com = ["MPI_COMM_NULL", "NULL"]
+    functions_to_use = ["mpi_allgather", "mpi_allreduce", "mpi_alltoall", "mpi_barrier", "mpi_bcast", "mpi_reduce",
+                        "mpi_scatter", "mpi_exscan", "mpi_gather", "mpi_reduce_scatter_block", "mpi_scan",
+                        "mpi_ibarrier", "mpi_iallreduce", "mpi_ialltoall", "mpi_ibcast", "mpi_ireduce", "mpi_iscatter",
+                        "mpi_igather", "mpi_iscan", "mpi_cart_create"]
+    functions_not_supported_yet = ["mpi_allgatherv", "mpi_alltoallv", "mpi_alltoallw", "mpi_gatherv",
+                                   "mpi_reduce_scatter", "mpi_scatterv"]
+    ####functions_to_use = ["mpi_allgather","mpi_allgatherv","mpi_allreduce","mpi_alltoall","mpi_alltoallv","mpi_alltoallw","mpi_barrier","mpi_bcast", "mpi_exscan","mpi_gather", "mpi_gatherv","mpi_reduce", "mpi_reduce_scatter", "mpi_reduce_scatter_block", "mpi_scan", "mpi_scatter", "mpi_scatterv", "mpi_ibarrier", "mpi_iallreduce", "mpi_ialltoall", "mpi_ibcast", "mpi_ireduce", "mpi_iscatter", "mpi_igather", "mpi_iscan"]
+    topology_functions = ["mpi_cart_create"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["COLL"]
+
+    def generate(self, generate_level, real_world_score_table):
+        for com_to_use in self.invalid_com:
+            for func_to_use in self.functions_to_use:
+                tm = get_collective_template(func_to_use)
+
+                tm.set_description("InvalidParam-Comm-" + func_to_use, "Invalid communicator: %s" % com_to_use)
+                for call in tm.get_instruction("MPICALL", return_list=True):
+                    arg_to_replace = "comm" if call.has_arg("comm") else "comm_old"
+                    call.set_arg(arg_to_replace, com_to_use)
+                    call.set_has_error()
+
+                yield tm
+                if generate_level <= BASIC_TEST_LEVEL:
+                    break
+
+        for func_to_use in self.topology_functions:
+            for com_to_use in ["MPI_COMM_NULL", "NULL", "MPI_COMM_WORLD"]:
+                # fresh template per case so the erroneous cart_get calls do not accumulate
+                tm = get_collective_template(func_to_use)
+                tm.set_description("InvalidParam-Comm-" + func_to_use + "-mpi_cart_get",
+                                   "A function tries to get cartesian information of " + com_to_use)
+
+                cart_get = CorrectMPICallFactory.mpi_cart_get()
+                cart_get.set_arg("comm_cart", com_to_use)
+                cart_get.set_has_error()
+                tm.register_instruction(cart_get)
+                yield tm
+
+            if generate_level <= BASIC_TEST_LEVEL:
+                return
diff --git a/scripts/errors/coll/InvalidOp.py b/scripts/errors/coll/InvalidOp.py
new file mode 100644
index 0000000000000000000000000000000000000000..d39708985e0491b9ecd09ac827977cbe11aefa4a
--- /dev/null
+++ b/scripts/errors/coll/InvalidOp.py
@@ -0,0 +1,32 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.TemplateFactory import get_collective_template
+
+
+class InvalidOpErrorColl(ErrorGenerator):
+    invalid_op = ["MPI_OP_NULL"]
+    functions_to_use = ["mpi_reduce", "mpi_ireduce", "mpi_allreduce", "mpi_iallreduce"]
+
+    # TODO invalid op+ type combinations aka MPI_MAXLOC with MPI_BYTE or something klie this
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["COLL"]
+
+    def generate(self, generate_level, real_world_score_table):
+        for op_to_use in self.invalid_op:
+            for func_to_use in self.functions_to_use:
+                tm = get_collective_template(func_to_use)
+                tm.set_description("InvalidParam-Op-" + func_to_use, "Invalid operator: %s" % op_to_use)
+
+                for call in tm.get_instruction("MPICALL", return_list=True):
+                    call.set_arg("op", op_to_use)
+                    call.set_has_error()
+                yield tm
+
+                if generate_level <= BASIC_TEST_LEVEL:
+                    return
diff --git a/scripts/errors/coll/InvalidRank.py b/scripts/errors/coll/InvalidRank.py
new file mode 100644
index 0000000000000000000000000000000000000000..720b5c783d40ee360e0641c3abc241a9a9ee520b
--- /dev/null
+++ b/scripts/errors/coll/InvalidRank.py
@@ -0,0 +1,33 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.TemplateFactory import get_collective_template
+
+
+class InvalidRankErrorColl(ErrorGenerator):
+    invalid_ranks = ["-1", "nprocs", "MPI_PROC_NULL"]
+    functions_to_use = ["mpi_reduce", "mpi_bcast", "mpi_gather", "mpi_scatter", "mpi_ireduce", "mpi_ibcast",
+                        "mpi_igather", "mpi_iscatter"]
+    functions_not_supported_yet = ["mpi_gatherv", "mpi_scatterv", "mpi_igatherv", "mpi_iscatterv"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["COLL"]
+
+    def generate(self, generate_level, real_world_score_table):
+
+        for func_to_use in self.functions_to_use:
+            for rank_to_use in self.invalid_ranks:
+                tm = get_collective_template(func_to_use)
+
+                tm.set_description("InvalidParam-Rank-" + func_to_use, "Invalid Rank: %s" % rank_to_use)
+                for call in tm.get_instruction("MPICALL", return_list=True):
+                    call.set_arg("root", rank_to_use)
+                    call.set_has_error()
+
+                yield tm
+                if generate_level <= BASIC_TEST_LEVEL:
+                    return
diff --git a/scripts/errors/coll/InvalidType.py b/scripts/errors/coll/InvalidType.py
new file mode 100644
index 0000000000000000000000000000000000000000..a79d5f276280c2f9b84d0396ce3fcd41e705d49c
--- /dev/null
+++ b/scripts/errors/coll/InvalidType.py
@@ -0,0 +1,43 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
+from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory
+from scripts.Infrastructure.Template import TemplateManager
+from scripts.Infrastructure.TemplateFactory import get_collective_template
+
+
+class InvalidTypeErrorColl(ErrorGenerator):
+    invalid_type = ["MPI_DATATYPE_NULL", "NULL"]
+    functions_to_use = ["mpi_bcast", "mpi_ibcast", "mpi_reduce", "mpi_ireduce", "mpi_exscan", "mpi_scan", "mpi_iscan",
+                        "mpi_gather", "mpi_igather", "mpi_allgather", "mpi_iallgather", "mpi_allreduce",
+                        "mpi_iallreduce", "mpi_alltoall", "mpi_ialltoall", "mpi_scatter", "mpi_iscatter"]
+    func_one_type_arg = ["mpi_bcast", "mpi_reduce", "mpi_exscan", "mpi_scan", "mpi_ibcast", "mpi_ireduce", "mpi_iscan",
+                         "mpi_allreduce", "mpi_iallreduce"]
+    functions_not_supported_yet = ["mpi_reduce_scatter_block", "mpi_allgatherv", "mpi_alltoallv", "mpi_alltoallw",
+                                   "mpi_gatherv", "mpi_reduce_scatter", "mpi_scatterv"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["COLL"]
+
+    def generate(self, generate_level, real_world_score_table):
+        for type_to_use in self.invalid_type:
+            for func_to_use in self.functions_to_use:
+                tm = get_collective_template(func_to_use)
+                tm.set_description("InvalidParam-Type-" + func_to_use, "Invalid datatype: %s" % type_to_use)
+
+                if func_to_use in self.func_one_type_arg:
+                    for call in tm.get_instruction("MPICALL", return_list=True):
+                        # if call.has_arg("recvtype"): # sendtype
+                        # call.set_arg("recvtype", type_to_use)
+                        call.set_arg("datatype", type_to_use)
+                        call.set_has_error()
+                    yield tm
+
+                if generate_level <= BASIC_TEST_LEVEL:
+                    return
diff --git a/scripts/errors/coll/LocalConcurrency.py b/scripts/errors/coll/LocalConcurrency.py
new file mode 100644
index 0000000000000000000000000000000000000000..af576f06acb95055084577592503d6f6c396d8d2
--- /dev/null
+++ b/scripts/errors/coll/LocalConcurrency.py
@@ -0,0 +1,37 @@
+#! /usr/bin/python3
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
+from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
+from scripts.Infrastructure.Template import TemplateManager
+from scripts.Infrastructure.TemplateFactory import get_collective_template
+from scripts.Infrastructure.Variables import *
+
+
+class LocalConcurrencyErrorColl(ErrorGenerator):
+    nbfunc_to_use = ["mpi_iallreduce", "mpi_ialltoall", "mpi_ibcast", "mpi_ireduce", "mpi_iscatter", "mpi_igather",
+                     "mpi_iscan"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["COLL"]
+
+    def generate(self, generate_level, real_world_score_table):
+
+        for func_to_use in self.nbfunc_to_use:
+            tm = get_collective_template(func_to_use)
+
+            tm.set_description("LocalConcurrency-" + func_to_use, "Usage of buffer before operation is completed")
+
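+            # write to the message buffer between the nonblocking collective and its wait,
+            # which constitutes a local concurrency error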
+            conflicting_inst = Instruction("buf[2]=1;")
+            conflicting_inst.set_has_error()
+            wait = tm.get_instruction("WAIT", return_list=True)
+            tm.insert_instruction(conflicting_inst, before_instruction=wait)
+
+            yield tm
+
+            if generate_level <= BASIC_TEST_LEVEL:
+                return
diff --git a/scripts/errors/coll/ParamMatching.py b/scripts/errors/coll/ParamMatching.py
new file mode 100644
index 0000000000000000000000000000000000000000..e1bf2f96da99796b4d187b1996dfc18a8d77f201
--- /dev/null
+++ b/scripts/errors/coll/ParamMatching.py
@@ -0,0 +1,88 @@
+#! /usr/bin/python3
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
+from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
+from scripts.Infrastructure.Template import TemplateManager
+from scripts.Infrastructure.TemplateFactory import get_collective_template, get_two_collective_template
+from scripts.Infrastructure.Variables import BASIC_TEST_LEVEL
+
+
+class ParamMatchingErrorColl(ErrorGenerator):
+    functions_to_use = ["mpi_bcast", "mpi_ibcast", "mpi_reduce", "mpi_ireduce", "mpi_exscan", "mpi_scan", "mpi_iscan",
+                        "mpi_gather", "mpi_igather", "mpi_allgather", "mpi_iallgather", "mpi_allreduce",
+                        "mpi_iallreduce", "mpi_alltoall", "mpi_ialltoall", "mpi_scatter", "mpi_iscatter"]
+    func_with_one_type_arg = ["mpi_bcast", "mpi_reduce", "mpi_exscan", "mpi_scan", "mpi_ibcast", "mpi_ireduce",
+                              "mpi_iscan", "mpi_allreduce", "mpi_iallreduce"]
+    functions_not_supported_yet = ["mpi_reduce_scatter_block", "mpi_allgatherv", "mpi_alltoallv", "mpi_alltoallw",
+                                   "mpi_gatherv", "mpi_reduce_scatter", "mpi_scatterv"]
+    func_with_op = ["mpi_reduce", "mpi_ireduce", "mpi_allreduce", "mpi_iallreduce"]
+    func_with_root = ["mpi_reduce", "mpi_bcast", "mpi_gather", "mpi_scatter", "mpi_ireduce", "mpi_ibcast",
+                      "mpi_igather", "mpi_iscatter"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["COLL"]
+
+    def generate(self, generate_level, real_world_score_table):
+
+        # type mismatches are generated by ParamMatchingType.py
+
+        # Generate codes with op mismatch
+        for func_to_use in self.func_with_op:
+            tm = get_collective_template(func_to_use)
+            op_to_use = "MPI_MAX"  # this could be a list of op
+            # TODO implement Real world coverage for that
+            tm.set_description("ParamMatching-Op-" + func_to_use, "Wrong operation matching")
+
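+            # rank 0 uses MPI_MAX while all other ranks keep the default (correct) operation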
+            for call in tm.get_instruction("MPICALL", return_list=True):
+                call.set_rank_executing(0)
+                call.set_arg("op", op_to_use)
+                call.set_has_error()
+                c = CorrectMPICallFactory.get(func_to_use)
+                c.set_rank_executing('not0')
+                c.set_has_error()
+                tm.insert_instruction(c, after_instruction=call)
+
+            yield tm
+            if generate_level <= BASIC_TEST_LEVEL:
+                break
+
+        # Generate codes with root mismatch
+        for func_to_use in self.func_with_root:
+            tm = get_collective_template(func_to_use)
+            rank_to_use = "rank"  # process ID, declared in the template
+            tm.set_description("ParamMatching-Root-" + func_to_use, "Wrong root matching")
+
+            for call in tm.get_instruction("MPICALL", return_list=True):
+                # every rank passes its own rank as root, so the root argument differs across processes
+                call.set_arg("root", rank_to_use)
+                call.set_has_error()
+
+            yield tm
+            if generate_level <= BASIC_TEST_LEVEL:
+                break
+
+        # Generate codes with communicator mismatch
+        for func_to_use in self.functions_to_use:
+            tm = get_collective_template(func_to_use)
+            com_to_use = "MPI_COMM_SELF"
+            # TODO use real word coverage for that
+            tm.set_description("ParamMatching-Com-" + func_to_use, "Wrong communicator matching")
+
+            for call in tm.get_instruction("MPICALL", return_list=True):
+                call.set_rank_executing(0)
+                call.set_arg("comm", com_to_use)
+                call.set_has_error()
+                c = CorrectMPICallFactory.get(func_to_use)
+                c.set_rank_executing('not0')
+                c.set_has_error()
+                tm.insert_instruction(c, after_instruction=call)
+
+            yield tm
+            if generate_level <= BASIC_TEST_LEVEL:
+                break
diff --git a/scripts/errors/coll/ParamMatchingType.py b/scripts/errors/coll/ParamMatchingType.py
new file mode 100644
index 0000000000000000000000000000000000000000..ca13a3acd3535119dbb5119004810bf226b9a281
--- /dev/null
+++ b/scripts/errors/coll/ParamMatchingType.py
@@ -0,0 +1,247 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.ScoingModule.ScoringTable import is_combination_important
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
+from scripts.Infrastructure.TemplateFactory import get_collective_template, predefined_types, user_defined_types, \
+    predefined_mpi_dtype_consants, get_type_buffers, get_bytes_size_for_type, get_communicator, get_intercomm, \
+    get_buffer_for_type, get_buffer_for_usertype
+
+# TODO refactor into different file
+# test if the tool can deal with messages sent over different communicators
+predefined_comms = ["MPI_COMM_WORLD"]
+comm_creators = ["mpi_comm_dup", "mpi_comm_dup_with_info", "mpi_comm_idup",
+                 "mpi_comm_idup_with_info", "mpi_comm_create", "mpi_comm_create_group", "mpi_comm_split",
+                 "mpi_comm_split_type", "mpi_comm_create_from_group"
+                 ]
+intercomms = ["mpi_intercomm_create", "mpi_intercomm_merge", "mpi_intercomm_create_from_groups"]
+
+
+def get_local_missmatch(type_1, type_2, func_to_use):
+    tm = get_collective_template(func_to_use)
+    tm.set_description("ParamMatching-Type-" + func_to_use,
+                       "Wrong datatype matching: %s vs %s" % (type_1, type_2))
+    type_var_1, buf_name_1, type_var_2, buf_name_2 = get_type_buffers(tm, type_1, type_2, 1, 1)
+
+    for call in tm.get_instruction("MPICALL", return_list=True):
+
+        call.set_has_error()
+        if call.has_arg("recvtype"):
+            call.set_arg("recvtype", type_var_1)
+            call.set_arg("sendtype", type_var_1)
+        else:
+            call.set_arg("datatype", type_var_1)
+        if call.has_arg("recvbuf"):
+            call.set_arg("recvbuf", buf_name_2)
+            call.set_arg("sendbuf", buf_name_2)
+        else:
+            call.set_arg("buffer", buf_name_2)
+    return tm
+
+
+def get_global_missmatch(type_1, type_2, count_1, count_2, func_to_use, comm):
+    tm = get_collective_template(func_to_use)
+    comm_var_name = "MPI_COMM_WORLD"
+    if comm in comm_creators:
+        comm_var_name = get_communicator(comm, tm)
+
+    if comm in intercomms:
+        comm_var_name = get_intercomm(comm, tm)
+    type_var_1, buf_name_1, type_var_2, buf_name_2 = get_type_buffers(tm, type_1, type_2, count_1, count_2)
+
+    tm.set_description("ParamMatching-Type-" + func_to_use,
+                       "Wrong datatype matching: %s vs %s" % (type_1, type_2))
+
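+    # rank 0 issues the call with (type_1, count_1); all other ranks issue a matching call
+    # with (type_2, count_2), giving a global type mismatch across the communicator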
+    for call in tm.get_instruction("MPICALL", return_list=True):
+        call.set_rank_executing(0)
+        call.set_has_error()
+        call.set_arg("comm", comm_var_name)
+        if call.has_arg("recvbuf"):
+            call.set_arg("recvbuf", buf_name_1)
+            call.set_arg("sendbuf", buf_name_1)
+        else:
+            call.set_arg("buffer", buf_name_1)
+        if call.has_arg("recvtype"):
+            call.set_arg("recvtype", type_var_1)
+            call.set_arg("sendtype", type_var_1)
+        else:
+            call.set_arg("datatype", type_var_1)
+        if call.has_arg("recvcount"):
+            call.set_arg("recvcount", count_1)
+            call.set_arg("sendcount", count_1)
+        else:
+            call.set_arg("count", count_1)
+
+        c = CorrectMPICallFactory.get(func_to_use)
+        c.set_rank_executing('not0')
+        c.set_has_error()
+        c.set_arg("comm", comm_var_name)
+        if c.has_arg("recvbuf"):
+            c.set_arg("recvbuf", buf_name_2)
+            c.set_arg("sendbuf", buf_name_2)
+        else:
+            c.set_arg("buffer", buf_name_2)
+        if c.has_arg("recvtype"):
+            c.set_arg("recvtype", type_var_2)
+            c.set_arg("sendtype", type_var_2)
+        else:
+            c.set_arg("datatype", type_var_2)
+        if c.has_arg("recvcount"):
+            c.set_arg("recvcount", count_2)
+            c.set_arg("sendcount", count_2)
+        else:
+            c.set_arg("count", count_2)
+
+        tm.insert_instruction(c, after_instruction=call)
+
+    return tm
+
+
+def get_correct_case(type_1, count_1, func_to_use, comm):
+    tm = get_collective_template(func_to_use)
+    comm_var_name = "MPI_COMM_WORLD"
+    if comm in comm_creators:
+        comm_var_name = get_communicator(comm, tm)
+
+    if comm in intercomms:
+        comm_var_name = get_intercomm(comm, tm)
+
+    if type_1 in predefined_types:
+        buf_alloc = get_buffer_for_type(type_1, count_1)
+        tm.insert_instruction(buf_alloc, before_instruction=tm._instructions[0])
+        buf_name = buf_alloc.get_name()
+        type_var = type_1
+    else:
+        buf_name, type_var = get_buffer_for_usertype(type_1, tm, tm._instructions[0], count_1)
+
+    tm.set_description("Correct-" + func_to_use, "")
+
+    for call in tm.get_instruction("MPICALL", return_list=True):
+        call.set_rank_executing(0)
+        call.set_arg("comm", comm_var_name)
+        if call.has_arg("recvbuf"):
+            call.set_arg("recvbuf", buf_name)
+            call.set_arg("sendbuf", buf_name)
+        else:
+            call.set_arg("buffer", buf_name)
+        if call.has_arg("recvtype"):
+            call.set_arg("recvtype", type_var)
+            call.set_arg("sendtype", type_var)
+        else:
+            call.set_arg("datatype", type_var)
+        if call.has_arg("recvcount"):
+            call.set_arg("recvcount", count_1)
+            call.set_arg("sendcount", count_1)
+        else:
+            call.set_arg("count", count_1)
+
+        c = CorrectMPICallFactory.get(func_to_use)
+        c.set_rank_executing('not0')
+        c.set_arg("comm", comm_var_name)
+        if c.has_arg("recvbuf"):
+            c.set_arg("recvbuf", buf_name)
+            c.set_arg("sendbuf", buf_name)
+        else:
+            c.set_arg("buffer", buf_name)
+        if c.has_arg("recvtype"):
+            c.set_arg("recvtype", type_var)
+            c.set_arg("sendtype", type_var)
+        else:
+            c.set_arg("datatype", type_var)
+        if c.has_arg("recvcount"):
+            c.set_arg("recvcount", count_1)
+            c.set_arg("sendcount", count_1)
+        else:
+            c.set_arg("count", count_1)
+
+        tm.insert_instruction(c, after_instruction=call)
+
+    return tm
+
+
+def is_combination_compatible(a, b):
+    t1, f1, c1 = a
+    t2, f2, c2 = b
+    if t1 in predefined_types and t2 in predefined_types and predefined_mpi_dtype_consants[t1] == \
+            predefined_mpi_dtype_consants[t2] and not (t1 == "MPI_BYTE" or t2 == "MPI_BYTE"):
+        # one type is just the alias of another, this is allowed
+        # but BYTE may not be mixed with other types see standard section 3.3.1
+        return False
+
+    return t1 != t2 and c1 == c2 and f1 == f2
+
+
+class ParamMatchingTypeErrorColl(ErrorGenerator):
+    functions_to_use = ["mpi_bcast", "mpi_ibcast", "mpi_reduce", "mpi_ireduce", "mpi_exscan", "mpi_scan", "mpi_iscan",
+                        "mpi_gather", "mpi_igather", "mpi_allgather", "mpi_iallgather", "mpi_allreduce",
+                        "mpi_iallreduce", "mpi_alltoall", "mpi_ialltoall", "mpi_scatter", "mpi_iscatter"]
+    func_one_type_arg = ["mpi_bcast", "mpi_reduce", "mpi_exscan", "mpi_scan", "mpi_ibcast", "mpi_ireduce", "mpi_iscan",
+                         "mpi_allreduce", "mpi_iallreduce"]
+    functions_not_supported_yet = ["mpi_reduce_scatter_block", "mpi_allgatherv", "mpi_alltoallv", "mpi_alltoallw",
+                                   "mpi_gatherv", "mpi_reduce_scatter", "mpi_scatterv"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["COLL"]
+
+    def generate(self, generate_level, real_world_score_table):
+
+        types = predefined_types + user_defined_types
+
+        important_cases = []
+
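+        # collect (datatype, collective, communicator) triples; at the real-world levels only
+        # combinations that appear in the score table are kept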
+        for f in self.functions_to_use:
+            for comm in predefined_comms + comm_creators + intercomms:
+                for t1 in types:
+                    if generate_level in REAL_WORLD_FILTERING_LEVELS and not is_combination_important(
+                            real_world_score_table,
+                            f, datatype=t1.lower(),
+                            communicator=comm):
+                        continue
+                    important_cases.append((t1, f, comm))
+
+        combinations_to_use = [(a, b) for a in important_cases for b in important_cases if
+                               is_combination_compatible(a, b)]
+
+        if generate_level == SUFFICIENT_REAL_WORLD_TEST_LEVEL:
+            # ensure every important case appears in at least one pairing, without enumerating all pairings
+            combinations_to_use = []
+            for a in important_cases:
+                for b in important_cases:
+                    if is_combination_compatible(a, b):
+                        combinations_to_use.append((a, b))
+                        break
+
+        # "re-format"
+        combinations_to_use = [(t1, t2, f, c) for (t1, f, c), (t2, _, _) in combinations_to_use]
+
+        if generate_level == SUFFICIENT_TEST_LEVEL:
+            types_checked = set()
+            combinations_to_use_filtered = []
+            for (t1, t2, f, c) in combinations_to_use:
+                # keep only combinations that introduce at least one datatype not covered yet
+                if t1 not in types_checked and t2 not in types_checked:
+                    types_checked.add(t1)
+                    types_checked.add(t2)
+                    combinations_to_use_filtered.append((t1, t2, f, c))
+            combinations_to_use = combinations_to_use_filtered
+
+        if generate_level == BASIC_TEST_LEVEL:
+            combinations_to_use = combinations_to_use[0:1]
+
+        correct_types_checked = set()
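+        # emit one correct control case per (datatype, communicator) pair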
+        for type_1, type_2, func_to_use, comm in combinations_to_use:
+            if comm == "MPI_COMM_WORLD":
+                yield get_local_missmatch(type_1, type_2, func_to_use)
+
+            yield get_global_missmatch(type_1, type_2, 1, 1, func_to_use, comm)
+            # mismatch where the total number of bytes still matches on both sides
+            yield get_global_missmatch(type_1, type_2, get_bytes_size_for_type(type_2),
+                                       get_bytes_size_for_type(type_1),
+                                       func_to_use, comm)
+
+            if (type_1, comm) not in correct_types_checked:
+                correct_types_checked.add((type_1, comm))
+                yield get_correct_case(type_1, 1, func_to_use, comm)
diff --git a/scripts/errors/coll/RequestLifeCycle.py b/scripts/errors/coll/RequestLifeCycle.py
new file mode 100644
index 0000000000000000000000000000000000000000..691e4c6410ac52e092ce4e2392fc428e634fa684
--- /dev/null
+++ b/scripts/errors/coll/RequestLifeCycle.py
@@ -0,0 +1,39 @@
+#! /usr/bin/python3
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
+from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
+from scripts.Infrastructure.Template import TemplateManager
+from scripts.Infrastructure.TemplateFactory import get_send_recv_template, get_collective_template
+from scripts.Infrastructure.Variables import *
+
+
+class RequestLifeCycleColl(ErrorGenerator):
+    nbfunc_to_use = ["mpi_ibarrier", "mpi_iallreduce", "mpi_ialltoall", "mpi_ibcast", "mpi_ireduce", "mpi_iscatter",
+                     "mpi_igather", "mpi_iscan"]
+    functions_not_supported_yet = ["mpi_igatherv", "mpi_iscatterv"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["COLL"]
+
+    def generate(self, generate_level, real_world_score_table):
+
+        for func_to_use in self.nbfunc_to_use:
+            tm = get_collective_template(func_to_use)
+
+            tm.set_description("RequestLifeCycle-" + func_to_use,
+                               func_to_use + " is not associated with a completion operation (missing wait)")
+
+            for call in tm.get_instruction("MPICALL", return_list=True):
+                wait = tm.get_instruction("WAIT", return_list=True)
+                tm.remove_instruction(instruction=wait)
+                call.set_has_error()
+
+            yield tm
+
+            if generate_level <= BASIC_TEST_LEVEL:
+                return
diff --git a/scripts/errors/dtypes/DtypeMissmatch.py b/scripts/errors/dtypes/DtypeMissmatch.py
new file mode 100644
index 0000000000000000000000000000000000000000..422a4f3582bfb5caa16eb31d6e2eb13dc792a90e
--- /dev/null
+++ b/scripts/errors/dtypes/DtypeMissmatch.py
@@ -0,0 +1,224 @@
+#! /usr/bin/python3
+from copy import copy
+from random import shuffle
+
+from scripts.Infrastructure.AllocCall import AllocCall
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
+from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory
+from scripts.Infrastructure.ScoingModule.ScoringTable import is_combination_important
+from scripts.Infrastructure.Template import TemplateManager
+from scripts.Infrastructure.TemplateFactory import get_send_recv_template, get_invalid_param_p2p_case, get_communicator, \
+    get_intercomm, predefined_types, user_defined_types, predefined_mpi_dtype_consants, get_type_buffers, \
+    get_bytes_size_for_type, get_buffer_for_type, get_buffer_for_usertype
+
+from itertools import chain
+
+from scripts.Infrastructure.Variables import *
+
+# TODO refactor into a different file
+# test if the tool can deal with messages sent over different communicators
+predefined_comms = ["MPI_COMM_WORLD"]
+comm_creators = ["mpi_comm_dup", "mpi_comm_dup_with_info", "mpi_comm_idup",
+                 "mpi_comm_idup_with_info", "mpi_comm_create", "mpi_comm_create_group", "mpi_comm_split",
+                 "mpi_comm_split_type", "mpi_comm_create_from_group"
+                 ]
+intercomms = ["mpi_intercomm_create", "mpi_intercomm_merge", "mpi_intercomm_create_from_groups"]
+
+
+def get_local_missmatch(type_1, type_2, send_func, recv_func):
+    tm = get_send_recv_template(send_func, recv_func)
+    tm.set_description("LocalParameterMissmatch-Dtype-" + send_func,
+                       "datatype missmatch: Buffer: " + type_1 + " MPI_Call: " + type_2)
+    type_var_1, buf_name_1, type_var_2, buf_name_2 = get_type_buffers(tm, type_1, type_2, 10, 10)
+    # local missmatch
+    for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+        call.set_has_error()
+        call.set_arg("buf", buf_name_1)
+        call.set_arg("datatype", type_var_2)
+
+    return tm
+
+
+def get_correct_case(type_1, size_1, send_func, recv_func, comm):
+    tm = get_send_recv_template(send_func, recv_func)
+    tm.set_description("Correct-" + send_func,
+                       "")
+    comm_var_name = "MPI_COMM_WORLD"
+    if comm in comm_creators:
+        comm_var_name = get_communicator(comm, tm)
+
+    if comm in intercomms:
+        comm_var_name = get_intercomm(comm, tm)
+
+    call = tm.get_instruction(identifier="MPICALL", rank_excuting=1)
+
+    if type_1 in predefined_types:
+        buf_alloc = get_buffer_for_type(type_1, size_1)
+        tm.insert_instruction(buf_alloc, before_instruction=tm._instructions[0])
+        buf_name = buf_alloc.get_name()
+        type_var = type_1
+    else:
+        buf_name, type_var = get_buffer_for_usertype(type_1, tm, tm._instructions[0], size_1)
+
+    call.set_arg("buf", buf_name)
+    call.set_arg("datatype", type_var)
+    call.set_arg("count", size_1)
+    call.set_arg("comm", comm_var_name)
+    call = tm.get_instruction(identifier="MPICALL", rank_excuting=0)
+    call.set_arg("buf", buf_name)
+    call.set_arg("datatype", type_var)
+    call.set_arg("count", size_1)
+    call.set_arg("comm", comm_var_name)
+
+    return tm
+
+
+def get_global_missmatch(type_1, type_2, size_1, size_2, send_func, recv_func, comm):
+    tm = get_send_recv_template(send_func, recv_func)
+    tm.set_description("GlobalParameterMissmatch-Dtype-" + send_func,
+                       "datatype missmatch: Rank0: " + type_1 + " Rank1: " + type_2)
+    comm_var_name = "MPI_COMM_WORLD"
+    if comm in comm_creators:
+        comm_var_name = get_communicator(comm, tm)
+
+    if comm in intercomms:
+        comm_var_name = get_intercomm(comm, tm)
+
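+    # rank 1 sends buf_name_1 as (type_1, size_1); rank 0 receives into buf_name_2 as
+    # (type_2, size_2), producing a global datatype mismatch between sender and receiver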
+    type_var_1, buf_name_1, type_var_2, buf_name_2 = get_type_buffers(tm, type_1, type_2, size_1, size_2)
+    call = tm.get_instruction(identifier="MPICALL", rank_excuting=1)
+    call.set_has_error()
+    call.set_arg("buf", buf_name_1)
+    call.set_arg("datatype", type_var_1)
+    call.set_arg("count", size_1)
+    call.set_arg("comm", comm_var_name)
+    call = tm.get_instruction(identifier="MPICALL", rank_excuting=0)
+    call.set_has_error()
+    call.set_arg("buf", buf_name_2)
+    call.set_arg("datatype", type_var_2)
+    call.set_arg("count", size_2)
+    call.set_arg("comm", comm_var_name)
+
+    return tm
+
+
+def is_combination_compatible(s, r):
+    t1, send_func, c1 = s
+    t2, recv_func, c2 = r
+
+    if send_func in ["mpi_rsend", "mpi_irsend", "mpi_rsend_init"] and recv_func not in ["mpi_irecv",
+                                                                                        "mpi_recv_init",
+                                                                                        "mpi_precv_init"]:
+        # leads to deadlock
+        return False
+
+    if t1 in predefined_types and t2 in predefined_types and predefined_mpi_dtype_consants[
+        t1] == predefined_mpi_dtype_consants[t2] and not (t1 == "MPI_BYTE" or t2 == "MPI_BYTE"):
+        # one type is just the alias of another, this is allowed
+        # but BYTE may not be mixed with other types see standard section 3.3.1
+        return False
+
+    return c1 == c2 and t1 != t2
+
+
+class DtypeMissmatch(ErrorGenerator):
+    invalid_bufs = [CorrectParameterFactory().buf_var_name, "NULL"]
+    send_funcs = ["mpi_send",
+                  "mpi_isend", "mpi_ssend", "mpi_issend", "mpi_rsend", "mpi_irsend", "mpi_bsend", "mpi_ibsend",
+                  "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init",
+                  ]
+
+    recv_funcs = ["mpi_recv", "mpi_irecv", "mpi_recv_init", "mpi_precv_init"]
+
+    sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate(self, generate_level, real_world_score_table):
+
+        # collect (datatype, function, communicator) triples for sends and receives
+        important_sends = []
+        important_recvs = []
+        for type in predefined_types + user_defined_types:
+            for send_func in self.send_funcs:
+                for comm in predefined_comms + comm_creators + intercomms:
+                    important_sends.append((type, send_func, comm))
+
+        for type in predefined_types + user_defined_types:
+            for recv_func in self.recv_funcs:
+                for comm in predefined_comms + comm_creators + intercomms:
+                    important_recvs.append((type, recv_func, comm))
+
+        # filter to only important ones
+        if generate_level in REAL_WORLD_FILTERING_LEVELS:
+            important_sends = [(t, f, c) for (t, f, c) in important_sends if
+                               is_combination_important(real_world_score_table, f,
+                                                        datatype=t.lower(),
+                                                        communicator=c)]
+            important_recvs = [(t, f, c) for (t, f, c) in important_recvs if
+                               is_combination_important(real_world_score_table, f,
+                                                        datatype=t.lower(),
+                                                        communicator=c)]
+
+        # all possible combinations
+        combinations_to_use = [(s, r) for s in important_sends for r in important_recvs if
+                               is_combination_compatible(s, r)]
+
+        if generate_level == SUFFICIENT_REAL_WORLD_TEST_LEVEL:
+            # include every important case once
+            # but not all possible missmatches
+            combinations_to_use = []
+            for r in important_recvs:
+                for s in important_sends:
+                    if is_combination_compatible(s, r):
+                        combinations_to_use.append((s, r))
+                        break
+            # If there are still s values left, pair them with any r
+            for s in important_sends:
+                if all(s != pair[0] for pair in combinations_to_use):  # Check if s is already paired
+                    for r in important_recvs:
+                        if is_combination_compatible(s, r):
+                            combinations_to_use.append((s, r))
+                            break
+
+        # "re-format"
+        combinations_to_use = [(t1, t2, s, r, c) for (t1, s, c), (t2, r, _) in combinations_to_use]
+
+        if generate_level == BASIC_TEST_LEVEL:
+            combinations_to_use = combinations_to_use[0:1]
+
+        if generate_level == SUFFICIENT_TEST_LEVEL:
+            types_checked = set()
+            combinations_to_use_filtered = []
+            for (t1, t2, s, r, c) in combinations_to_use:
+                # keep only combinations that introduce at least one datatype not covered yet
+                if t1 not in types_checked and t2 not in types_checked:
+                    types_checked.add(t1)
+                    types_checked.add(t2)
+                    combinations_to_use_filtered.append((t1, t2, s, r, c))
+            combinations_to_use = combinations_to_use_filtered
+
+        correct_types_checked = set()
+        for type_1, type_2, send_func, recv_func, comm in combinations_to_use:
+
+            # local missmatch only for one communicator
+            if comm == "MPI_COMM_WORLD":
+                yield get_local_missmatch(type_1, type_2, send_func, recv_func)
+
+            # global missmatch: communicator is important
+            yield get_global_missmatch(type_1, type_2, 1, 1, send_func, recv_func, comm)
+
+            # global mismatch with counts chosen so that the total number of bytes matches on both sides
+            yield get_global_missmatch(type_1, type_2, get_bytes_size_for_type(type_2),
+                                       get_bytes_size_for_type(type_1), send_func, recv_func, comm)
+
+            if (type_1, comm) not in correct_types_checked:
+                correct_types_checked.add((type_1, comm))
+                yield get_correct_case(type_1, 1, send_func, recv_func, comm)
+
+        # TODO mrecv?
+        # TODO sendrecv?
diff --git a/scripts/errors/other/Initialized.py b/scripts/errors/other/Initialized.py
new file mode 100644
index 0000000000000000000000000000000000000000..7e368ecea7e9dc41826881015167861485590a85
--- /dev/null
+++ b/scripts/errors/other/Initialized.py
@@ -0,0 +1,37 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Template import TemplateManager
+from scripts.Infrastructure.Variables import *
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.MPICallFactory import MPICallFactory
+from scripts.Infrastructure.TemplateFactory import get_communicator, get_intercomm, \
+    get_invalid_param_p2p_case, get_send_recv_template
+
+sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]
+
+
+class Initialized(ErrorGenerator):
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["other"]
+
+    def generate(self, generate_level, real_world_score_table):
+        tm = TemplateManager()
+        tm.set_description("InvalidParam-flag-mpi_initialized", "NULL pointer as flag")
+
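+        # MPI_Initialized may be called before MPI_Init, so the only error here is the NULL flag pointer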
+        call = MPICallFactory.mpi_initialized("NULL")
+        call.set_has_error()
+        tm.register_instruction(call)
+        yield tm
+
+        tm = get_send_recv_template("mpi_send", "mpi_recv")
+
+        tm._has_init = False
+
+        tm.set_description("MissingCall-mpi_init", "No MPI_Init")
+
+        for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+            call.set_has_error()
+
+        yield tm
\ No newline at end of file
diff --git a/scripts/errors/pt2pt/Correct.py b/scripts/errors/pt2pt/Correct.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9c6fbffcc6bb58b662d6adfac2a939a46f0c02e
--- /dev/null
+++ b/scripts/errors/pt2pt/Correct.py
@@ -0,0 +1,53 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+from copy import copy
+
+from scripts.Infrastructure.AllocCall import AllocCall
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
+from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory
+from scripts.Infrastructure.Template import TemplateManager
+from scripts.Infrastructure.TemplateFactory import get_send_recv_template, get_invalid_param_p2p_case
+
+from itertools import chain
+
+
+class CorrectP2P(ErrorGenerator):
+    send_funcs = ["mpi_send",
+                  "mpi_isend", "mpi_ssend", "mpi_issend", "mpi_rsend", "mpi_irsend", "mpi_bsend", "mpi_ibsend",
+                  "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init",
+                  ]
+
+    recv_funcs = ["mpi_recv", "mpi_irecv", "mpi_recv_init", "mpi_precv_init"]
+
+    sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate(self, generate_level, real_world_score_table):
+        for func in self.send_funcs + self.recv_funcs:
+            if func in self.recv_funcs:
+                check_recv = True
+                send_func = "mpi_send"
+                recv_func = func
+            else:
+                check_recv = False
+                send_func = func
+                recv_func = "mpi_irecv"
+
+            tm = get_send_recv_template(send_func, recv_func)
+            tm.set_description("Correct-" + func, "Correct usage of " + func)
+            yield tm
+            if generate_level <= BASIC_TEST_LEVEL:
+                return
+
+        for func in ["mpi_mrecv", "mpi_imrecv"]:
+            tm = get_send_recv_template("mpi_isend", ["mpi_mprobe", func])
+            tm.set_description("Correct-" + func, "Correct usage of " + func)
+            yield tm
+    #TODO missing: probe waitall/testall
\ No newline at end of file
diff --git a/scripts/errors/pt2pt/InvalidBuf.py b/scripts/errors/pt2pt/InvalidBuf.py
new file mode 100644
index 0000000000000000000000000000000000000000..25a734451e89cb713247398efdf8c7b288369456
--- /dev/null
+++ b/scripts/errors/pt2pt/InvalidBuf.py
@@ -0,0 +1,193 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+from copy import copy
+
+from scripts.Infrastructure.AllocCall import AllocCall
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
+from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory
+from scripts.Infrastructure.Template import TemplateManager
+from scripts.Infrastructure.TemplateFactory import get_send_recv_template, get_invalid_param_p2p_case
+
+from itertools import chain
+
+
+class InvalidBufErrorP2P(ErrorGenerator):
+    invalid_bufs = ["NULL", "MPI_BOTTOM", "MPI_IN_PLACE"]
+    send_funcs = ["mpi_send",
+                  "mpi_isend", "mpi_ssend", "mpi_issend", "mpi_rsend", "mpi_irsend", "mpi_bsend", "mpi_ibsend",
+                  "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init",
+                  ]
+
+    recv_funcs = ["mpi_recv", "mpi_irecv", "mpi_recv_init", "mpi_precv_init"]
+
+    sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate(self, generate_level, real_world_score_table):
+        for func in self.send_funcs + self.recv_funcs:
+            if func in self.recv_funcs:
+                check_recv = True
+                send_func = "mpi_send"
+                recv_func = func
+            else:
+                check_recv = False
+                send_func = func
+                recv_func = "mpi_irecv"
+
+            for buf_to_use in self.invalid_bufs:
+                tm = get_invalid_param_p2p_case("buf", buf_to_use, check_recv, send_func, recv_func)
+                tm.set_description("InvalidParam-Buffer-" + func, "Invalid Buffer: " + buf_to_use)
+                yield tm
+
+            tm = get_invalid_param_p2p_case("buf", "not_allocated", check_recv, send_func, recv_func)
+            tm.set_description("InvalidParam-Buffer-" + func, "Invalid Buffer: Not allocated")
+            tm.insert_instruction(Instruction("int* not_allocated;"), before_instruction="ALLOC",
+                                  before_first_of_list=True)
+            yield tm
+
+            # use buffer after free
+            tm = get_invalid_param_p2p_case("buf", "buf", check_recv, send_func, recv_func)
+            tm.set_description("InvalidParam-Buffer-" + func, "Invalid Buffer: Use after Free")
+            call_list = tm.get_instruction(identifier="FREE", return_list=True)
+            tm.remove_instruction(identifier="FREE")
+            tm.insert_instruction(call_list, before_instruction="MPICALL", before_first_of_list=True)
+
+            yield tm
+
+            if generate_level <= BASIC_TEST_LEVEL:
+                return
+
+        for func in ["mpi_mrecv", "mpi_imrecv"]:
+            for buf_to_use in self.invalid_bufs:
+                tm = get_send_recv_template("mpi_isend", ["mpi_mprobe", func])
+                tm.set_description("InvalidParam-Buffer-" + func, "Invalid Buffer: " + buf_to_use)
+                for call in tm.get_instruction(identifier="MATCHEDRECEIVE", return_list=True):
+                    call.set_arg("buf", buf_to_use)
+                    call.set_has_error()
+                yield tm
+
+            tm = get_send_recv_template("mpi_isend", ["mpi_mprobe", func])
+            tm.set_description("InvalidParam-Buffer-" + func, "Invalid Buffer: Not allocated")
+            tm.insert_instruction(Instruction("int* not_allocated;"), before_instruction="ALLOC",
+                                  before_first_of_list=True)
+            for call in tm.get_instruction(identifier="MATCHEDRECEIVE", return_list=True):
+                call.set_arg("buf", "not_allocated")
+                call.set_has_error()
+            yield tm
+
+            tm = get_send_recv_template("mpi_isend", ["mpi_mprobe", func])
+            tm.set_description("InvalidParam-Buffer-" + func, "Invalid Buffer: Use after Free")
+
+            call_list = tm.get_instruction(identifier="FREE", return_list=True)
+            tm.remove_instruction(identifier="FREE")
+            tm.insert_instruction(call_list, before_instruction="MPICALL", before_first_of_list=True)
+            yield tm
+
+
+class InvalidBufErrorSendRecv(ErrorGenerator):
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate(self, generate_level, real_world_score_table):
+
+        for buf_to_use in ["buf", "MPI_IN_PLACE"]:
+            tm = get_send_recv_template("mpi_sendrecv", "mpi_sendrecv")
+            tm.set_description("InvalidParam-Buffer-mpi_sendrecv", "send and recv buffer must be disjoint in sendrecv")
+
+            for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+                call.set_arg("recvbuf", buf_to_use)
+                call.set_has_error()
+            yield tm
+
+        # NULL
+        tm = get_send_recv_template("mpi_sendrecv", "mpi_sendrecv")
+        tm.set_description("InvalidParam-Buffer-mpi_sendrecv", "NULL as receive buffer")
+
+        for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+            call.set_arg("recvbuf", "NULL")
+            call.set_has_error()
+        yield tm
+
+        tm = get_send_recv_template("mpi_sendrecv", "mpi_sendrecv")
+        tm.set_description("InvalidParam-Buffer-mpi_sendrecv", "NULL as send buffer")
+
+        for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+            call.set_arg("sendbuf", "NULL")
+            call.set_has_error()
+        yield tm
+
+        # use without alloc
+        tm = get_send_recv_template("mpi_sendrecv", "mpi_sendrecv")
+        tm.set_description("InvalidParam-Buffer-mpi_sendrecv", "not allocated receive buffer")
+        tm.insert_instruction(Instruction("int* not_allocated;"), before_instruction="ALLOC", before_first_of_list=True)
+
+        for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+            call.set_arg("recvbuf", "not_allocated")
+            call.set_has_error()
+        yield tm
+
+        tm = get_send_recv_template("mpi_sendrecv", "mpi_sendrecv")
+        tm.set_description("InvalidParam-Buffer-mpi_sendrecv", "not allocated send buffer")
+        tm.insert_instruction(Instruction("int* not_allocated;"), before_instruction="ALLOC", before_first_of_list=True)
+
+        for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+            call.set_arg("sendbuf", "not_allocated")
+            call.set_has_error()
+        yield tm
+
+        # use after free
+        tm = get_send_recv_template("mpi_sendrecv", "mpi_sendrecv")
+        tm.set_description("InvalidParam-Buffer-mpi_sendrecv", "use after free for buffers")
+        call_list = tm.get_instruction(identifier="FREE", return_list=True)
+        tm.remove_instruction(identifier="FREE")
+        tm.insert_instruction(call_list, before_instruction="MPICALL", before_first_of_list=True)
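+        # the free calls are now moved in front of the sendrecv, so both buffers are used after free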
+        yield tm
+
+
+class Overlapping_buf(ErrorGenerator):
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate(self, generate_level, real_world_score_table):
+        # TODO this case for recv_init, imrecv and precv_init in all combinations?
+        recv_func = "mpi_irecv"
+        tm = get_send_recv_template("mpi_send", recv_func)
+        tm.set_description("LocalConcurrency-" + recv_func, "Overlapping recv buffers")
+        recv = tm.get_instruction(identifier="MPICALL", rank_excuting=0)
+        buf_var = recv.get_arg("buf")
+        buf_len = recv.get_arg("count")
+        second_recv = copy(recv)
+        req = tm.add_stack_variable("MPI_Request")
+        second_recv.set_arg("request", "&" + req)
+        tm.insert_instruction(second_recv, after_instruction=recv)
+        wait = tm.get_instruction(identifier="WAIT", rank_excuting=0)
+        second_wait = copy(wait)
+        second_wait.set_arg("request", "&" + req)
+        tm.insert_instruction(second_wait, after_instruction=wait)
+        yield tm
+
+        if generate_level <= BASIC_TEST_LEVEL:
+            return
+
+        tm.set_description("LocalConcurrency-" + recv_func, "partially overlapping recv buffers")
+        recv.set_arg("count", buf_len + "/2")
+        second_recv.set_arg("count", buf_len + "/2")
+        second_recv.set_arg("buf", "&" + buf_var + "[" + buf_len + "/4]")
+        yield tm
diff --git a/scripts/errors/pt2pt/InvalidComm.py b/scripts/errors/pt2pt/InvalidComm.py
new file mode 100644
index 0000000000000000000000000000000000000000..eeb195e94cb2c24c8371eeb48cbc606b6f80314d
--- /dev/null
+++ b/scripts/errors/pt2pt/InvalidComm.py
@@ -0,0 +1,105 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.MPICallFactory import MPICallFactory
+from scripts.Infrastructure.TemplateFactory import get_communicator, get_intercomm, \
+    get_invalid_param_p2p_case
+
+sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]
+
+
+class InvalidCommErrorP2P(ErrorGenerator):
+    invalid_comm = ["MPI_COMM_NULL", "NULL"]
+
+    missmatching_comms = ["MPI_COMM_SELF", "mpi_comm_dup", "mpi_comm_dup_with_info", "mpi_comm_idup",
+                          "mpi_comm_idup_with_info", "mpi_comm_create", "mpi_comm_create_group", "mpi_comm_split",
+                          "mpi_comm_split_type", "mpi_comm_create_from_group"
+                          ]
+    intercomms = ["mpi_intercomm_create", "mpi_intercomm_merge", "mpi_intercomm_create_from_groups"]
+
+    # as extended testcases
+
+    comms_to_check = invalid_comm + missmatching_comms + intercomms
+
+    mprobe_funcs = ["mpi_mprobe", "mpi_improbe"]
+    probe_funcs = ["mpi_probe", "mpi_iprobe"]
+
+    functions_to_check = ["mpi_send",
+                          "mpi_recv", "mpi_irecv",
+                          "mpi_isend", "mpi_ssend", "mpi_issend", "mpi_rsend", "mpi_irsend", "mpi_bsend", "mpi_ibsend",
+                          "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init",
+                          "mpi_precv_init", "mpi_recv_init"
+                          ] + sendrecv_funcs + mprobe_funcs + probe_funcs
+
+    recv_funcs = ["mpi_recv", "mpi_irecv", "mpi_recv_init",
+                  "mpi_precv_init"] + sendrecv_funcs + mprobe_funcs + probe_funcs
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
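+    # Usage sketch (hypothetical driver code; the actual generation pipeline may differ):
+    #   gen = InvalidCommErrorP2P()
+    #   for tm in gen.generate(FULL_TEST_LEVEL, score_table):  # score_table as provided by the pipeline
+    #       emit_test_case(tm)                                  # placeholder for whatever writes the C source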
+    def generate(self, generate_level, real_world_score_table):
+        for func in self.functions_to_check:
+            if func in self.recv_funcs:
+                check_recv = True
+                send_func = "mpi_send"
+                recv_func = func
+            else:
+                check_recv = False
+                send_func = func
+                recv_func = "mpi_irecv"
+            if func in self.mprobe_funcs:
+                send_func = "mpi_send"
+                recv_func = [func, "mpi_mrecv"]
+            if func in self.probe_funcs:
+                send_func = "mpi_send"
+                recv_func = "mpi_irecv"
+                # not implemented
+                continue
+                # TODO add probe call
+
+            i = 0
+            for comm_to_use in self.comms_to_check:
+                if comm_to_use in self.missmatching_comms + self.intercomms and recv_func == "mpi_irecv" and generate_level < FULL_TEST_LEVEL:
+                    # combination repeated
+                    continue
+                i += 1
+
+                tm = get_invalid_param_p2p_case("comm", comm_to_use, check_recv, send_func, recv_func)
+                error_string = "ParamMatching"
+                if comm_to_use in self.invalid_comm:
+                    error_string = "InvalidParam"
+                tm.set_description(error_string + "-Comm-" + func, error_string + ": %s" % comm_to_use)
+
+                if comm_to_use in self.missmatching_comms and comm_to_use != "MPI_COMM_SELF":
+                    comm_var_name = get_communicator(comm_to_use, tm)
+                    #  change the arg in the MPI call to the result variable name
+                    for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+                        if call.get_arg("comm") == comm_to_use:
+                            call.set_arg("comm", comm_var_name)
+
+                if comm_to_use in self.intercomms:
+                    comm_var_name = get_intercomm(comm_to_use, tm)
+                    for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+                        if call.get_arg("comm") == comm_to_use:
+                            call.set_arg("comm", comm_var_name)
+
+                # if intercomm: set rank to 0 instead of 1, as there is only one rank in the intercomm
+                if comm_to_use in self.intercomms and not comm_to_use == "mpi_intercomm_merge":
+                    for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+                        if call.has_arg("source"):
+                            call.set_arg("source", "0")
+
+                error_string = "ParamMatching"
+                if comm_to_use in self.invalid_comm:
+                    error_string = "InvalidParam"
+                if comm_to_use in self.missmatching_comms + self.intercomms and comm_to_use != "MPI_COMM_SELF":
+                    tm.register_instruction(MPICallFactory().mpi_comm_free("&" + comm_var_name))
+                yield tm
+                if generate_level <= BASIC_TEST_LEVEL and i > 3:
+                    return
+            # end for comm to check
+        # end for send_func in funcs_to_check
diff --git a/scripts/errors/pt2pt/InvalidFlag.py b/scripts/errors/pt2pt/InvalidFlag.py
new file mode 100644
index 0000000000000000000000000000000000000000..1499525bff7d879be95d4bcc64b8952fdb6f20e2
--- /dev/null
+++ b/scripts/errors/pt2pt/InvalidFlag.py
@@ -0,0 +1,127 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+
+from scripts.Infrastructure.MPICallFactory import CorrectMPICallFactory, MPICallFactory
+from scripts.Infrastructure.TemplateFactory import get_send_recv_template, get_invalid_param_p2p_case, replace_wait
+
+
+class InvalidFlagErrorP2P(ErrorGenerator):
+    # invalid values for the flag / outcount / index output parameters
+    invalid_flag = ["NULL", "not_allocated"]
+    test_funcs = ["mpi_test", "mpi_testall", "mpi_waitany", "mpi_testany", "mpi_waitsome", "mpi_testsome"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate_mprobe(self, probe_func, recv_func, test_probe, status):
+        send_func = "mpi_send"
+
+        func_to_test = recv_func
+        if test_probe:
+            func_to_test = probe_func
+        tm = get_send_recv_template(send_func, [probe_func, recv_func])
+        tm.set_description("InvalidParam-Status-" + func_to_test, "Invalid Status: %s" % status)
+        if status == "MPI_STATUSES_IGNORE":
+            tm.set_description("InvalidParam-Status-" + func_to_test,
+                               "Invalid Status: %s\n wrong usage of status_ignore vs statusES_ignore" % status)
+
+        if test_probe:
+            for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+                if call.get_rank_executing() == 0:
+                    call.set_arg("status", status)
+                    call.set_has_error()
+        else:
+            for call in tm.get_instruction(identifier="MATCHEDRECEIVE", return_list=True):
+                if call.get_rank_executing() == 0:
+                    call.set_arg("status", status)
+                    call.set_has_error()
+
+        return tm
+
+    def generate_probe(self, probe_to_use, status):
+        tm = get_send_recv_template("mpi_send", "mpi_recv")
+        tm.set_description("InvalidParam-Status-" + probe_to_use, "Invalid Status: %s" % status)
+        if status == "MPI_STATUSES_IGNORE":
+            tm.set_description("InvalidParam-Status-" + probe_to_use,
+                               "Invalid Status: %s\n wrong usage of status_ignore vs statusES_ignore" % status)
+
+        probe_call = CorrectMPICallFactory.get(probe_to_use)
+        probe_call.set_arg("status", status)
+        probe_call.set_has_error()
+        probe_call.set_rank_executing(0)
+
+        if probe_to_use == "mpi_iprobe":
+            tm.add_stack_variable("int")  # the flag
+            tm.insert_instruction(Instruction("int flag=0;", rank=0), before_instruction="MPICALL",
+                                  before_first_of_list=True)
+            tm.insert_instruction(Instruction("while (!flag){", rank=0), before_instruction="MPICALL",
+                                  before_first_of_list=True)
+            tm.insert_instruction(probe_call, before_instruction="MPICALL", before_first_of_list=True)
+            tm.insert_instruction(Instruction("}", rank=0), before_instruction="MPICALL",
+                                  before_first_of_list=True)  # end while
+        else:
+            tm.insert_instruction(probe_call, before_instruction="MPICALL", before_first_of_list=True)
+
+        return tm
+
+    def generate(self, generate_level, real_world_score_table):
+
+        for wait_func in self.test_funcs:
+            for flag in self.invalid_flag:
+                tm = get_send_recv_template("mpi_send", "mpi_irecv")
+                replace_wait(tm.get_instruction(identifier="WAIT"), tm, wait_func)
+                wait_call = tm.get_instruction(identifier="WAIT")
+                wait_call.set_has_error()
+                if flag == "not_allocated":
+                    tm.insert_instruction(Instruction("int* not_allocated;"), before_instruction="ALLOC",
+                                          before_first_of_list=True)  # not allocated
+
+                for arg in ["flag", "outcount", "indx", "array_of_indices"]:
+
+                    if wait_call.has_arg(arg):
+                        prev_value = wait_call.get_arg(arg)
+                        wait_call.set_arg(arg, flag)
+                        tm.set_description("InvalidParam-" + arg + "-" + wait_func, ("Invalid %s: %s" % (arg, flag)))
+                        yield tm
+                        wait_call.set_arg(arg, prev_value)
+
+            if generate_level <= BASIC_TEST_LEVEL:
+                return
+
+        for flag in self.invalid_flag:
+            tm = get_send_recv_template("mpi_send", "mpi_recv")
+            if flag == "not_allocated":
+                tm.insert_instruction(Instruction("int* not_allocated;"), before_instruction="ALLOC",
+                                      before_first_of_list=True)  # not allocated
+            arg = "flag"
+            wait_func="mpi_iprobe"
+            tm.set_description("InvalidParam-" + arg + "-" + wait_func, ("Invalid %s: %s" % (arg, flag)))
+            probe_call = CorrectMPICallFactory.get("mpi_iprobe")
+            probe_call.set_arg("flag", flag)
+            probe_call.set_has_error()
+            probe_call.set_rank_executing(0)
+            tm.insert_instruction(Instruction("int flag=0;", rank=0), before_instruction="MPICALL",
+                                  before_first_of_list=True)
+            tm.insert_instruction(Instruction("while (!flag){", rank=0), before_instruction="MPICALL",
+                                  before_first_of_list=True)
+            tm.insert_instruction(probe_call, before_instruction="MPICALL", before_first_of_list=True)
+            tm.insert_instruction(Instruction("}", rank=0), before_instruction="MPICALL", before_first_of_list=True)
+            yield tm
+            tm = get_send_recv_template("mpi_send", ["mpi_improbe", "mpi_mrecv"])
+            arg = "flag"
+            wait_func = "mpi_improbe"
+            tm.set_description("InvalidParam-" + arg + "-" + wait_func, ("Invalid %s: %s" % (arg, flag)))
+            if flag == "not_allocated":
+                tm.insert_instruction(Instruction("int* not_allocated;"), before_instruction="ALLOC",
+                                      before_first_of_list=True)  # not allocated
+            for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+                if call.get_rank_executing() == 0:
+                    call.set_arg("flag", flag)
+                    call.set_has_error()
+            yield tm
diff --git a/scripts/errors/pt2pt/InvalidRank.py b/scripts/errors/pt2pt/InvalidRank.py
new file mode 100644
index 0000000000000000000000000000000000000000..f77a6872ce4079ace62ea26f4fc5a06c14043779
--- /dev/null
+++ b/scripts/errors/pt2pt/InvalidRank.py
@@ -0,0 +1,96 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+
+from scripts.Infrastructure.MPICallFactory import CorrectMPICallFactory
+from scripts.Infrastructure.TemplateFactory import get_send_recv_template, get_invalid_param_p2p_case, insert_probe
+
+
+class InvalidRankErrorP2P(ErrorGenerator):
+    invalid_ranks = ["-1", "nprocs", "MPI_PROC_NULL"]
+    send_funcs = ["mpi_send",
+                  "mpi_isend", "mpi_ssend", "mpi_issend", "mpi_rsend", "mpi_irsend", "mpi_bsend", "mpi_ibsend",
+                  "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init"
+                  ]
+    recv_funcs = ["mpi_recv", "mpi_irecv", "mpi_recv_init", "mpi_precv_init"]
+    probe_recv_funcs = ["mpi_mprobe", "mpi_improbe"]
+
+    sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate_mprobe(self, recv_func, rank_to_use):
+        send_func = "mpi_isend"
+
+        tm = get_send_recv_template(send_func, [recv_func, "mpi_mrecv"])
+
+        tm.set_description("InvalidParam-Rank-" + recv_func, "Invalid Rank: %s" % rank_to_use)
+
+        for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+            if call.get_rank_executing() == 0:
+                call.set_arg("source", rank_to_use)
+                call.set_has_error()
+
+        return tm
+
+    def generate_probe(self, probe_to_use, rank_to_use):
+        tm = get_send_recv_template("mpi_send", "mpi_recv")
+
+        tm.set_description("InvalidParam-Rank-" + probe_to_use, "Invalid Rank: %s" % rank_to_use)
+
+        recv_call = tm.get_instruction(identifier="MPICALL", rank_excuting=0)
+        probe_call = insert_probe(tm, probe_to_use, recv_call)
+        probe_call.set_arg("source", rank_to_use)
+        probe_call.set_has_error()
+
+        return tm
+
+    def generate(self, generate_level, real_world_score_table):
+        for func in self.send_funcs + self.recv_funcs:
+            if func in self.recv_funcs:
+                check_recv = True
+                send_func = "mpi_send"
+                recv_func = func
+            else:
+                check_recv = False
+                send_func = func
+                recv_func = "mpi_irecv"
+            for rank_to_use in self.invalid_ranks:
+
+                if check_recv:
+                    tm = get_invalid_param_p2p_case("source", rank_to_use, check_recv, send_func, recv_func)
+                else:
+                    tm = get_invalid_param_p2p_case("dest", rank_to_use, check_recv, send_func, recv_func)
+
+                tm.set_description("InvalidParam-Rank-" + func, "Invalid Rank: %s" % rank_to_use)
+
+                yield tm
+
+            if generate_level <= BASIC_TEST_LEVEL:
+                return
+
+        for rank_to_use in self.invalid_ranks:
+            check_recv = False
+            func = "mpi_sendrecv"
+            send_func = func
+            recv_func = func
+            tm = get_invalid_param_p2p_case("dest", rank_to_use, check_recv, send_func, recv_func)
+            tm.set_description("InvalidParam-Rank-" + func, "Invalid Rank: %s" % rank_to_use)
+            yield tm
+
+            check_recv = True
+            tm = get_invalid_param_p2p_case("source", rank_to_use, check_recv, send_func, recv_func)
+            tm.set_description("InvalidParam-Rank-" + func, "Invalid Rank: %s" % rank_to_use)
+            yield tm
+
+        for rank_to_use in self.invalid_ranks:
+            yield self.generate_probe("mpi_probe", rank_to_use)
+            yield self.generate_probe("mpi_iprobe", rank_to_use)
+            yield self.generate_mprobe("mpi_mprobe", rank_to_use)
+            yield self.generate_mprobe("mpi_improbe", rank_to_use)
diff --git a/scripts/errors/pt2pt/InvalidRequest.py b/scripts/errors/pt2pt/InvalidRequest.py
new file mode 100644
index 0000000000000000000000000000000000000000..975c06bf6a473ba186159b06dc9aad3c9b3f6f00
--- /dev/null
+++ b/scripts/errors/pt2pt/InvalidRequest.py
@@ -0,0 +1,217 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.MPICall import MPICall
+from scripts.Infrastructure.TemplateFactory import get_send_recv_template, replace_wait
+from scripts.Infrastructure.MPICallFactory import MPICallFactory
+
+
+class InvalidRequestErrorP2P(ErrorGenerator):
+    invalid_requests = ["MPI_REQUEST_NULL",  # probably triggers compiler warning
+                        # "&MPI_REQUEST_NULL" #TODO test this as well?? depending on MPI implementation: may be compiler error
+                        "NULL"]
+    functions_to_check = ["mpi_irecv", "mpi_isend",
+                          "mpi_issend", "mpi_irsend", "mpi_ibsend",
+                          "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init",
+                          "mpi_precv_init", "mpi_recv_init", "mpi_imrecv"
+                          ]
+
+    recv_funcs = ["mpi_irecv", "mpi_recv_init", "mpi_precv_init", "mpi_imrecv"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate(self, generate_level, real_world_score_table):
+
+        for send_func in self.functions_to_check:
+            for req_to_use in self.invalid_requests:
+
+                check_receive = False
+                recv_func = "mpi_irecv"
+                if send_func in self.recv_funcs:
+                    check_receive = True
+                    recv_func = send_func
+                    send_func = "mpi_isend"
+
+                recv_func_to_use = recv_func
+                if recv_func == "mpi_imrecv":
+                    recv_func_to_use = ["mpi_mprobe", "mpi_imrecv"]
+
+                tm = get_send_recv_template(send_func, recv_func_to_use)
+
+                if check_receive:
+                    tm.set_description("InvalidParam-Request-" + recv_func, "Invalid Request: %s" % req_to_use)
+                else:
+                    tm.set_description("InvalidParam-Request-" + send_func, "Invalid Request: %s" % req_to_use)
+
+                kind = 1
+                if check_receive:
+                    kind = 0
+
+                for inst in tm.get_instruction(identifier="MPICALL", return_list=True) + tm.get_instruction(
+                        identifier="MATCHEDRECEIVE", return_list=True):
+                    if inst.get_rank_executing() == kind:
+                        if isinstance(inst, MPICall) and inst.has_arg("request"):
+                            inst.set_arg("request", req_to_use)
+                            inst.set_has_error()
+
+                yield tm
+
+            if generate_level <= BASIC_TEST_LEVEL:
+                return
+
+
+class InvalidRequestArrayP2P(ErrorGenerator):
+    test_funcs = ["mpi_testall", "mpi_waitany", "mpi_testany", "mpi_waitsome", "mpi_testsome"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate(self, generate_level, real_world_score_table):
+        for wait_func in self.test_funcs:
+            tm = get_send_recv_template("mpi_send", "mpi_irecv")
+            tm.set_description("InvalidParam-Request-" + wait_func,
+                               "Invalid Request Array: the same request is not allowed to be in the array multiple times")
+            replace_wait(tm.get_instruction(identifier="WAIT"), tm, wait_func)
+            wait_call = tm.get_instruction(identifier="WAIT")
+            wait_call.set_has_error()
+
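+            # duplicate the single request into a two-element array so the same request appears
+            # twice in the array passed to the wait/test call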
+            prev_req = wait_call.get_arg("array_of_requests")[1:]  # remove leading &
+            wait_call.set_arg("array_of_requests", "req_array")
+
+            tm.insert_instruction(Instruction("MPI_Request req_array[2];", rank=wait_call.get_rank_executing()),
+                                  before_instruction=wait_call)
+            tm.insert_instruction(Instruction("req_array[0] = " + prev_req + ";", rank=wait_call.get_rank_executing()),
+                                  before_instruction=wait_call)
+            tm.insert_instruction(Instruction("req_array[1] = " + prev_req + ";", rank=wait_call.get_rank_executing()),
+                                  before_instruction=wait_call)
+
+            yield tm
+            if generate_level <= BASIC_TEST_LEVEL:
+                return
+
+            wait_call.set_arg("array_of_requests", "NULL")
+            tm.set_description("InvalidParam-Request-" + wait_func,
+                               "Invalid Request Array: NULL")
+            yield tm
+
+        # NULL error also for wait and test
+        tm = get_send_recv_template("mpi_send", "mpi_irecv")
+        tm.set_description("InvalidParam-Request-mpi_wait",
+                           "Invalid Request: NULL")
+        wait_call = tm.get_instruction(identifier="WAIT")
+        wait_call.set_has_error()
+        wait_call.set_arg("request", "NULL")
+        yield tm
+
+        replace_wait(wait_call, tm, "mpi_test")
+        wait_call = tm.get_instruction(identifier="WAIT")
+        wait_call.set_has_error()
+        wait_call.set_arg("request", "NULL")
+        tm.set_description("InvalidParam-Request-mpi_test",
+                           "Invalid Request: NULL")
+        yield tm
+
+
+# TODO startall
+class InvalidPersistentRequestUsage(ErrorGenerator):
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate(self, generate_level, real_world_score_table):
+        tm = get_send_recv_template("mpi_send_init", "mpi_recv_init")
+        start_call = tm.get_instruction(identifier="START", rank_excuting=0)
+
+        tm.set_description("InvalidParam-Request-mpi_start", "Invalid Param: NULL")
+
+        start_call.set_arg("request", "NULL")
+        start_call.set_has_error()
+        yield tm
+
+        tm = get_send_recv_template("mpi_send_init", "mpi_recv_init")
+        start_call = tm.get_instruction(identifier="START", rank_excuting=0)
+
+        tm.set_description("InvalidParam-Request-mpi_start", "Starting an active request again")
+
+        second_start = MPICallFactory.mpi_start(start_call.get_arg("request"))
+        second_start.set_rank_executing(0)
+        second_start.set_has_error()
+
+        tm.insert_instruction(second_start, after_instruction=start_call)
+
+        yield tm
+
+        tm = get_send_recv_template("mpi_send_init", "mpi_recv_init")
+
+        tm.set_description("InvalidParam-Request-mpi_request_free", "freeing an active request")
+
+        # move wait calls to the end
+        wait_call = tm.get_instruction(identifier="WAIT", rank_excuting=0)
+        tm.remove_instruction(instruction=wait_call)
+        tm.register_instruction(wait_call)
+        wait_call = tm.get_instruction(identifier="WAIT", rank_excuting=1)
+        tm.remove_instruction(instruction=wait_call)
+        tm.register_instruction(wait_call)
+
+        for call in tm.get_instruction("FREE", return_list=True):
+            if isinstance(call, MPICall):
+                if call.get_function() == "mpi_request_free":
+                    call.set_has_error()
+
+        # yield tm
+        # according to the standard this is permitted (although strongly discouraged), see Section 3.7.3 of the MPI standard
+
+        tm = get_send_recv_template("mpi_send_init", "mpi_recv_init")
+        start_call = tm.get_instruction(identifier="START", rank_excuting=0)
+
+        tm.set_description("InvalidParam-Request-mpi_startall", "Starting an request twice")
+
+        prev_req = start_call.get_arg("request")
+
+        tm.insert_instruction(Instruction("MPI_Request req_array[2];", rank=start_call.get_rank_executing()),
+                              before_instruction=start_call)
+        tm.insert_instruction(Instruction("req_array[0] = " + prev_req + ";", rank=start_call.get_rank_executing()),
+                              before_instruction=start_call)
+        tm.insert_instruction(Instruction("req_array[1] = " + prev_req + ";", rank=start_call.get_rank_executing()),
+                              before_instruction=start_call)
+
+        startall_call = MPICallFactory.mpi_startall(2, "req_array")
+        startall_call.set_has_error()
+        startall_call.set_rank_executing(start_call.get_rank_executing())
+        tm.insert_instruction(startall_call, before_instruction=start_call)
+        tm.remove_instruction(start_call)
+
+        yield tm
+
+        tm = get_send_recv_template("mpi_send_init", "mpi_recv_init")
+        start_call = tm.get_instruction(identifier="START", rank_excuting=0)
+
+        tm.set_description("InvalidParam-Request-mpi_startall", "array contains NULL")
+
+        prev_req = start_call.get_arg("request")
+
+        tm.insert_instruction(Instruction("MPI_Request req_array[2];", rank=start_call.get_rank_executing()),
+                              before_instruction=start_call)
+        tm.insert_instruction(Instruction("req_array[0] = " + prev_req + ";", rank=start_call.get_rank_executing()),
+                              before_instruction=start_call)
+        tm.insert_instruction(Instruction("req_array[1] = " + "NULL" + ";", rank=start_call.get_rank_executing()),
+                              before_instruction=start_call)
+
+        startall_call = MPICallFactory.mpi_startall(2, "req_array")
+        startall_call.set_has_error()
+        startall_call.set_rank_executing(start_call.get_rank_executing())
+        tm.insert_instruction(startall_call, before_instruction=start_call)
+        tm.remove_instruction(start_call)
+
+        yield tm
diff --git a/scripts/errors/pt2pt/InvalidStatus.py b/scripts/errors/pt2pt/InvalidStatus.py
new file mode 100644
index 0000000000000000000000000000000000000000..6587da60dfa276ecb0003d3690135d091d429040
--- /dev/null
+++ b/scripts/errors/pt2pt/InvalidStatus.py
@@ -0,0 +1,121 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+
+from scripts.Infrastructure.MPICallFactory import CorrectMPICallFactory, MPICallFactory
+from scripts.Infrastructure.TemplateFactory import get_send_recv_template, get_invalid_param_p2p_case, replace_wait, \
+    insert_probe
+
+
+class InvalidStatusErrorP2P(ErrorGenerator):
+    # MPI_STATUSES_IGNORE (plural) is only allowed in the test/wait variants that take a status array
+    invalid_status = ["NULL", "MPI_STATUSES_IGNORE"]
+    recv_funcs = ["mpi_recv"]
+    probe_recv_funcs = ["mpi_mprobe", "mpi_improbe"]
+    test_funcs = ["mpi_wait", "mpi_test", "mpi_waitall", "mpi_testall", "mpi_waitany", "mpi_testany", "mpi_waitsome",
+                  "mpi_testsome"]
+    # , "mpi_testall", "mpi_testsome", "mpi_testany"]
+    sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate_mprobe(self, probe_func, recv_func, test_probe, status):
+        send_func = "mpi_send"
+
+        func_to_test = recv_func
+        if test_probe:
+            func_to_test = probe_func
+        tm = get_send_recv_template(send_func, [probe_func, recv_func])
+        tm.set_description("InvalidParam-Status-" + func_to_test, "Invalid Status: %s" % status)
+        if status == "MPI_STATUSES_IGNORE":
+            tm.set_description("InvalidParam-Status-" + func_to_test,
+                               "Invalid Status: %s\n wrong usage of status_ignore vs statusES_ignore" % status)
+
+        if test_probe:
+            for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+                if call.get_rank_executing() == 0:
+                    call.set_arg("status", status)
+                    call.set_has_error()
+        else:
+            for call in tm.get_instruction(identifier="MATCHEDRECEIVE", return_list=True):
+                if call.get_rank_executing() == 0:
+                    call.set_arg("status", status)
+                    call.set_has_error()
+
+        return tm
+
+    def generate_probe(self, probe_to_use, status):
+        tm = get_send_recv_template("mpi_send", "mpi_recv")
+        tm.set_description("InvalidParam-Status-" + probe_to_use, "Invalid Status: %s" % status)
+        if status == "MPI_STATUSES_IGNORE":
+            tm.set_description("InvalidParam-Status-" + probe_to_use,
+                               "Invalid Status: %s\n wrong usage of status_ignore vs statusES_ignore" % status)
+
+        recv_call = tm.get_instruction(identifier="MPICALL", rank_excuting=0)
+        probe_call = insert_probe(tm, probe_to_use, recv_call)
+        probe_call.set_arg("status", status)
+        probe_call.set_has_error()
+
+        return tm
+
+    def generate(self, generate_level, real_world_score_table):
+
+        check_recv = True
+        send_func = "mpi_send"
+        recv_func = "mpi_recv"
+        for status in self.invalid_status:
+            tm = get_invalid_param_p2p_case("status", status, check_recv, send_func, recv_func)
+            tm.set_description("InvalidParam-Status-" + "mpi_recv", "Invalid Status: %s" % status)
+            if status == "MPI_STATUSES_IGNORE":
+                tm.set_description("InvalidParam-Status-" + "mpi_recv",
+                                   "Invalid Status: %s\n wrong usage of status_ignore vs statusES_ignore" % status)
+            yield tm
+
+        if generate_level <= BASIC_TEST_LEVEL:
+            return
+
+        for wait_func in self.test_funcs:
+            for status in self.invalid_status:
+                tm = get_send_recv_template("mpi_send", "mpi_irecv")
+                tm.set_description("InvalidParam-Status-" + wait_func, "Invalid Status: %s" % status)
+                if status == "MPI_STATUSES_IGNORE":
+                    tm.set_description("InvalidParam-Status-" + wait_func,
+                                       "Invalid Status: %s\n wrong usage of status_ignore vs statusES_ignore" % status)
+                # replace with wrong wait call
+                replace_wait(tm.get_instruction(identifier="WAIT"), tm, wait_func)
+                wait_call = tm.get_instruction(identifier="WAIT")
+                if wait_call.has_arg("status"):
+                    wait_call.set_arg("status", status)
+                else:
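+                    # the *all/*any/*some variants take array_of_statuses; there the singular
+                    # MPI_STATUS_IGNORE is the invalid value to test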
+                    s_to_use = status
+                    if status == "MPI_STATUSES_IGNORE":
+                        s_to_use = "MPI_STATUS_IGNORE"
+                    wait_call.set_arg("array_of_statuses", s_to_use)
+                wait_call.set_has_error()
+
+                yield tm
+
+        for status in self.invalid_status:
+            yield self.generate_probe("mpi_probe", status)
+            yield self.generate_probe("mpi_iprobe", status)
+            yield self.generate_mprobe("mpi_mprobe", "mpi_mrecv", True, status)
+            yield self.generate_mprobe("mpi_mprobe", "mpi_mrecv", False, status)
+            yield self.generate_mprobe("mpi_improbe", "mpi_mrecv", True, status)
+
+            # sendrecv cases
+            for func in self.sendrecv_funcs:
+                tm = get_send_recv_template(func, func)
+                tm.set_description("InvalidParam-Status-" + func, "Invalid Status: %s" % status)
+                if status == "MPI_STATUSES_IGNORE":
+                    tm.set_description("InvalidParam-Status-" + func,
+                                       "Invalid Status: %s\n wrong usage of status_ignore vs statusES_ignore" % status)
+                for call in tm.get_instruction(identifier="MPICALL", return_list=True):
+                    call.set_has_error()
+                    call.set_arg("status", status)
+                yield tm
diff --git a/scripts/errors/pt2pt/InvalidTag.py b/scripts/errors/pt2pt/InvalidTag.py
new file mode 100644
index 0000000000000000000000000000000000000000..5c50bfb5c551a403ec0c2151f01551f00c6b1556
--- /dev/null
+++ b/scripts/errors/pt2pt/InvalidTag.py
@@ -0,0 +1,114 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.MPICall import MPICall
+from scripts.Infrastructure.MPICallFactory import CorrectMPICallFactory
+from scripts.Infrastructure.TemplateFactory import get_send_recv_template, get_invalid_param_p2p_case, insert_probe
+
+
+class InvalidTagErrorP2P(ErrorGenerator):
+    invalid_tags = ["-1", "MPI_TAG_UB+1", CorrectParameterFactory.tag * 2, "MPI_ANY_TAG"]
+    send_funcs = ["mpi_send",
+                  "mpi_isend", "mpi_ssend", "mpi_issend", "mpi_rsend", "mpi_irsend", "mpi_bsend", "mpi_ibsend",
+                  "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init",
+                  ]
+
+    recv_funcs = ["mpi_recv", "mpi_irecv", "mpi_recv_init", "mpi_precv_init"]
+    sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate_impl(self, send_func, recv_func, check_receive):
+        for tag_to_use in self.invalid_tags:
+            tm = get_invalid_param_p2p_case("tag", tag_to_use, check_receive, send_func, recv_func)
+
+            error_string = "InvalidParam"
+            if tag_to_use == CorrectParameterFactory.tag * 2:
+                error_string = "ParamMatching"
+
+            if check_receive:
+                if tag_to_use == "MPI_ANY_TAG":
+                    # correct case
+                    continue
+                if tag_to_use == CorrectParameterFactory().tag * 2 and recv_func == "mpi_irecv":
+                    # combination repeated
+                    continue
+                tm.set_description(error_string + "-Tag-" + recv_func, "Invalid Tag: %s" % tag_to_use)
+            else:
+                tm.set_description(error_string + "-Tag-" + send_func, "Invalid Tag: %s" % tag_to_use)
+
+            if tag_to_use == CorrectParameterFactory.tag * 2:
+                # the mismatch involves both sides, so mark the other call as well
+                for c in tm.get_instruction("MPICALL", return_list=True):
+                    if isinstance(c, MPICall):
+                        c.set_has_error()
+
+            yield tm
+
+    def generate_mprobe(self, probe_to_use):
+        for tag_to_use in self.invalid_tags:
+            if tag_to_use == "MPI_ANY_TAG":
+                # correct case
+                continue
+
+            error_string = "InvalidParam"
+            if tag_to_use == CorrectParameterFactory.tag * 2:
+                error_string = "ParamMatching"
+
+            tm = get_send_recv_template("mpi_send", [probe_to_use, "mpi_mrecv"])
+
+            tm.set_description(error_string + "-Tag-" + probe_to_use, "Invalid Tag: %s" % tag_to_use)
+
+            for call in tm.get_instruction("MPICALL", return_list=True):
+                if call.get_rank_executing() == 0:
+                    call.set_arg("tag", tag_to_use)
+                    call.set_has_error()
+
+            yield tm
+
+    def generate_probe(self, probe_to_use):
+        for tag_to_use in self.invalid_tags:
+            if tag_to_use == "MPI_ANY_TAG":
+                # correct case
+                continue
+
+            error_string = "InvalidParam"
+            if tag_to_use == CorrectParameterFactory.tag * 2:
+                error_string = "ParamMatching"
+
+            tm = get_send_recv_template("mpi_send", "mpi_recv")
+
+            tm.set_description(error_string + "-Tag-" + probe_to_use, "Invalid Tag: %s" % tag_to_use)
+
+            recv_call = tm.get_instruction(identifier="MPICALL", rank_excuting=0)
+            probe_call = insert_probe(tm, probe_to_use, recv_call)
+            probe_call.set_arg("tag", tag_to_use)
+            probe_call.set_has_error()
+
+            yield tm
+
+    def generate(self, generate_level, real_world_score_table):
+
+        for send_func in self.send_funcs:
+            yield from self.generate_impl(send_func, "mpi_irecv", False)
+            if generate_level <= BASIC_TEST_LEVEL:
+                return
+        for recv_func in self.recv_funcs:
+            yield from self.generate_impl("mpi_send", recv_func, True)
+
+        for func in self.sendrecv_funcs:
+            # yield from self.generate_impl(func, func, True)
+            # yield from self.generate_impl(func, func, False)
+            pass
+
+        yield from self.generate_mprobe("mpi_mprobe")
+        yield from self.generate_mprobe("mpi_improbe")
+        yield from self.generate_probe("mpi_probe")
+        yield from self.generate_probe("mpi_iprobe")
diff --git a/scripts/errors/pt2pt/LocalConcurrency.py b/scripts/errors/pt2pt/LocalConcurrency.py
new file mode 100644
index 0000000000000000000000000000000000000000..7ef200d9bc1b9285be7cbc6e558c52ff385bef1e
--- /dev/null
+++ b/scripts/errors/pt2pt/LocalConcurrency.py
@@ -0,0 +1,63 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.TemplateFactory import get_send_recv_template
+
+sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]
+
+
+class LocalConcurrencyErrorP2P(ErrorGenerator):
+    functions_to_check = ["mpi_irecv",
+                          "mpi_isend", "mpi_issend", "mpi_irsend", "mpi_ibsend",
+                          "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init",
+                          "mpi_precv_init", "mpi_recv_init", "mpi_imrecv"
+                          ]
+    recv_funcs = ["mpi_irecv", "mpi_recv_init", "mpi_precv_init", "mpi_imrecv"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate(self, generate_level, real_world_score_table):
+        for send_func in self.functions_to_check:
+
+            check_receive = False
+            recv_func = "mpi_irecv"
+            if send_func in self.recv_funcs:
+                check_receive = True
+                recv_func = send_func
+                send_func = "mpi_isend"
+
+            recv_func_to_use = recv_func
+            if recv_func == "mpi_imrecv":
+                recv_func_to_use = ["mpi_mprobe", "mpi_imrecv"]
+            tm = get_send_recv_template(send_func, recv_func_to_use)
+
+            if check_receive:
+                tm.set_description("LocalConcurrency-receive-" + recv_func,
+                                   "usage of receive buffer before operation is completed")
+            else:
+                tm.set_description("LocalConcurrency-send-" + send_func,
+                                   "usage of send buffer before operation is completed")
+
+            conflicting_inst = Instruction("buf[2]=1;")
+            conflicting_inst.set_has_error()
+            kind = 1
+            if check_receive:
+                kind = 0
+            conflicting_inst.set_rank_executing(kind)
+            for c in tm.get_instruction("MPICALL", return_list=True):
+                if c.get_rank_executing() == kind:
+                    c.set_has_error()
+
+            for c in tm.get_instruction("WAIT", return_list=True):
+                if c.get_rank_executing() == kind:
+                    tm.insert_instruction(conflicting_inst, before_instruction=c)
+
+            yield tm
+            if generate_level <= BASIC_TEST_LEVEL:
+                return
diff --git a/scripts/errors/pt2pt/MessageRace.py b/scripts/errors/pt2pt/MessageRace.py
new file mode 100644
index 0000000000000000000000000000000000000000..22862efe3708120ae79dcc43bd6b5d564139c1b2
--- /dev/null
+++ b/scripts/errors/pt2pt/MessageRace.py
@@ -0,0 +1,95 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.MPICallFactory import CorrectMPICallFactory
+from scripts.Infrastructure.Template import TemplateManager
+
+
+class MessageRaceErrorAnyTag(ErrorGenerator):
+    # TODO do we need to generate it for all combinations of send and recv?
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate(self, generate_level, real_world_score_table):
+        tm = TemplateManager()
+        tm.set_description("MsgRace-ANY_TAG", "order of messages is indeterministic, may lead to a deadlock")
+
+        tm.register_instruction(CorrectParameterFactory().get_buffer_alloc())
+
+        # send part
+        tm.register_instruction("for(int i =0; i < 10; ++i) {", rank_to_execute=1)
+        tm.register_instruction("buf[0]=i;", rank_to_execute=1)
+        send_call = CorrectMPICallFactory().mpi_send()
+        send_call.set_arg("tag", "i")
+        tm.register_instruction(send_call, rank_to_execute=1)
+        tm.register_instruction("}", rank_to_execute=1)
+
+        # the final msg after the loop
+        send_call = CorrectMPICallFactory().mpi_send()
+
+        tm.register_instruction(send_call, rank_to_execute=1)
+        # recv part
+        tm.register_instruction("for(int i =0; i < 10; ++i) {", rank_to_execute=0)
+        recv_call = CorrectMPICallFactory().mpi_recv()
+        recv_call.set_arg("tag", "MPI_ANY_TAG")
+        recv_call.set_rank_executing(0)
+        tm.register_instruction(recv_call)
+
+        tm.register_instruction("if(buf[0]!=i){", rank_to_execute=0)
+        additional_recv = CorrectMPICallFactory().mpi_recv()
+        additional_recv.set_has_error()  # additional recv may lead to deadlock
+        tm.register_instruction(additional_recv, rank_to_execute=0)
+        tm.register_instruction(" }", rank_to_execute=0)  # end if
+        tm.register_instruction("}", rank_to_execute=0)  # end for
+
+        tm.register_instruction(CorrectParameterFactory().get_buffer_free())
+
+        yield tm
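+        # Rough sketch of the generated conflict (not verbatim output; counts/types/tags follow
+        # the factory defaults):
+        #   rank 1: for (i = 0; i < 10; ++i) { buf[0] = i; MPI_Send(buf, ..., /* tag = */ i, ...); }
+        #           MPI_Send(buf, ..., /* default tag */ ...);        // the extra message
+        #   rank 0: for (i = 0; i < 10; ++i) { MPI_Recv(buf, ..., MPI_ANY_TAG, ...);
+        #                                      if (buf[0] != i) MPI_Recv(...); /* may never match */ }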
+
+
+class MessageRaceErrorAnysource(ErrorGenerator):
+    # TODO do we need to generate it for all combinations of send and recv?
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate(self, generate_level, real_world_score_table):
+        tm = TemplateManager(min_ranks=3)
+        tm.set_description("MsgRace-ANY_SOURCE", "order of messages is indeterministic, may lead to a deadlock")
+
+        tm.register_instruction(CorrectParameterFactory().get_buffer_alloc())
+
+        # send part
+        tm.register_instruction("buf[0]=rank;", rank_to_execute='not0')
+        send_call = CorrectMPICallFactory().mpi_send()
+        tm.register_instruction(send_call, rank_to_execute='not0')
+        # rank 1 sends an additional msg
+        send_call = CorrectMPICallFactory().mpi_send()
+        tm.register_instruction(send_call, rank_to_execute=1)
+
+        # recv part
+        tm.register_instruction("for(int i =1; i < nprocs; ++i) {", rank_to_execute=0)
+        recv_call = CorrectMPICallFactory().mpi_recv()
+        recv_call.set_arg("source", "MPI_ANY_SOURCE")
+        tm.register_instruction(recv_call, rank_to_execute=0)
+        tm.register_instruction("if(buf[0]!=i){", rank_to_execute=0)
+        additional_recv = CorrectMPICallFactory().mpi_recv()
+        additional_recv.set_has_error()  # additional recv leads to deadlock
+        tm.register_instruction(additional_recv, rank_to_execute=0)
+        tm.register_instruction(" }", rank_to_execute=0)  # end if
+        tm.register_instruction("}", rank_to_execute=0)  # end for
+
+        tm.register_instruction(CorrectParameterFactory().get_buffer_free())
+
+        yield tm
diff --git a/scripts/errors/pt2pt/RecvBeforeSend.py b/scripts/errors/pt2pt/RecvBeforeSend.py
new file mode 100644
index 0000000000000000000000000000000000000000..f105894090b1e21ddcd866170fcb01513d540059
--- /dev/null
+++ b/scripts/errors/pt2pt/RecvBeforeSend.py
@@ -0,0 +1,123 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+import itertools
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Instruction import Instruction
+from scripts.Infrastructure.MPICallFactory import CorrectMPICallFactory
+from scripts.Infrastructure.TemplateFactory import get_send_recv_template, insert_probe
+
+
+class RecvBeforeSend(ErrorGenerator):
+    recv_funcs = ["mpi_recv", "mpi_irecv", "mpi_recv_init", "mpi_precv_init"]
+    probe_recv_funcs = ["mpi_mprobe", "mpi_improbe"]
+    probe_funcs = ["mpi_probe", "mpi_iprobe"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def insert_reverse_msg(self, tm):
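+        # insert an additional message in the reverse direction (rank 0 sends, rank 1 receives),
+        # placed just before the buffer free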
+        send = CorrectMPICallFactory.mpi_send()
+        send.set_arg("dest", "1")
+        send.set_identifier("REVERSE_MSG_SEND")
+        send.set_rank_executing(0)
+        tm.insert_instruction(send, before_instruction="FREE", before_first_of_list=True)
+        recv = CorrectMPICallFactory.mpi_recv()
+        recv.set_arg("source", "0")
+        recv.set_rank_executing(1)
+        recv.set_identifier("REVERSE_MSG_RECEIVE")
+        tm.insert_instruction(recv, before_instruction="FREE", before_first_of_list=True)
+
+    def generate_impl(self, send_func, recv_func):
+
+        tm = get_send_recv_template(send_func, recv_func)
+
+        tm.set_description("CallOrdering-" + send_func + "-" + recv_func,
+                           "Call Ordering: both ranks try to receive before sending")
+
+        self.insert_reverse_msg(tm)
+        # this is a correct case with no deadlock
+
+        # introduce deadlock by moving rank 1s send after the recv
+        rank_1_send_instr = (
+                [c for c in tm.get_instruction("MPICALL", return_list=True) if c.get_rank_executing() == 1]
+                + [c for c in tm.get_instruction("START", return_list=True) if c.get_rank_executing() == 1])
+
+        wait_calls = [c for c in tm.get_instruction("WAIT", return_list=True) if c.get_rank_executing() == 1]
+
+        for c in rank_1_send_instr + wait_calls:
+            tm.remove_instruction(instruction=c)
+        tm.insert_instruction(rank_1_send_instr, before_instruction="FREE", before_first_of_list=True)
+        tm.insert_instruction(wait_calls, before_instruction="FREE", before_first_of_list=True)
+
+        yield tm
+        assert send_func == "mpi_send"
+        tm.set_description("MissingCall-" + send_func, "Deadlock, as call to send is missing")
+
+        for c in tm.get_instruction("REVERSE_MSG_SEND", return_list=True)+tm.get_instruction("MPICALL", return_list=True):
+            if c.get_function() == send_func:
+                tm.remove_instruction(instruction=c)
+        yield tm
+
+    def generate_probe(self, probe_to_use):
+        tm = get_send_recv_template("mpi_send", "mpi_recv")
+
+        tm.set_description("CallOrdering-" + probe_to_use + "-" + "mpi_send",
+                           "Call Ordering: probe for message before it is going to be send")
+
+        self.insert_reverse_msg(tm)
+
+        # add the probe to rank 1 before the send
+        recv_call = tm.get_instruction(identifier="MPICALL", rank_excuting=0)
+        probe_call = insert_probe(tm, probe_to_use, recv_call)
+        probe_call.set_arg("source", "0")  # recv from 1 is the default
+        probe_call.set_has_error()
+
+        yield tm
+
+    def generate_mprobe(self, probe_to_use):
+        tm = get_send_recv_template("mpi_send", "mpi_recv")
+        self.insert_reverse_msg(tm)
+
+        tm.add_stack_variable("MPI_Message")
+
+        tm.set_description("CallOrdering-" + probe_to_use + "-" + "mpi_send",
+                           "Call Ordering: probe for message before it is going to be send")
+
+        # use mrecv instead of recv
+        mrecv = CorrectMPICallFactory.mpi_mrecv()
+        mrecv.set_rank_executing(1)
+        tm.replace_instruction(mrecv, identifier="REVERSE_MSG_RECEIVE")
+
+        # add the probe to rank 1 before the send
+        probecall = CorrectMPICallFactory.get(probe_to_use)
+        probecall.set_arg("source", "0")  # recv from 1 is the default
+        probecall.set_has_error()
+        probecall.set_rank_executing(1)
+        if probe_to_use == "mpi_improbe":
+            tm.add_stack_variable("int")  # the flag
+            tm.insert_instruction(Instruction("int flag=0;", rank=1), before_instruction="MPICALL",
+                                  before_first_of_list=True)
+            tm.insert_instruction(Instruction("while (!flag){", rank=1), before_instruction="MPICALL",
+                                  before_first_of_list=True)
+            tm.insert_instruction(probecall, before_instruction="MPICALL", before_first_of_list=True)
+            tm.insert_instruction(Instruction("}", rank=1), before_instruction="MPICALL",
+                                  before_first_of_list=True)  # end while
+        else:
+            tm.insert_instruction(probecall, before_instruction="MPICALL", before_first_of_list=True)
+        yield tm
+
+    def generate(self, generate_level, real_world_score_table):
+        for recv_func in self.recv_funcs:
+            yield from self.generate_impl("mpi_send", recv_func)
+            if generate_level <= BASIC_TEST_LEVEL:
+                return
+
+        yield from self.generate_probe("mpi_probe")
+        yield from self.generate_probe("mpi_iprobe")
+        yield from self.generate_mprobe("mpi_mprobe")
+        yield from self.generate_mprobe("mpi_improbe")
diff --git a/scripts/errors/pt2pt/Unmatched.py b/scripts/errors/pt2pt/Unmatched.py
new file mode 100644
index 0000000000000000000000000000000000000000..165da282adb058e6d4cae9a585e64314d1151cc7
--- /dev/null
+++ b/scripts/errors/pt2pt/Unmatched.py
@@ -0,0 +1,116 @@
+#! /usr/bin/python3
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
+from scripts.Infrastructure.Template import TemplateManager
+from scripts.Infrastructure.TemplateFactory import get_send_recv_template
+
+
+class UnmatchedP2Pcall(ErrorGenerator):
+    send_funcs = ["mpi_send",
+                  "mpi_isend", "mpi_ssend", "mpi_issend", "mpi_rsend", "mpi_irsend", "mpi_bsend", "mpi_ibsend",
+                  "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init"
+                  ]
+    recv_funcs = ["mpi_recv", "mpi_irecv", "mpi_recv_init", "mpi_precv_init"]
+    probe_recv_funcs = ["mpi_mprobe", "mpi_improbe"]
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate_impl(self, func_to_check, check_receive):
+
+        send_func = "mpi_send"
+        recv_func = "mpi_irecv"
+        if check_receive:
+            recv_func = func_to_check
+            if func_to_check in self.probe_recv_funcs:
+                recv_func = [func_to_check, "mpi_mrecv"]
+        else:
+            send_func = func_to_check
+
+        tm = get_send_recv_template(send_func, recv_func)
+
+        tm.set_description("Correct-" + func_to_check,
+                           "correct message exchange")
+        yield tm
+
+        tm.set_description("CallOrdering-unmatched-" + func_to_check,
+                           "The %s does not have a counterpart" % ("receive" if check_receive else "send"))
+
+        calls = tm.get_instruction(identifier="MPICALL", return_list=True)
+        for call in calls:
+            call.set_has_error()
+
+        # remove the send/recv counterpart: the communication call and its corresponding wait call
+        kind = 0
+        if check_receive:
+            kind = 1
+        for call in calls + tm.get_instruction(identifier="WAIT", return_list=True):
+            if call.get_rank_executing() == kind:
+                tm.remove_instruction(instruction=call)
+        yield tm
+
+    def generate(self, generate_level, real_world_score_table):
+        for func in self.recv_funcs + self.probe_recv_funcs:
+            yield from self.generate_impl(func, True)
+            if generate_level <= BASIC_TEST_LEVEL:
+                return
+        for func in self.send_funcs:
+            yield from self.generate_impl(func, False)
+
+
+class ComplexMissmach(ErrorGenerator):
+
+    def __init__(self):
+        pass
+
+    def get_feature(self):
+        return ["P2P"]
+
+    def generate(self, generate_level, real_world_score_table):
+        code = """
+#define MSG_TAG_A 124523
+#define N 10
+#define EVEN 0
+
+  int buffer[N];
+  int i;
+
+  MPI_Request request;
+  MPI_Status status;
+
+  int countEvenNumbers = 0;
+
+  for (i = 0; i < 10; i++) {
+    if (rank == 0) {
+      int tag_sender = i * N;
+      MPI_Isend(buffer, 1, MPI_INT, 1, tag_sender, MPI_COMM_WORLD, &request);
+      MPI_Wait(&request, &status);
+    }
+
+    else if (rank == 1) {
+      int tag_receiver = i * N;
+
+      if (i % 2 == EVEN) {
+        (countEvenNumbers)++;
+      }
+
+      if ((countEvenNumbers) == (N / 2)) {
+        tag_receiver++; // mismatch
+      }
+
+      printf(\"Count Even Numbers: %d \\n\", countEvenNumbers);
+      MPI_Irecv(buffer, 1, MPI_INT, 0, tag_receiver, MPI_COMM_WORLD, &request);
+      MPI_Wait(&request, &status);
+    }
+  }
+"""
+        tm = TemplateManager()
+        tm.register_instruction(code)
+
+        tm.set_description("Matching-tag-mpi_send", "Missmatching message tags in iteration 10")
+        yield tm
diff --git a/scripts/errors/rma/EpochLifeCycle.py b/scripts/errors/rma/EpochLifeCycle.py
index 789913e7bd912582a4bee01ba0175195e6d73cac..3c0a0d8d807a2cc72e57c698ff9f8fb9bcf5664d 100644
--- a/scripts/errors/rma/EpochLifeCycle.py
+++ b/scripts/errors/rma/EpochLifeCycle.py
@@ -1,4 +1,5 @@
 #! /usr/bin/python3
+from __future__ import annotations
 from scripts.Infrastructure.Variables import *
 
 from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
@@ -18,7 +19,7 @@ class EpochLifeCycleRMA(ErrorGenerator):
     def get_feature(self):
         return ["RMA"]
 
-    def generate(self, generate_level):
+    def generate(self, generate_level, real_world_score_table):
         for sync_mode in ["fence", "winlockall", "winlock"]:
             for rma_func in ["mpi_get", "mpi_put", "mpi_accumulate"]:
                 # epoch is not closed
@@ -46,7 +47,8 @@ class EpochLifeCycleRMA(ErrorGenerator):
                 tm.insert_instruction(new_instruction=startrmaepoch, after_instruction="STARTRMAEPOCH")
                 tm.set_description("EpochLifeCycle", "RMA epoch opened twice")
                 yield tm
-
+                if generate_level <= BASIC_TEST_LEVEL:
+                    break
 
         for rma_func in ["mpi_get", "mpi_put", "mpi_accumulate"]:
             # mix fence with lockall, this should not be done at all
@@ -69,5 +71,6 @@ class EpochLifeCycleRMA(ErrorGenerator):
             tm.insert_instruction(new_instruction=lock, after_instruction="STARTRMAEPOCH")
             tm.insert_instruction(new_instruction=unlock, before_instruction="ENDRMAEPOCH")
             tm.set_description("EpochLifeCycle", "Mixing fence with lock synchronization")
-
             yield tm
+            if generate_level <= BASIC_TEST_LEVEL:
+                break
diff --git a/scripts/errors/rma/GlobalConcurrency.py b/scripts/errors/rma/GlobalConcurrency.py
index dd4bd2756454e21b88cc23b871833b87b0b56301..642801bee79b2627ef5398a9263bb3cb8c89f1c5 100644
--- a/scripts/errors/rma/GlobalConcurrency.py
+++ b/scripts/errors/rma/GlobalConcurrency.py
@@ -1,4 +1,6 @@
 #! /usr/bin/python3
+from __future__ import annotations
+
 from scripts.Infrastructure.Variables import *
 
 from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
@@ -194,7 +196,7 @@ class GlobalConcurrencyErrorRMA(ErrorGenerator):
         else:
             return ([], self.buf_instructions[name], None)
 
-    def generate(self, generate_level):
+    def generate(self, generate_level, real_world_score_table):
 
         if generate_level == 1:
             # only basic calls
@@ -238,7 +240,7 @@ class GlobalConcurrencyErrorRMA(ErrorGenerator):
 
         sync_modes = [self.fence, self.lockall, self.lockflush, self.request, self.pscw]
 
-        if generate_level <= 2:
+        if generate_level <= SUFFICIENT_TEST_LEVEL:
             # go through all sync modes, but only one access combination per sync mode, fill with fence
             combos = itertools.zip_longest(
                 remote_access_combinations, sync_modes, fillvalue=self.fence)
diff --git a/scripts/errors/rma/InvalidBuffer.py b/scripts/errors/rma/InvalidBuffer.py
index 77ff11daedc5e09d40ecda426b4065f09d200c57..c97a25315d2ef9ce263bb5aa2472fd8614775f6b 100644
--- a/scripts/errors/rma/InvalidBuffer.py
+++ b/scripts/errors/rma/InvalidBuffer.py
@@ -1,4 +1,5 @@
 #! /usr/bin/python3
+from __future__ import annotations
 from scripts.Infrastructure.Variables import *
 
 from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
@@ -16,7 +17,7 @@ class InvalidBufferErrorRMA(ErrorGenerator):
     def get_feature(self):
         return ["RMA"]
 
-    def generate(self, generate_level):
+    def generate(self, generate_level, real_world_score_table):
         rma_funcs = ["mpi_get", "mpi_rget", "mpi_put", "mpi_rput", "mpi_accumulate", "mpi_raccumulate",
                  "mpi_get_accumulate", "mpi_rget_accumulate", "mpi_fetch_and_op", "mpi_compare_and_swap"]
 
diff --git a/scripts/errors/rma/InvalidDataType.py b/scripts/errors/rma/InvalidDataType.py
index 7d53f364bf2e93a14dd83aa2210c9f013a7dc07d..d8496e755044353b87246270dee3252c8a3d25bd 100644
--- a/scripts/errors/rma/InvalidDataType.py
+++ b/scripts/errors/rma/InvalidDataType.py
@@ -1,4 +1,5 @@
 #! /usr/bin/python3
+from __future__ import annotations
 from scripts.Infrastructure.Variables import *
 
 from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
@@ -15,7 +16,7 @@ class InvalidDatatypeErrorRMA(ErrorGenerator):
     def get_feature(self):
         return ["RMA"]
 
-    def generate(self, generate_level):
+    def generate(self, generate_level, real_world_score_table):
         rma_funcs = []
         if generate_level <= BASIC_TEST_LEVEL:
             rma_funcs = ["mpi_get", "mpi_put", "mpi_accumulate"]
diff --git a/scripts/errors/rma/InvalidRank.py b/scripts/errors/rma/InvalidRank.py
index 4ffe6e10df92100a84e7efb0c050c067ba51b4d0..6890552bea14be7173a9279c61d0d49dc5b7382a 100644
--- a/scripts/errors/rma/InvalidRank.py
+++ b/scripts/errors/rma/InvalidRank.py
@@ -1,4 +1,5 @@
 #! /usr/bin/python3
+from __future__ import annotations
 from scripts.Infrastructure.Variables import *
 
 from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
@@ -17,7 +18,7 @@ class InvalidRankErrorRMA(ErrorGenerator):
     def get_feature(self):
         return ["RMA"]
 
-    def generate(self, generate_level):
+    def generate(self, generate_level, real_world_score_table):
         rma_funcs = []
         if generate_level <= BASIC_TEST_LEVEL:
             rma_funcs = ["mpi_get", "mpi_put", "mpi_accumulate"]
diff --git a/scripts/errors/rma/InvalidWin.py b/scripts/errors/rma/InvalidWin.py
index d7a725f42c432a3d67be20491589fd21aa5d2c22..b520ca0cc16fa6b5038229a22566d27adbd9ca97 100644
--- a/scripts/errors/rma/InvalidWin.py
+++ b/scripts/errors/rma/InvalidWin.py
@@ -1,4 +1,5 @@
 #! /usr/bin/python3
+from __future__ import annotations
 from scripts.Infrastructure.Variables import *
 
 from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
@@ -17,7 +18,7 @@ class InvalidWinErrorRMA(ErrorGenerator):
     def get_feature(self):
         return ["RMA"]
 
-    def generate(self, generate_level):
+    def generate(self, generate_level, real_world_score_table):
         tm = get_rma_template()
         tm.remove_instruction("RMA_WIN_ALLOC") # remove window allocation
         # opening epoch on non-initialized window is the actual error
@@ -28,7 +29,7 @@ class InvalidWinErrorRMA(ErrorGenerator):
 
         # free window too early
         tm = get_rma_template()
-        win_free_early = Instruction(f"MPI_Win_free(&{CorrectParameterFactory().get("win")});")
+        win_free_early = Instruction(f"MPI_Win_free(&{CorrectParameterFactory().get('win')});")
         win_free_early.set_has_error()
         tm.insert_instruction(new_instruction=win_free_early, before_instruction="STARTRMAEPOCH")
 
diff --git a/scripts/errors/rma/LocalConcurrency.py b/scripts/errors/rma/LocalConcurrency.py
index a8e636b4f1637a9dfe2941fe4b8920c5268f47d6..de841d3b23e9c3004f029fd233d851a0cc56b8ab 100644
--- a/scripts/errors/rma/LocalConcurrency.py
+++ b/scripts/errors/rma/LocalConcurrency.py
@@ -1,4 +1,6 @@
 #! /usr/bin/python3
+from __future__ import annotations
+
 from scripts.Infrastructure.Variables import *
 
 from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
@@ -247,11 +249,11 @@ class LocalConcurrencyErrorRMA(ErrorGenerator):
         else:
             return ([], self.buf_instructions[name], None)
 
-    def generate(self, generate_level):
+    def generate(self, generate_level, real_world_score_table):
         # build set of calls based on generate level, for level 1 just a few basic calls,
         # for level >= 2 all calls
 
-        if generate_level == 1:
+        if generate_level <= BASIC_TEST_LEVEL:
             # only basic calls
             local_origin_addr_read = ["mpi_put", "mpi_accumulate"]
             local_origin_addr_write = ["mpi_get"]
@@ -288,7 +290,7 @@ class LocalConcurrencyErrorRMA(ErrorGenerator):
 
         sync_modes = [self.fence, self.lockallflush, self.lockallflushlocal, self.lockflush, self.lockflushlocal, self.lockunlock, self.request, self.pscw]
 
-        if generate_level <= 2:
+        if generate_level <= SUFFICIENT_TEST_LEVEL:
             # go through all sync modes, but only one access combination per sync mode, fill with fence
             combos = itertools.zip_longest(
                 local_access_combinations, sync_modes, fillvalue=self.fence)
diff --git a/scripts/main.py b/scripts/main.py
index 990682746ee1e8cbbe4fc36ffefa48b56dfc9711..2844c20c3f3c621ef11b677d989a0a83d9c24423 100644
--- a/scripts/main.py
+++ b/scripts/main.py
@@ -4,13 +4,19 @@ import os
 import argparse
 
 from scripts.Infrastructure.GeneratorManager import GeneratorManager
+from scripts.Infrastructure.Variables import *
+
+from scripts.Infrastructure.ScoingModule.ScoringTable import get_scoring_table
+import pandas as pd
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser("generate MBB Testcases")
-    parser.add_argument("--outpath", action="store", default="gencodes",required=False,
+    parser.add_argument("--outpath", action="store", default="gencodes", required=False,
                         help="Path to generate the codes into")
     parser.add_argument("--level", action="store", default=1, type=int, required=False,
-                        help="Level of tests to generate. 1: simple cases, 2: caseswith sufficient coverage, 3: all possible usage combinations")
+                        help="Level of tests to generate. 1: simple cases, 2: caseswith sufficient coverage, 3: usage combinations found in real wolrd dataset, 4: all possible usage combinations")
+    parser.add_argument("--real_world_data", default=None, required=False, action="store",
+                        help="file of real world dataset, only relevant if level is 3")
     parser.add_argument("--mpi_version", default="4.1", required=False, help="maximum MPI versions to use")
 
     debug = parser.add_argument_group("Debug", "Arguments used to debug and test the generation progess")
@@ -24,7 +30,12 @@ if __name__ == "__main__":
 
     gencodes_dir = ARGS.outpath
 
-    gm = GeneratorManager(ARGS.generator_dir)
+    score_table = None
+    if ARGS.level in REAL_WORLD_FILTERING_LEVELS:
+        assert ARGS.real_world_data is not None, "--real_world_data is required for this level"
+        print("Read real world dataset and build scoring table")
+        score_table = get_scoring_table(pd.read_csv(ARGS.real_world_data, header=0, low_memory=False), c_only=True)
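+        # Hypothetical invocation exercising this path (file name is an example only),
+        # run from the repository root:
+        #   python -m scripts.main --level 3 --real_world_data real_world_data.csv --outpath gencodes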
+
+    gm = GeneratorManager(ARGS.generator_dir, score_table)
 
     # remove all testcases from previous execution (ease of debugging)
     if ARGS.remove_previous_generation_results: