Commit f603dbf4 authored by Jammer, Tim

Merge branch 'devel-TJ' into 'main'

Devel tj

See merge request !4
parents 82e0f8be b11b5e10
@@ -2,7 +2,7 @@
alloc_template = """
@{NAME}@ = (@{TYPE}@*) @{FUNCTION}@(@{NUM}@ @{SEP}@ sizeof(@{TYPE}@));
@{TYPE}@* @{NAME}@ = (@{TYPE}@*) @{FUNCTION}@(@{NUM}@ @{SEP}@ sizeof(@{TYPE}@));
"""
"""
@@ -50,7 +50,7 @@ class AllocCall:
.replace("@{TYPE}@", self._type)
.replace("@{FUNCTION}@", func)
.replace("@{NUM}@", str(self._num_elements))
.replace("@{SEP}", delim))
.replace("@{SEP}@", delim))
def set_num_elements(self, num_elements):
self._num_elements = num_elements
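A minimal sketch of what the corrected template produces once every placeholder is substituted, including the fixed @{SEP}@ marker; the concrete values are illustrative, not taken from the repository:

alloc_template = """
@{TYPE}@* @{NAME}@ = (@{TYPE}@*) @{FUNCTION}@(@{NUM}@ @{SEP}@ sizeof(@{TYPE}@));
"""

# illustrative values; in the generator they come from the AllocCall instance
line = (alloc_template
        .replace("@{NAME}@", "buf")
        .replace("@{TYPE}@", "int")
        .replace("@{FUNCTION}@", "malloc")
        .replace("@{NUM}@", "10")
        .replace("@{SEP}@", "*"))
print(line)  # int* buf = (int*) malloc(10 * sizeof(int));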
#! /usr/bin/python3
from scripts.Infrastructure.MPICallFactory import MPICallFactory
from scripts.Infrastructure.Template import InstructionBlock
from scripts.Infrastructure.AllocCall import AllocCall
from scripts.Infrastructure.AllocCall import AllocCall, get_free
class CorrectParameterFactory:
@@ -19,27 +19,35 @@ class CorrectParameterFactory:
b.register_operation(AllocCall(self.dtype[0], self.buf_size, self.buf_var_name, use_malloc=False), kind='all')
return b
def get_buffer_free(self):
b = InstructionBlock("free")
b.register_operation(get_free(AllocCall(self.dtype[0], self.buf_size, self.buf_var_name, use_malloc=False)),
kind='all')
return b
def get(self, param, func=None):
if param == "BUFFER" or param == "buf" or param == "buffer" or param == "sendbuf" or param == "recvbuf":
if param in ["BUFFER", "buf", "buffer", "sendbuf", "recvbuf"]:
return self.buf_var_name
if param == "COUNT" or param == "count":
if param in ["COUNT", "count", "sendcount", "recvcount"]:
return str(self.buf_size)
if param == "DATATYPE" or param == "datatype":
if param in ["DATATYPE", "datatype", "sendtype", "recvtype"]:
return self.dtype[1]
if param == "DEST" or param == "dest":
if param in ["DEST", "dest"]:
return "0"
if param == "SRC" or param == "source":
if param in ["SRC", "source"]:
return "1"
if param == "RANK" or param == "root":
if param in ["RANK", "root"]:
return "0"
if param == "TAG" or param == "tag":
if param in ["TAG", "tag", "sendtag", "recvtag"]:
return str(self.tag)
if param == "COMM" or param == "comm":
if param in ["COMM", "comm"]:
return "MPI_COMM_WORLD"
if param == "STATUS" or param == "status":
if param in ["STATUS", "status"]:
return "MPI_STATUS_IGNORE"
if param == "OPERATION" or param == "op":
if param in ["OPERATION", "op"]:
return "MPI_SUM"
if param in ["REQUEST", "request"]:
return "&request"
print("Not Implemented: " + param)
assert False, "Param not known"
@@ -3,6 +3,7 @@ import inspect
import os
import importlib
import importlib.util
import subprocess
# for printing a nice progress bar
import tqdm
@@ -40,11 +41,16 @@ class GeneratorManager:
return case_name + "-" + str(num).zfill(digits_to_use) + suffix
def generate(self, outpath, filterlist_=None, print_progress_bar=True, overwrite=True,generate_full_set=False):
def generate(self, outpath, filterlist_=None, print_progress_bar=True, overwrite=True, generate_full_set=False,
try_compile=False):
filterlist = filterlist_
if filterlist is None:
filterlist = featurelist
if try_compile:
mpicc = os.environ.get('MPICC')
assert mpicc, "Environment var MPICC not set"
print("Generate Testcases")
# use generator if at least one feature of the generator matches the filterlist
@@ -83,6 +89,10 @@
with open(full_name, "w") as text_file:
text_file.write(str(result_error))
if try_compile:
subprocess.check_call([mpicc, full_name])
# raises CalledProcessError if code does not compile
cases_generated += 1
if print_progress_bar:
progress_bar.update(1)
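A hedged sketch of how the new try_compile flag is meant to be driven; MPICC must point to an MPI compiler wrapper, and subprocess.check_call raises CalledProcessError for any generated case that does not compile:

import os
from scripts.Infrastructure.GeneratorManager import GeneratorManager

os.environ.setdefault("MPICC", "mpicc")   # assumed wrapper name; adjust for your MPI installation
gm = GeneratorManager("./errors")
gm.generate("../gencodes", try_compile=True)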
#! /usr/bin/python3
from scripts.Infrastructure.AllocCall import AllocCall
from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory
from scripts.Infrastructure.InstructionBlock import InstructionBlock
from scripts.Infrastructure.MPICall import MPI_Call
from scripts.Infrastructure.MPICallFactory import CorrectMPICallFactory
from scripts.Infrastructure.MPICallFactory import CorrectMPICallFactory, MPICallFactory
from scripts.Infrastructure.Template import TemplateManager
@@ -25,13 +26,40 @@ def get_send_recv_template(send_func, recv_func):
The function is contained in a block named MPICALL with separate calls for rank 0 and rank 1)
"""
assert send_func == "mpi_send" or send_func == "mpi_ssend"
assert recv_func == "mpi_recv"
# currently supported:
assert send_func in ["mpi_send", "mpi_ssend", "mpi_isend", "mpi_issend", "mpi_rsend", "mpi_irsend",
"mpi_bsend", "mpi_ibsend", "mpi_sendrecv", "mpi_sendrecv_replace", "mpi_isendrecv",
"mpi_isendrecv_replace"]
assert recv_func in ["mpi_recv", "mpi_irecv", "mpi_sendrecv", "mpi_sendrecv_replace", "mpi_isendrecv",
"mpi_isendrecv_replace"]
sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]
if send_func in sendrecv_funcs or recv_func in sendrecv_funcs:
assert recv_func == send_func
# the default template generation only supports the case where both sides use the same mechanism
if send_func in ["mpi_rsend", "mpi_irsend"]:
assert recv_func == "mpi_irecv" # else: deadlock
tm = TemplateManager()
cf = CorrectParameterFactory()
tm.register_instruction_block(cf.get_buffer_alloc())
if send_func in ["mpi_bsend", "mpi_ibsend"]:
b = InstructionBlock("buf_attach")
buf_size = "sizeof(int)*10 + MPI_BSEND_OVERHEAD"
b.register_operation(AllocCall("char", buf_size, "mpi_buf"))
b.register_operation(MPICallFactory().mpi_buffer_attach("mpi_buf", buf_size))
tm.register_instruction_block(b)
if send_func in sendrecv_funcs:
# split send and recv buffers
b = cf.get_buffer_alloc()
b.get_operation('all', 0).set_name("recv_buf")
tm.register_instruction_block(b)
cmpicf = CorrectMPICallFactory()
send_func_creator_function = getattr(cmpicf, send_func)
s = send_func_creator_function()
@@ -39,14 +67,63 @@ def get_send_recv_template(send_func, recv_func):
recv_func_creator_function = getattr(cmpicf, recv_func)
r = recv_func_creator_function()
if send_func in sendrecv_funcs:
# sending the second msg
s.set_arg("source", "0")
r.set_arg("dest", "1")
if s.has_arg("recvbuf"):
s.set_arg("recvbuf", "recv_buf")
if r.has_arg("recvbuf"):
r.set_arg("recvbuf", "recv_buf")
if send_func.startswith("mpi_i") or recv_func.startswith("mpi_i"):
b = InstructionBlock("MPI_REQUEST")
b.register_operation("MPI_Request request;", 'all')
tm.register_instruction_block(b)
if send_func in ["mpi_rsend", "mpi_irsend"]:
b = InstructionBlock("SYNC")
b.register_operation(CorrectMPICallFactory().mpi_barrier(), 1)
tm.register_instruction_block(b)
b = InstructionBlock("MPICALL")
b.register_operation(s, 1)
b.register_operation(r, 0)
tm.register_instruction_block(b)
if send_func in ["mpi_rsend", "mpi_irsend"]:
b = InstructionBlock("SYNC")
b.register_operation(CorrectMPICallFactory().mpi_barrier(), 1)
tm.register_instruction_block(b)
if send_func.startswith("mpi_i"):
b = InstructionBlock("WAIT")
b.register_operation(CorrectMPICallFactory().mpi_wait(), 1)
tm.register_instruction_block(b)
if recv_func.startswith("mpi_i"):
b = InstructionBlock("WAIT")
b.register_operation(CorrectMPICallFactory().mpi_wait(), 0)
tm.register_instruction_block(b)
b.register_operation(cf.get_buffer_free())
if send_func in ["mpi_bsend", "mpi_ibsend"]:
b = InstructionBlock("buf_detach")
b.register_operation("int freed_size;")
b.register_operation(MPICallFactory().mpi_buffer_detach("mpi_buf", "&freed_size"))
b.register_operation("free(mpi_buf);")
tm.register_instruction_block(b)
if send_func in sendrecv_funcs:
# split send and recv buffers
b = InstructionBlock("buf_free")
b.register_operation("free(recv_buf);")
tm.register_instruction_block(b)
return tm
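A hedged usage sketch of the extended factory; str(tm) is assumed to render the generated C test case, as GeneratorManager writes str(...) of the returned template to disk:

from scripts.Infrastructure.TemplateFactory import get_send_recv_template

tm = get_send_recv_template("mpi_isend", "mpi_irecv")   # nonblocking pair: adds MPI_REQUEST and WAIT blocks
send_call = tm.get_block("MPICALL").get_operation(kind=1, index=0)
send_call.set_arg("tag", "42")                          # tweak the send side on rank 1
print(tm)                                               # assumed to render the complete C source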
def get_collective_template(collective_func, seperate=True):
"""
Constructs a default template for the given MPI collective
@@ -73,4 +150,6 @@ def get_collective_template(collective_func,seperate=True):
tm.register_instruction_block(b)
b.register_operation(cf.get_buffer_free())
return tm
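A hedged usage sketch for the collective factory; with seperate=False the call appears to be registered once for all ranks, which is what the error generators below rely on when they address kind='all':

from scripts.Infrastructure.TemplateFactory import get_collective_template

tm = get_collective_template("mpi_bcast", seperate=False)
call = tm.get_block("MPICALL").get_operation(kind='all', index=0)
call.set_arg("root", "0")   # the root argument is what the InvalidRank generators overwrite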
#! /usr/bin/python3
from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
from scripts.Infrastructure.InstructionBlock import InstructionBlock
from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
from scripts.Infrastructure.Template import TemplateManager
from scripts.Infrastructure.TemplateFactory import get_send_recv_template, get_collective_template
class InvalidRankErrorColl(ErrorGenerator):
invalid_ranks = ["-1", "nprocs", "MPI_PROC_NULL"]
functions_to_use = ["mpi_reduce", "mpi_bcast"]
def __init__(self):
pass
def get_num_errors(self):
return len(self.invalid_ranks) * len(self.functions_to_use)
# the number of errors to produce in the extended mode (all possible combinations)
def get_num_errors_extended(self):
return len(self.invalid_ranks) * len(self.functions_to_use)
def get_feature(self):
return ["COLL"]
def generate(self, i):
rank_to_use = self.invalid_ranks[i // len(self.functions_to_use)]
func_to_use = self.functions_to_use[i % len(self.functions_to_use)]
tm = get_collective_template(func_to_use, seperate=False)
arg_to_replace = "root"
tm.set_description("InvalidParam-Rank-"+func_to_use, "Invalid Rank: %s" % rank_to_use)
tm.get_block("MPICALL").get_operation(kind='all', index=0).set_arg(arg_to_replace, rank_to_use)
tm.get_block("MPICALL").get_operation(kind='all', index=0).set_has_error()
return tm
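A minimal sketch of how the flat error index i is decomposed into a (rank, collective) pair by the generator above:

invalid_ranks = ["-1", "nprocs", "MPI_PROC_NULL"]
functions_to_use = ["mpi_reduce", "mpi_bcast"]

for i in range(len(invalid_ranks) * len(functions_to_use)):
    rank_to_use = invalid_ranks[i // len(functions_to_use)]
    func_to_use = functions_to_use[i % len(functions_to_use)]
    print(i, rank_to_use, func_to_use)
# 0: -1/mpi_reduce, 1: -1/mpi_bcast, 2: nprocs/mpi_reduce, ...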
@@ -5,63 +5,64 @@ from scripts.Infrastructure.InstructionBlock import InstructionBlock
from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
from scripts.Infrastructure.Template import TemplateManager
from scripts.Infrastructure.TemplateFactory import get_send_recv_template, get_collective_template
from scripts.Infrastructure.TemplateFactory import get_send_recv_template
from itertools import chain
sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]
class InvalidRankErrorP2P(ErrorGenerator):
invalid_ranks = ["-1", "size", "MPI_PROC_NULL"]
invalid_ranks = ["-1", "nprocs", "MPI_PROC_NULL"]
functions_to_check = ["mpi_send",
"mpi_recv", "mpi_irecv",
"mpi_isend", "mpi_ssend", "mpi_issend", "mpi_rsend", "mpi_irsend", "mpi_bsend", "mpi_ibsend"
] + sendrecv_funcs + sendrecv_funcs
# check sendrecv funcs twice: the send and the recv part
recv_funcs = ["mpi_recv", "mpi_irecv"] + sendrecv_funcs
def __init__(self):
pass
def get_num_errors(self):
return len(self.invalid_ranks)
# send + receive = only check the first two functions
return len(self.invalid_ranks) * 2
# the number of errors to produce in the extended mode (all possible combinations)
def get_num_errors_extended(self):
return len(self.invalid_ranks)
return len(self.invalid_ranks) * len(self.functions_to_check)
def get_feature(self):
return ["P2P"]
def generate(self, i):
rank_to_use = self.invalid_ranks[i]
tm = get_send_recv_template("mpi_send", "mpi_recv")
tm.set_description("InvalidParam-Rank-MPI_Send", "Invalid Rank: %s" % rank_to_use)
rank_to_use = self.invalid_ranks[i // len(self.functions_to_check)]
send_func = self.functions_to_check[i % len(self.functions_to_check)]
check_receive = False
recv_func = "mpi_irecv"
if send_func in self.recv_funcs:
check_receive = True
recv_func = send_func
send_func = "mpi_send"
if recv_func in sendrecv_funcs:
send_func = recv_func
if i % len(self.functions_to_check) >= len(self.functions_to_check) - len(sendrecv_funcs):
# check the send part of sendrecv
check_receive = False
tm = get_send_recv_template(send_func, recv_func)
if check_receive:
tm.set_description("InvalidParam-Rank-" + recv_func, "Invalid Rank: %s" % rank_to_use)
else:
tm.set_description("InvalidParam-Rank-" + send_func, "Invalid Rank: %s" % rank_to_use)
if check_receive:
tm.get_block("MPICALL").get_operation(kind=0, index=0).set_arg("source", rank_to_use)
tm.get_block("MPICALL").get_operation(kind=0, index=0).set_has_error()
return tm
class InvalidRankErrorColl(ErrorGenerator):
invalid_ranks = ["-1", "size", "MPI_PROC_NULL"]
functions_to_use = ["mpi_reduce", "mpi_bcast"]
def __init__(self):
pass
def get_num_errors(self):
return len(self.invalid_ranks) * len(self.functions_to_use)
# the number of errors to produce in the extended mode (all possible combinations)
def get_num_errors_extended(self):
return len(self.invalid_ranks) * len(self.functions_to_use)
def get_feature(self):
return ["COLL"]
def generate(self, i):
rank_to_use = self.invalid_ranks[i // len(self.functions_to_use)]
func_to_use = self.functions_to_use[i % len(self.functions_to_use)]
tm = get_collective_template(func_to_use, seperate=False)
arg_to_replace = "root"
tm.set_description("InvalidParam-Rank-"+func_to_use, "Invalid Rank: %s" % rank_to_use)
tm.get_block("MPICALL").get_operation(kind='all', index=0).set_arg(arg_to_replace, rank_to_use)
tm.get_block("MPICALL").get_operation(kind='all', index=0).set_has_error()
else:
tm.get_block("MPICALL").get_operation(kind=1, index=0).set_arg("dest", rank_to_use)
tm.get_block("MPICALL").get_operation(kind=1, index=0).set_has_error()
return tm
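A minimal sketch of the index logic above: because the sendrecv functions are listed twice in functions_to_check, their first occurrence exercises the receive side and their second occurrence (the last len(sendrecv_funcs) positions) exercises the send side:

sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]
functions_to_check = ["mpi_send",
                      "mpi_recv", "mpi_irecv",
                      "mpi_isend", "mpi_ssend", "mpi_issend", "mpi_rsend", "mpi_irsend",
                      "mpi_bsend", "mpi_ibsend"] + sendrecv_funcs + sendrecv_funcs
recv_funcs = ["mpi_recv", "mpi_irecv"] + sendrecv_funcs

for idx, func in enumerate(functions_to_check):
    check_receive = func in recv_funcs
    if func in sendrecv_funcs and idx >= len(functions_to_check) - len(sendrecv_funcs):
        check_receive = False   # second occurrence: target the send part of sendrecv
    print(idx, func, "receive side" if check_receive else "send side")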
#! /usr/bin/python3
from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
from scripts.Infrastructure.InstructionBlock import InstructionBlock
from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
from scripts.Infrastructure.Template import TemplateManager
from scripts.Infrastructure.TemplateFactory import get_send_recv_template
from itertools import chain
sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]
class InvalidTagErrorP2P(ErrorGenerator):
invalid_tags = ["-1", "MPI_TAG_UB+1"]
functions_to_check = ["mpi_send",
"mpi_recv", "mpi_irecv",
"mpi_isend", "mpi_ssend", "mpi_issend", "mpi_rsend", "mpi_irsend", "mpi_bsend", "mpi_ibsend"
] + sendrecv_funcs + sendrecv_funcs
# check sendrecv funcs twice: the send and the recv part
recv_funcs = ["mpi_recv", "mpi_irecv"] + sendrecv_funcs
def __init__(self):
pass
def get_num_errors(self):
# send + receive = only check the first two functions
return len(self.invalid_tags) * 2
# the number of errors to produce in the extended mode (all possible combinations)
def get_num_errors_extended(self):
return len(self.invalid_tags) * len(self.functions_to_check)
def get_feature(self):
return ["P2P"]
def generate(self, i):
tag_to_use = self.invalid_tags[i // len(self.functions_to_check)]
send_func = self.functions_to_check[i % len(self.functions_to_check)]
check_receive = False
recv_func = "mpi_irecv"
if send_func in self.recv_funcs:
check_receive = True
recv_func = send_func
send_func = "mpi_send"
if recv_func in sendrecv_funcs:
send_func = recv_func
if i % len(self.functions_to_check) >= len(self.functions_to_check) - len(sendrecv_funcs):
# check the send part of sendrecv
check_receive = False
tm = get_send_recv_template(send_func, recv_func)
if check_receive:
tm.set_description("InvalidParam-Tag-" + recv_func, "Invalid Rank: %s" % tag_to_use)
else:
tm.set_description("InvalidParam-Tag-" + send_func, "Invalid Rank: %s" % tag_to_use)
if check_receive:
if tm.get_block("MPICALL").get_operation(kind=0, index=0).has_arg("tag"):
tm.get_block("MPICALL").get_operation(kind=0, index=0).set_arg("tag", tag_to_use)
else:
tm.get_block("MPICALL").get_operation(kind=0, index=0).set_arg("recvtag", tag_to_use)
tm.get_block("MPICALL").get_operation(kind=0, index=0).set_has_error()
else:
if tm.get_block("MPICALL").get_operation(kind=1, index=0).has_arg("tag"):
tm.get_block("MPICALL").get_operation(kind=1, index=0).set_arg("tag", tag_to_use)
else:
tm.get_block("MPICALL").get_operation(kind=1, index=0).set_arg("sendtag", tag_to_use)
tm.get_block("MPICALL").get_operation(kind=1, index=0).set_has_error()
return tm
#! /usr/bin/python3
from scripts.Infrastructure.GeneratorManager import GeneratorManager
if __name__ == "__main__":
gm = GeneratorManager("./errors")
gm.generate("../gencodes")
gm.generate("../gencodes", try_compile=True, generate_full_set=True)
pass