Commit 135dbf4a authored by Jammer, Tim

removed the testcases to develop them in extra branches

parent 1eb914fb
Merge request !9: Infrastructure: Type Hints, Instruction class and lists of instructions
#! /usr/bin/python3
from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
from scripts.Infrastructure.InstructionBlock import InstructionBlock
from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
from scripts.Infrastructure.Template import TemplateManager
from scripts.Infrastructure.TemplateFactory import get_send_recv_template, get_collective_template


class InvalidRankErrorColl(ErrorGenerator):
    invalid_ranks = ["-1", "nprocs", "MPI_PROC_NULL"]
    functions_to_use = ["mpi_reduce", "mpi_bcast"]

    def __init__(self):
        pass

    def get_feature(self):
        return ["COLL"]

    def generate(self, generate_full_set):
        for func_to_use in self.functions_to_use:
            for rank_to_use in self.invalid_ranks:
                tm = get_collective_template(func_to_use, seperate=False)
                arg_to_replace = "root"
                tm.set_description("InvalidParam-Rank-" + func_to_use, "Invalid Rank: %s" % rank_to_use)
                tm.get_block("MPICALL").get_operation(kind='all', index=0).set_arg(arg_to_replace, rank_to_use)
                tm.get_block("MPICALL").get_operation(kind='all', index=0).set_has_error()
                yield tm
            if not generate_full_set:
                return
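
# Illustrative only: a minimal, self-contained sketch of the reduced/full-set
# control flow used by every generator in this commit. The harness that
# actually drives ErrorGenerator.generate() is not part of this file; the
# case names below are made up for demonstration.
if __name__ == "__main__":
    def demo_generate(generate_full_set):
        for case in ["case-A", "case-B"]:  # reduced set: always emitted
            yield case
        if not generate_full_set:
            return                         # early exit keeps the default set small
        for case in ["case-C", "case-D"]:  # extended combinations
            yield case

    print(list(demo_generate(False)))  # ['case-A', 'case-B']
    print(list(demo_generate(True)))   # ['case-A', 'case-B', 'case-C', 'case-D']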
#! /usr/bin/python3
from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
from scripts.Infrastructure.InstructionBlock import InstructionBlock
from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
from scripts.Infrastructure.Template import TemplateManager
from scripts.Infrastructure.TemplateFactory import get_send_recv_template
from itertools import chain


class InvalidBufErrorP2P(ErrorGenerator):
    invalid_bufs = [CorrectParameterFactory().buf_var_name, "NULL"]
    send_funcs = ["mpi_send",
                  "mpi_isend", "mpi_ssend", "mpi_issend", "mpi_rsend", "mpi_irsend", "mpi_bsend", "mpi_ibsend",
                  "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init",
                  ]
    recv_funcs = ["mpi_recv", "mpi_irecv", "mpi_recv_init", "mpi_precv_init"]
    sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]

    def __init__(self):
        pass

    def get_feature(self):
        return ["P2P"]

    def generate_impl(self, send_func, recv_func, check_receive):
        for buf_to_use in self.invalid_bufs:
            tm = get_send_recv_template(send_func, recv_func)
            if buf_to_use == CorrectParameterFactory().buf_var_name:
                # replace the allocation with a bare pointer declaration:
                # using the buffer without an allocation is undefined behaviour
                if tm.get_block("alloc").has_operation(kind='all', index=1) and check_receive:
                    name = tm.get_block("alloc").get_operation(kind='all', index=1).get_name()
                    tm.get_block("alloc").replace_operation(
                        CorrectParameterFactory.dtype[0] + "* " + name + ";", kind='all', index=1)
                else:
                    tm.get_block("alloc").replace_operation(
                        CorrectParameterFactory.dtype[0] + "* " + CorrectParameterFactory.buf_var_name + ";")
            if check_receive:
                tm.set_description("InvalidParam-Buffer-" + recv_func, "Invalid Buffer: %s" % buf_to_use)
                if tm.get_block("MPICALL").get_operation(kind=0, index=0).has_arg("buf"):
                    tm.get_block("MPICALL").get_operation(kind=0, index=0).set_arg("buf", buf_to_use)
                else:
                    tm.get_block("MPICALL").get_operation(kind=0, index=0).set_arg("recvbuf", buf_to_use)
                tm.get_block("MPICALL").get_operation(kind=0, index=0).set_has_error()
            else:
                tm.set_description("InvalidParam-Buffer-" + send_func, "Invalid Buffer: %s" % buf_to_use)
                if tm.get_block("MPICALL").get_operation(kind=1, index=0).has_arg("buf"):
                    tm.get_block("MPICALL").get_operation(kind=1, index=0).set_arg("buf", buf_to_use)
                else:
                    # sendrecv: when checking the send side, corrupt the send buffer
                    tm.get_block("MPICALL").get_operation(kind=1, index=0).set_arg("sendbuf", buf_to_use)
                tm.get_block("MPICALL").get_operation(kind=1, index=0).set_has_error()
            yield tm

    def generate(self, generate_full_set):
        for func in self.send_funcs:
            yield from self.generate_impl(func, 'mpi_irecv', False)
            if not generate_full_set:
                return
        for func in self.recv_funcs:
            yield from self.generate_impl("mpi_send", func, True)
        yield from self.generate_impl("mpi_sendrecv", "mpi_sendrecv", True)
        yield from self.generate_impl("mpi_sendrecv", "mpi_sendrecv", False)
        yield from self.generate_impl("mpi_sendrecv_replace", "mpi_sendrecv_replace", True)


class MessageRaceErrorSendRecv(ErrorGenerator):
    def __init__(self):
        pass

    def get_feature(self):
        return ["P2P"]

    def generate(self, generate_full_set):
        for buf_to_use in ["buf", "MPI_IN_PLACE"]:
            tm = get_send_recv_template("mpi_sendrecv", "mpi_sendrecv")
            tm.set_description("InvalidParam-Buffer-mpi_sendrecv",
                               "send and recv buffers must be disjoint in sendrecv (recvbuf: %s)" % buf_to_use)
            for k in [0, 1]:
                tm.get_block("MPICALL").get_operation(kind=k, index=0).set_arg("recvbuf", buf_to_use)
                tm.get_block("MPICALL").get_operation(kind=k, index=0).set_has_error()
            yield tm
#! /usr/bin/python3
from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
from scripts.Infrastructure.InstructionBlock import InstructionBlock
from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
from scripts.Infrastructure.Template import TemplateManager
from scripts.Infrastructure.TemplateFactory import get_send_recv_template, get_communicator, get_intercomm
from itertools import chain

sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]


class InvalidCommErrorP2P(ErrorGenerator):
    invalid_comm = ["MPI_COMM_NULL", "NULL"]
    missmatching_comms = ["MPI_COMM_SELF", "mpi_comm_dup", "mpi_comm_dup_with_info", "mpi_comm_idup",
                          "mpi_comm_idup_with_info", "mpi_comm_create", "mpi_comm_create_group", "mpi_comm_split",
                          "mpi_comm_split_type", "mpi_comm_create_from_group"
                          ]
    # intercommunicators are covered as extended test cases
    intercomms = ["mpi_intercomm_create", "mpi_intercomm_merge", "mpi_intercomm_create_from_groups"]
    comms_to_check = invalid_comm + missmatching_comms + intercomms
    functions_to_check = ["mpi_send",
                          "mpi_recv", "mpi_irecv",
                          "mpi_isend", "mpi_ssend", "mpi_issend", "mpi_rsend", "mpi_irsend", "mpi_bsend", "mpi_ibsend",
                          "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init",
                          "mpi_precv_init", "mpi_recv_init"
                          ] + sendrecv_funcs
    recv_funcs = ["mpi_recv", "mpi_irecv", "mpi_recv_init", "mpi_precv_init"] + sendrecv_funcs

    def __init__(self):
        pass

    def get_feature(self):
        return ["P2P"]

    def generate(self, generate_full_set):
        for send_func in self.functions_to_check:
            # decide once per function whether the send or the receive side is checked,
            # so the inner loop does not overwrite the loop variable
            check_receive = False
            recv_func = "mpi_irecv"
            if send_func in self.recv_funcs:
                check_receive = True
                recv_func = send_func
                send_func = "mpi_send"
                if recv_func in sendrecv_funcs:
                    send_func = recv_func
            for comm_to_use in self.comms_to_check:
                tm = get_send_recv_template(send_func, recv_func)

                if comm_to_use in self.missmatching_comms and comm_to_use != "MPI_COMM_SELF":
                    b = get_communicator(comm_to_use, comm_to_use)
                    tm.insert_block(b, after_block_name="alloc")
                if comm_to_use in self.intercomms:
                    b = get_intercomm(comm_to_use, comm_to_use)
                    tm.insert_block(b, after_block_name="alloc")

                error_string = "ParamMatching"
                if comm_to_use in self.invalid_comm:
                    error_string = "InvalidParam"

                if check_receive:
                    if comm_to_use in self.missmatching_comms and recv_func == "mpi_irecv":
                        # this combination is already covered by the send cases
                        continue
                    tm.set_description(error_string + "-Comm-" + recv_func, "Invalid Communicator: %s" % comm_to_use)
                    tm.get_block("MPICALL").get_operation(kind=0, index=0).set_arg("comm", comm_to_use)
                    tm.get_block("MPICALL").get_operation(kind=0, index=0).set_has_error()
                    if comm_to_use in self.missmatching_comms:
                        # the mismatch involves both the send and the recv call
                        tm.get_block("MPICALL").get_operation(kind=1, index=0).set_has_error()
                else:
                    tm.set_description(error_string + "-Comm-" + send_func, "Invalid Communicator: %s" % comm_to_use)
                    tm.get_block("MPICALL").get_operation(kind=1, index=0).set_arg("comm", comm_to_use)
                    tm.get_block("MPICALL").get_operation(kind=1, index=0).set_has_error()
                    if comm_to_use in self.missmatching_comms:
                        # the mismatch involves both the send and the recv call
                        tm.get_block("MPICALL").get_operation(kind=0, index=0).set_has_error()

                # in an intercommunicator, source/dest refer to the remote group,
                # which has only one rank here; mpi_intercomm_merge is excluded,
                # since merging results in an equivalent intracommunicator again
                if comm_to_use in self.intercomms and not comm_to_use == "mpi_intercomm_merge":
                    if tm.get_block("MPICALL").get_operation(kind=0, index=0).has_arg("source"):
                        tm.get_block("MPICALL").get_operation(kind=0, index=0).set_arg("source", "0")
                    if tm.get_block("MPICALL").get_operation(kind=1, index=0).has_arg("source"):
                        tm.get_block("MPICALL").get_operation(kind=1, index=0).set_arg("source", "0")

                if comm_to_use in self.missmatching_comms + self.intercomms and comm_to_use != "MPI_COMM_SELF":
                    b = InstructionBlock("comm_free")
                    b.register_operation(MPICallFactory().mpi_comm_free("&" + comm_to_use))
                    tm.register_instruction_block(b)
                yield tm
            # end for comm_to_use
            if not generate_full_set:
                return
#! /usr/bin/python3
from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
from scripts.Infrastructure.InstructionBlock import InstructionBlock
from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
from scripts.Infrastructure.Template import TemplateManager
from scripts.Infrastructure.TemplateFactory import get_send_recv_template
from itertools import chain


class InvalidRankErrorP2P(ErrorGenerator):
    invalid_ranks = ["-1", "nprocs", "MPI_PROC_NULL"]
    send_funcs = ["mpi_send",
                  "mpi_isend", "mpi_ssend", "mpi_issend", "mpi_rsend", "mpi_irsend", "mpi_bsend", "mpi_ibsend",
                  "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init"
                  ]
    recv_funcs = ["mpi_recv", "mpi_irecv", "mpi_recv_init", "mpi_precv_init"]
    sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]

    def __init__(self):
        pass

    def get_feature(self):
        return ["P2P"]

    def generate_impl(self, send_func, recv_func, check_receive):
        for rank_to_use in self.invalid_ranks:
            tm = get_send_recv_template(send_func, recv_func)
            if check_receive:
                tm.set_description("InvalidParam-Rank-" + recv_func, "Invalid Rank: %s" % rank_to_use)
                tm.get_block("MPICALL").get_operation(kind=0, index=0).set_arg("source", rank_to_use)
                tm.get_block("MPICALL").get_operation(kind=0, index=0).set_has_error()
            else:
                tm.set_description("InvalidParam-Rank-" + send_func, "Invalid Rank: %s" % rank_to_use)
                tm.get_block("MPICALL").get_operation(kind=1, index=0).set_arg("dest", rank_to_use)
                tm.get_block("MPICALL").get_operation(kind=1, index=0).set_has_error()
            yield tm

    def generate(self, generate_full_set):
        for send_func in self.send_funcs:
            yield from self.generate_impl(send_func, "mpi_irecv", False)
            if not generate_full_set:
                return
        for func in self.recv_funcs:
            yield from self.generate_impl("mpi_send", func, True)
        for func in self.sendrecv_funcs:
            yield from self.generate_impl(func, func, True)
            yield from self.generate_impl(func, func, False)
#! /usr/bin/python3
from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
from scripts.Infrastructure.InstructionBlock import InstructionBlock
from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
from scripts.Infrastructure.Template import TemplateManager
from scripts.Infrastructure.TemplateFactory import get_send_recv_template
from itertools import chain


class InvalidRequestErrorP2P(ErrorGenerator):
    invalid_requests = ["MPI_REQUEST_NULL",  # probably triggers a compiler warning
                        "NULL"]
    functions_to_check = ["mpi_irecv", "mpi_isend",
                          "mpi_issend", "mpi_irsend", "mpi_ibsend",
                          "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init",
                          "mpi_precv_init", "mpi_recv_init"
                          ]
    recv_funcs = ["mpi_irecv", "mpi_recv_init", "mpi_precv_init"]

    def __init__(self):
        pass

    def get_num_errors(self):
        # send + receive = only check the first two functions
        return len(self.invalid_requests) * 2

    def get_num_errors_extended(self):
        # the number of errors to produce in the extended mode (all possible combinations)
        return len(self.invalid_requests) * len(self.functions_to_check)

    def get_feature(self):
        return ["P2P"]

    def generate(self, generate_full_set):
        for send_func in self.functions_to_check:
            # decide once per function whether the send or the receive side is checked,
            # so the inner loop does not overwrite the loop variable
            check_receive = False
            recv_func = "mpi_irecv"
            if send_func in self.recv_funcs:
                check_receive = True
                recv_func = send_func
                send_func = "mpi_isend"
            for req_to_use in self.invalid_requests:
                tm = get_send_recv_template(send_func, recv_func)
                if check_receive:
                    tm.set_description("InvalidParam-Request-" + recv_func, "Invalid Request: %s" % req_to_use)
                    tm.get_block("MPICALL").get_operation(kind=0, index=0).set_arg("request", req_to_use)
                    tm.get_block("MPICALL").get_operation(kind=0, index=0).set_has_error()
                else:
                    tm.set_description("InvalidParam-Request-" + send_func, "Invalid Request: %s" % req_to_use)
                    tm.get_block("MPICALL").get_operation(kind=1, index=0).set_arg("request", req_to_use)
                    tm.get_block("MPICALL").get_operation(kind=1, index=0).set_has_error()
                yield tm
            if not generate_full_set:
                return
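
# Illustrative only: the case counts implied by get_num_errors() and
# get_num_errors_extended() above; the literals mirror the class attributes
# (2 invalid request values, 12 functions to check).
if __name__ == "__main__":
    invalid_requests = 2  # == len(InvalidRequestErrorP2P.invalid_requests)
    functions = 12        # == len(InvalidRequestErrorP2P.functions_to_check)
    print(invalid_requests * 2)          # reduced set: 4 (send + receive variant)
    print(invalid_requests * functions)  # extended set: 24 (all combinations)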
#! /usr/bin/python3
from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
from scripts.Infrastructure.InstructionBlock import InstructionBlock
from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
from scripts.Infrastructure.Template import TemplateManager
from scripts.Infrastructure.TemplateFactory import get_send_recv_template
from itertools import chain


class InvalidTagErrorP2P(ErrorGenerator):
    invalid_tags = ["-1", "MPI_TAG_UB+1", CorrectParameterFactory.tag * 2, "MPI_ANY_TAG"]
    send_funcs = ["mpi_send",
                  "mpi_isend", "mpi_ssend", "mpi_issend", "mpi_rsend", "mpi_irsend", "mpi_bsend", "mpi_ibsend",
                  "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init",
                  ]
    recv_funcs = ["mpi_recv", "mpi_irecv", "mpi_recv_init", "mpi_precv_init"]
    sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]

    def __init__(self):
        pass

    def get_feature(self):
        return ["P2P"]

    def generate_impl(self, send_func, recv_func, check_receive):
        for tag_to_use in self.invalid_tags:
            tm = get_send_recv_template(send_func, recv_func)
            error_string = "InvalidParam"
            if tag_to_use == CorrectParameterFactory.tag * 2:
                # a doubled (otherwise valid) tag is an error only because the other side does not match
                error_string = "ParamMatching"
            if check_receive:
                if tag_to_use == "MPI_ANY_TAG":
                    # MPI_ANY_TAG is allowed on the receive side: correct case
                    continue
                if tag_to_use == CorrectParameterFactory.tag * 2 and recv_func == "mpi_irecv":
                    # this combination is already covered by the send cases
                    continue
                tm.set_description(error_string + "-Tag-" + recv_func, "Invalid Tag: %s" % tag_to_use)
                if tm.get_block("MPICALL").get_operation(kind=0, index=0).has_arg("tag"):
                    tm.get_block("MPICALL").get_operation(kind=0, index=0).set_arg("tag", tag_to_use)
                else:
                    tm.get_block("MPICALL").get_operation(kind=0, index=0).set_arg("recvtag", tag_to_use)
                tm.get_block("MPICALL").get_operation(kind=0, index=0).set_has_error()
                if tag_to_use == CorrectParameterFactory.tag * 2:
                    # the mismatch involves both the send and the recv call
                    tm.get_block("MPICALL").get_operation(kind=1, index=0).set_has_error()
            else:
                tm.set_description(error_string + "-Tag-" + send_func, "Invalid Tag: %s" % tag_to_use)
                if tm.get_block("MPICALL").get_operation(kind=1, index=0).has_arg("tag"):
                    tm.get_block("MPICALL").get_operation(kind=1, index=0).set_arg("tag", tag_to_use)
                else:
                    tm.get_block("MPICALL").get_operation(kind=1, index=0).set_arg("sendtag", tag_to_use)
                tm.get_block("MPICALL").get_operation(kind=1, index=0).set_has_error()
                if tag_to_use == CorrectParameterFactory.tag * 2:
                    # the mismatch involves both the send and the recv call
                    tm.get_block("MPICALL").get_operation(kind=0, index=0).set_has_error()
            yield tm

    def generate(self, generate_full_set):
        for send_func in self.send_funcs:
            yield from self.generate_impl(send_func, "mpi_irecv", False)
            if not generate_full_set:
                return
        for recv_func in self.recv_funcs:
            yield from self.generate_impl("mpi_send", recv_func, True)
        for func in self.sendrecv_funcs:
            yield from self.generate_impl(func, func, True)
            yield from self.generate_impl(func, func, False)
#! /usr/bin/python3
from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
from scripts.Infrastructure.InstructionBlock import InstructionBlock
from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
from scripts.Infrastructure.Template import TemplateManager
from scripts.Infrastructure.TemplateFactory import get_send_recv_template
from itertools import chain
from scripts.Infrastructure.Variables import ERROR_MARKER_COMMENT

sendrecv_funcs = ["mpi_sendrecv", "mpi_sendrecv_replace"]


class LocalConcurrencyErrorP2P(ErrorGenerator):
    functions_to_check = ["mpi_irecv",
                          "mpi_isend", "mpi_issend", "mpi_irsend", "mpi_ibsend",
                          "mpi_send_init", "mpi_ssend_init", "mpi_bsend_init", "mpi_rsend_init", "mpi_psend_init",
                          "mpi_precv_init", "mpi_recv_init"
                          ]
    recv_funcs = ["mpi_irecv", "mpi_recv_init", "mpi_precv_init"]

    def __init__(self):
        pass

    def get_feature(self):
        return ["P2P"]

    def generate(self, generate_full_set):
        for send_func in self.functions_to_check:
            check_receive = False
            recv_func = "mpi_irecv"
            if send_func in self.recv_funcs:
                check_receive = True
                recv_func = send_func
                send_func = "mpi_isend"
            tm = get_send_recv_template(send_func, recv_func)
            if check_receive:
                tm.set_description("LocalConcurrency-receive-" + recv_func,
                                   "usage of receive buffer before operation is completed")
                tm.get_block("MPICALL").get_operation(kind=0, index=0).set_has_error()
                # the receive is posted on rank 0, so the premature buffer access goes there
                tm.get_block("MPICALL").register_operation("buf[2]=1;" + ERROR_MARKER_COMMENT, kind=0)
            else:
                tm.set_description("LocalConcurrency-send-" + send_func,
                                   "usage of send buffer before operation is completed")
                tm.get_block("MPICALL").get_operation(kind=1, index=0).set_has_error()
                tm.get_block("MPICALL").register_operation("buf[2]=1;" + ERROR_MARKER_COMMENT, kind=1)
            yield tm
            if not generate_full_set:
                return
#! /usr/bin/python3
from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
from scripts.Infrastructure.InstructionBlock import InstructionBlock
from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
from scripts.Infrastructure.CorrectParameter import CorrectParameterFactory, get_matching_recv
from scripts.Infrastructure.Template import TemplateManager
from scripts.Infrastructure.TemplateFactory import get_send_recv_template
from itertools import chain


class MessageRaceErrorAnyTag(ErrorGenerator):
    # TODO do we need to generate it for all combinations of send and recv?
    def __init__(self):
        pass

    def get_feature(self):
        return ["P2P"]

    def generate(self, generate_full_set):
        tm = TemplateManager()
        tm.set_description("MsgRace-ANY_TAG", "order of messages is non-deterministic, may lead to a deadlock")

        b = InstructionBlock("alloc")
        b.register_operation(CorrectParameterFactory().get_buffer_alloc())
        tm.register_instruction_block(b)

        b = InstructionBlock("MPICALL")
        # send part: tag each message with the loop counter
        b.register_operation("for(int i = 0; i < 10; ++i) {", kind=1)
        b.register_operation("buf[0]=i;", kind=1)
        send_call = CorrectMPICallFactory().mpi_send()
        send_call.set_arg("tag", "i")
        b.register_operation(send_call, kind=1)
        b.register_operation("}", kind=1)
        # recv part: MPI_ANY_TAG may match the messages out of order
        b.register_operation("for(int i = 0; i < 10; ++i) {", kind=0)
        recv_call = CorrectMPICallFactory().mpi_recv()
        recv_call.set_arg("tag", "MPI_ANY_TAG")
        b.register_operation(recv_call, kind=0)
        b.register_operation("if(buf[0]!=i){", kind=0)
        additional_recv = CorrectMPICallFactory().mpi_recv()
        additional_recv.set_has_error()  # the additional recv leads to a deadlock
        b.register_operation(additional_recv, kind=0)
        b.register_operation("}", kind=0)  # end if
        b.register_operation("}", kind=0)  # end for
        tm.register_instruction_block(b)

        b = InstructionBlock("free")
        b.register_operation(CorrectParameterFactory().get_buffer_free())
        tm.register_instruction_block(b)
        yield tm


class MessageRaceErrorAnysource(ErrorGenerator):
    # TODO do we need to generate it for all combinations of send and recv?
    def __init__(self):
        pass

    def get_feature(self):
        return ["P2P"]

    def generate(self, generate_full_set):
        tm = TemplateManager(min_ranks=3)
        tm.set_description("MsgRace-ANY_SOURCE", "order of messages is non-deterministic, may lead to a deadlock")

        b = InstructionBlock("alloc")
        b.register_operation(CorrectParameterFactory().get_buffer_alloc())
        tm.register_instruction_block(b)

        b = InstructionBlock("MPICALL")
        # send part: every rank except 0 sends its own rank number
        b.register_operation("buf[0]=rank;", kind='not0')
        send_call = CorrectMPICallFactory().mpi_send()
        b.register_operation(send_call, kind='not0')
        # recv part: MPI_ANY_SOURCE may match the messages out of order
        b.register_operation("for(int i = 1; i < nprocs; ++i) {", kind=0)
        recv_call = CorrectMPICallFactory().mpi_recv()
        recv_call.set_arg("source", "MPI_ANY_SOURCE")
        b.register_operation(recv_call, kind=0)
        b.register_operation("if(buf[0]!=i){", kind=0)
        additional_recv = CorrectMPICallFactory().mpi_recv()
        additional_recv.set_has_error()  # the additional recv leads to a deadlock
        b.register_operation(additional_recv, kind=0)
        b.register_operation("}", kind=0)  # end if
        b.register_operation("}", kind=0)  # end for
        tm.register_instruction_block(b)

        b = InstructionBlock("free")
        b.register_operation(CorrectParameterFactory().get_buffer_free())
        tm.register_instruction_block(b)
        yield tm
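
# Illustrative only: why the additional receive deadlocks. With 10 sends and a
# wildcard receive, any out-of-order delivery makes the receiver post an 11th
# receive that can never be matched (pure-Python sketch, no MPI involved).
if __name__ == "__main__":
    import random
    msgs = list(range(10))
    random.shuffle(msgs)             # wildcard matching: arrival order not guaranteed
    posted_recvs = 0
    for i in range(10):
        posted_recvs += 1            # the receive inside the loop
        if msgs[i] != i:
            posted_recvs += 1        # the additional receive
    print(posted_recvs > len(msgs))  # True on any reordering -> deadlock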
#! /usr/bin/python3
from scripts.Infrastructure.ErrorGenerator import ErrorGenerator
from scripts.Infrastructure.InstructionBlock import InstructionBlock
from scripts.Infrastructure.MPICallFactory import MPICallFactory, CorrectMPICallFactory
from scripts.Infrastructure.CorrectParameter import (
    CorrectParameterFactory,
    get_matching_recv,
)
from scripts.Infrastructure.Template import TemplateManager
from scripts.Infrastructure.TemplateFactory import get_allocated_window, get_rma_call
from scripts.Infrastructure.AllocCall import AllocCall
from scripts.Infrastructure.MPICall import MPI_Call
import itertools
from scripts.Infrastructure.Variables import ERROR_MARKER_COMMENT
from typing import Callable, List, Tuple


class LocalConcurrencyErrorRMA(ErrorGenerator):
    # RMA calls that write to the local origin buffer
    local_origin_addr_write = ["mpi_get", "mpi_rget"]
    # RMA calls that read from the local origin buffer
    local_origin_addr_read = [
        "mpi_put",
        "mpi_rput",
        "mpi_accumulate",
        "mpi_raccumulate",
        "mpi_get_accumulate",
        "mpi_rget_accumulate",
        "mpi_fetch_and_op",
        "mpi_compare_and_swap",
    ]
    functions_to_check = ["mpi_put", "mpi_get", "mpi_rput", "mpi_rget"]

    def __init__(self):
        pass

    def get_feature(self):
        return ["RMA"]

    def generate(self, generate_full_set):
        cf = CorrectParameterFactory()

        # block factories: every combination needs fresh InstructionBlocks,
        # since set_has_error() mutates a block and would otherwise leak the
        # error flag from a conflicting combination into a later, correct one
        def make_bufread():
            b = InstructionBlock("bufread")
            b.register_operation(f'printf("buf is %d\\n", {cf.buf_var_name}[1]);', 0)
            return b

        def make_bufwrite():
            b = InstructionBlock("write")
            b.register_operation(f'{cf.buf_var_name}[1] = 42;', 0)
            return b

        # factories for the RMA calls that read / write the local buffer
        # (the f=f default binds the loop value at definition time)
        mpi_buf_read = [lambda f=f: get_rma_call(f, 0) for f in self.local_origin_addr_read]
        mpi_buf_write = [lambda f=f: get_rma_call(f, 0) for f in self.local_origin_addr_write]

        # 7 possible combinations of local buffer accesses (hasconflict = True | False)
        local_access_combinations: List[Tuple[List[Callable[[], InstructionBlock]],
                                              List[Callable[[], InstructionBlock]], bool]] = [
            (mpi_buf_read, [make_bufread], False),
            (mpi_buf_read, [make_bufwrite], True),
            (mpi_buf_write, [make_bufread], True),
            (mpi_buf_write, [make_bufwrite], True),
            (mpi_buf_read, mpi_buf_read, False),
            (mpi_buf_read, mpi_buf_write, True),
            (mpi_buf_write, mpi_buf_write, True),
        ]
        for ops1, ops2, hasconflict in local_access_combinations:
            for (make_op1, make_op2) in itertools.product(ops1, ops2):
                op1 = make_op1()
                op2 = make_op2()
                tm = TemplateManager()
                # window allocation boilerplate
                b = get_allocated_window("mpi_win_create", "win", "winbuf", "int", "2")
                tm.register_instruction_block(b)
                # local buffer allocation
                alloc = InstructionBlock("alloc")
                alloc.register_operation(
                    AllocCall(cf.dtype[0], cf.buf_size, cf.buf_var_name)
                )
                tm.register_instruction_block(alloc)
                if hasconflict:
                    op1.get_operation(kind=0, index=-1).set_has_error()
                    op2.get_operation(kind=0, index=-1).set_has_error()
                tm.register_instruction_block(op1)
                tm.register_instruction_block(op2)
                tm.set_description(
                    ("LocalConcurrency" if hasconflict else "Correct")
                    + "-" + op1.name + "_" + op2.name,
                    "the two local buffer accesses conflict" if hasconflict
                    else "the two local buffer accesses do not conflict",
                )
                yield tm
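
# Illustrative only: how the (ops1, ops2) lists above expand via
# itertools.product. With 8 read-type and 2 write-type RMA call factories and
# one local read/write each, the 7 combinations yield 104 templates in total.
if __name__ == "__main__":
    reads, writes, local = 8, 2, 1
    combos = [(reads, local), (reads, local), (writes, local), (writes, local),
              (reads, reads), (reads, writes), (writes, writes)]
    print(sum(a * b for a, b in combos))  # 8+8+2+2+64+16+4 = 104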