diff --git a/City/database/to-do.txt b/City/database/to-do.txt
deleted file mode 100644
index d5c8595f5464c3cafffdbad4bb63e1f3fe804ef3..0000000000000000000000000000000000000000
--- a/City/database/to-do.txt
+++ /dev/null
@@ -1 +0,0 @@
-Add pre-configured cities
diff --git a/City/model/to-do.txt b/City/model/to-do.txt
deleted file mode 100644
index 370185e140f93f7313a89890ecba7683fc5f7dff..0000000000000000000000000000000000000000
--- a/City/model/to-do.txt
+++ /dev/null
@@ -1 +0,0 @@
-Insert city model
diff --git a/City/scripts/extract_inputs.py b/City/scripts/extract_inputs.py
deleted file mode 100644
index b5a432719e12cbc3da0ba5017e55096c1dab5026..0000000000000000000000000000000000000000
--- a/City/scripts/extract_inputs.py
+++ /dev/null
@@ -1 +0,0 @@
-# Add scripts - this script has to be inherited from extract_inputs in District->scripts
\ No newline at end of file
diff --git a/Component/__init__.py b/Component/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/Component/data/BHKW/BHKW1.json b/Component/data/BHKW/BHKW1.json
deleted file mode 100644
index 4103d79b7b980e19cf2cad8e07fca800e9fef474..0000000000000000000000000000000000000000
--- a/Component/data/BHKW/BHKW1.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-    "efficiency_heat": 0.49,
-    "efficiency_elec": 0.344,
-    "elec_heat_ratio": 0.7020000000000001,
-    "cost": 3400,
-    "fix_cost_factor": 0.03,
-    "service_life": 20,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.05,
-    "servicing_effort_hours": 20
-}
\ No newline at end of file
diff --git a/Component/data/BHKW/Technische_Beschreibung_BHKW_Vitobloc_200_EM-199_263.pdf b/Component/data/BHKW/Technische_Beschreibung_BHKW_Vitobloc_200_EM-199_263.pdf
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/Component/data/BHKW/VTB200.json b/Component/data/BHKW/VTB200.json
deleted file mode 100644
index 9854f62a589120feba7150ac73ed55b78f10d64a..0000000000000000000000000000000000000000
--- a/Component/data/BHKW/VTB200.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-    "efficiency_heat_under50": 0.5805,
-    "efficiency_elec_under50": 0.3065,
-    "efficiency_heat_over50": 0.5575,
-    "efficiency_elec_over50": 0.359,
-    "cost": 3400,
-    "fix_cost_factor": 0.03,
-    "service_life": 20,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.05,
-    "servicing_effort_hours": 20
-}
\ No newline at end of file
diff --git a/Component/data/BHKW/VTB200_0.json b/Component/data/BHKW/VTB200_0.json
deleted file mode 100644
index e6b2e2b5aaf67b356770e508d425d33bf2cdd0fb..0000000000000000000000000000000000000000
--- a/Component/data/BHKW/VTB200_0.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-    "efficiency_heat": 0.598,
-    "efficiency_elec": 0.316,
-    "ratio": 0.528,
-    "cost": 3400,
-    "fix_cost_factor": 0.03,
-    "service_life": 20,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.05,
-    "servicing_effort_hours": 20
-}
\ No newline at end of file
diff --git a/Component/data/CHP/CHP1.json b/Component/data/CHP/CHP1.json
deleted file mode 100644
index b56d90da1f05c187e2d53607b27358120646c26e..0000000000000000000000000000000000000000
--- a/Component/data/CHP/CHP1.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-    "efficiency_heat": 0.52,
-    "efficiency_elec": 0.38,
-    "cost": 1100,
-    "fix_cost_factor": 0.02,
-    "service_life": 15,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.05,
-    "servicing_effort_hours": 20
-}
\ No newline at end of file
diff --git a/Component/data/DcDcConverter/INVBAT.json b/Component/data/DcDcConverter/INVBAT.json
deleted file mode 100644
index d64d68c8fb2b4e9011aae50ad2744feccaba9838..0000000000000000000000000000000000000000
--- a/Component/data/DcDcConverter/INVBAT.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-    "efficiency": 0.97,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "service_life": 20,
-    "cost": 250
-}
\ No newline at end of file
diff --git a/Component/data/DynamicBiInverter/INV1.json b/Component/data/DynamicBiInverter/INV1.json
deleted file mode 100644
index 755695207a41b9d09d86ebd62f6333c2ac20dc25..0000000000000000000000000000000000000000
--- a/Component/data/DynamicBiInverter/INV1.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-    "p1": 0.05,
-    "p2": 0.55,
-    "p3": 0.95,
-    "eta1": 0.1441,
-    "eta2": 0.8959,
-    "eta3": 0.8911,
-    "nominal_efficiency": 0.89,
-    "curve_type": 1,
-    "cost": 250,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "service_life": 20
-}
\ No newline at end of file
diff --git a/Component/data/DynamicBiInverter/STP-7000TL-20.json b/Component/data/DynamicBiInverter/STP-7000TL-20.json
deleted file mode 100644
index 3bf14db7a6ace5cc8114aee5633088ff9ac13a7f..0000000000000000000000000000000000000000
--- a/Component/data/DynamicBiInverter/STP-7000TL-20.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-    "p1": 0.05,
-    "p2": 0.1,
-    "p3": 0.75,
-    "eta1": 0.88,
-    "eta2": 0.9279999999999999,
-    "eta3": 0.973,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "service_life": 20,
-    "cost": 250,
-    "nominal_efficiency": 0.973,
-    "curve_type": 1
-}
\ No newline at end of file
diff --git a/Component/data/DynamicBiInverter/STP-7000TL-20_ori.json b/Component/data/DynamicBiInverter/STP-7000TL-20_ori.json
deleted file mode 100644
index 8c282671c8cb9c438e0965456f2949e6852dbc0a..0000000000000000000000000000000000000000
--- a/Component/data/DynamicBiInverter/STP-7000TL-20_ori.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-    "p1": 0.05,
-    "p2": 0.1,
-    "p3": 0.75,
-    "eta1": 0.88,
-    "eta2": 0.9279999999999999,
-    "eta3": 0.973,
-    "nominal_efficiency": 0.973,
-    "curve_type": 1,
-    "fit": 0.6
-}
\ No newline at end of file
diff --git a/Component/data/DynamicInverter/INV1.json b/Component/data/DynamicInverter/INV1.json
deleted file mode 100644
index 755695207a41b9d09d86ebd62f6333c2ac20dc25..0000000000000000000000000000000000000000
--- a/Component/data/DynamicInverter/INV1.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-    "p1": 0.05,
-    "p2": 0.55,
-    "p3": 0.95,
-    "eta1": 0.1441,
-    "eta2": 0.8959,
-    "eta3": 0.8911,
-    "nominal_efficiency": 0.89,
-    "curve_type": 1,
-    "cost": 250,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "service_life": 20
-}
\ No newline at end of file
diff --git a/Component/data/DynamicInverter/STP-7000TL-20.json b/Component/data/DynamicInverter/STP-7000TL-20.json
deleted file mode 100644
index 3bf14db7a6ace5cc8114aee5633088ff9ac13a7f..0000000000000000000000000000000000000000
--- a/Component/data/DynamicInverter/STP-7000TL-20.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-    "p1": 0.05,
-    "p2": 0.1,
-    "p3": 0.75,
-    "eta1": 0.88,
-    "eta2": 0.9279999999999999,
-    "eta3": 0.973,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "service_life": 20,
-    "cost": 250,
-    "nominal_efficiency": 0.973,
-    "curve_type": 1
-}
\ No newline at end of file
diff --git a/Component/data/DynamicInverter/STP-7000TL-20_ori.json b/Component/data/DynamicInverter/STP-7000TL-20_ori.json
deleted file mode 100644
index 8c282671c8cb9c438e0965456f2949e6852dbc0a..0000000000000000000000000000000000000000
--- a/Component/data/DynamicInverter/STP-7000TL-20_ori.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-    "p1": 0.05,
-    "p2": 0.1,
-    "p3": 0.75,
-    "eta1": 0.88,
-    "eta2": 0.9279999999999999,
-    "eta3": 0.973,
-    "nominal_efficiency": 0.973,
-    "curve_type": 1,
-    "fit": 0.6
-}
\ No newline at end of file
diff --git a/Component/data/ElecBus/BAT1.json b/Component/data/ElecBus/BAT1.json
deleted file mode 100644
index 2ef0240c9fed6a55d3e0806d7231f7784f6c80a4..0000000000000000000000000000000000000000
--- a/Component/data/ElecBus/BAT1.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-    "input_efficiency": 0.99,
-    "output_efficiency": 0.99,
-    "cost": 1000,
-    "fix_cost_factor": 0.008,
-    "service_life": 20,
-    "e2p_in": 1,
-    "e2p_out": 1,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "max_soe": 1,
-    "min_soe": 0,
-    "init_soe": 0.5
-}
\ No newline at end of file
diff --git a/Component/data/ElecBus/BAT1_OHL.json b/Component/data/ElecBus/BAT1_OHL.json
deleted file mode 100644
index 3d1ae03499e38dbc7cffb97cfad4f6cb9fa9eb15..0000000000000000000000000000000000000000
--- a/Component/data/ElecBus/BAT1_OHL.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-    "input_efficiency": 0.99,
-    "output_efficiency": 0.99,
-    "cost": 1000,
-    "fix_cost_factor": 0.008,
-    "service_life": 20,
-    "e2p_in": 0.2,
-    "e2p_out": 0.2,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "max_soe": 1,
-    "min_soe": 0,
-    "init_soe": 0.5
-}
\ No newline at end of file
diff --git a/Component/data/ElectricBoiler/BOI2.json b/Component/data/ElectricBoiler/BOI2.json
deleted file mode 100644
index d799c2ce0d690877bac39484ba41f3c26b4c1149..0000000000000000000000000000000000000000
--- a/Component/data/ElectricBoiler/BOI2.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    "efficiency1": 0.98,
-    "inv_cost": 137,
-    "fix_cost_factor": 5.2
-}
\ No newline at end of file
diff --git a/Component/data/ElectricRadiator/RAD1.json b/Component/data/ElectricRadiator/RAD1.json
deleted file mode 100644
index 80253431474f5ce333311bb04e1475d8b6aa264b..0000000000000000000000000000000000000000
--- a/Component/data/ElectricRadiator/RAD1.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-    "efficiency1": 0.9
-}
\ No newline at end of file
diff --git a/Component/data/ElectricalGrid/GRD1.json b/Component/data/ElectricalGrid/GRD1.json
deleted file mode 100644
index a91574e7a4b2cf207908e5f62ee075b021f6c219..0000000000000000000000000000000000000000
--- a/Component/data/ElectricalGrid/GRD1.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-    "service_life": 20,
-    "cost": 0,
-    "factor_repair_effort": 0,
-    "factor_servicing_effort": 0,
-    "servicing_effort_hours": 0
-}
\ No newline at end of file
diff --git a/Component/data/GasBoiler/BOI1.json b/Component/data/GasBoiler/BOI1.json
deleted file mode 100644
index 857c8691fb9f84cc9c34bc149111068ce69fac0d..0000000000000000000000000000000000000000
--- a/Component/data/GasBoiler/BOI1.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-    "efficiency": 0.85,
-    "max_input_power": 5,
-    "service_life": 40,
-    "cost": 300.5,
-    "c": 200,
-    "c0": 7439.11,
-    "c1": -1.325,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20
-}
\ No newline at end of file
diff --git a/Component/data/GasGrid/GAS1.json b/Component/data/GasGrid/GAS1.json
deleted file mode 100644
index a91574e7a4b2cf207908e5f62ee075b021f6c219..0000000000000000000000000000000000000000
--- a/Component/data/GasGrid/GAS1.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-    "service_life": 20,
-    "cost": 0,
-    "factor_repair_effort": 0,
-    "factor_servicing_effort": 0,
-    "servicing_effort_hours": 0
-}
\ No newline at end of file
diff --git a/Component/data/HeatExchanger/HE1.json b/Component/data/HeatExchanger/HE1.json
deleted file mode 100644
index 998fe920fbaa9069557da60110ff47ae733cda2d..0000000000000000000000000000000000000000
--- a/Component/data/HeatExchanger/HE1.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-    "efficiency": 0.85,
-    "max_input_power": 5,
-    "service_life": 20,
-    "cost": 300.5,
-    "c": 200,
-    "c0": 7439.11,
-    "c1": -1.325,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20
-}
\ No newline at end of file
diff --git a/Component/data/HeatGrid/HG1.json b/Component/data/HeatGrid/HG1.json
deleted file mode 100644
index a91574e7a4b2cf207908e5f62ee075b021f6c219..0000000000000000000000000000000000000000
--- a/Component/data/HeatGrid/HG1.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-    "service_life": 20,
-    "cost": 0,
-    "factor_repair_effort": 0,
-    "factor_servicing_effort": 0,
-    "servicing_effort_hours": 0
-}
\ No newline at end of file
diff --git a/Component/data/HeatPump/EHP1.json b/Component/data/HeatPump/EHP1.json
deleted file mode 100644
index 66b097e8b10f953f0919c988a8abb7749bcd7021..0000000000000000000000000000000000000000
--- a/Component/data/HeatPump/EHP1.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-    "efficiency": 0.45,
-    "temperature_hot": 55,
-    "cost": 770,
-    "fix_cost_factor": 0.02,
-    "service_life": 20,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20
-}
\ No newline at end of file
diff --git a/Component/data/HotWaterStorage/TES1.json b/Component/data/HotWaterStorage/TES1.json
deleted file mode 100644
index 20517403ca3d1e0810afb85e317255b00af646f8..0000000000000000000000000000000000000000
--- a/Component/data/HotWaterStorage/TES1.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-    "input_efficiency": 0.9,
-    "output_efficiency": 0.9,
-    "self_discharge_efficiency": 0.99,
-    "max_soe": 1,
-    "min_soe": 0,
-    "max_input": 1000,
-    "max_output": 1,
-    "service_life": 10,
-    "cost": 30,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "e2p_in": 1,
-    "e2p_out": 1,
-    "init_soe": 0.5
-}
\ No newline at end of file
diff --git a/Component/data/LiionBattery/BAT1.json b/Component/data/LiionBattery/BAT1.json
deleted file mode 100644
index 2ef0240c9fed6a55d3e0806d7231f7784f6c80a4..0000000000000000000000000000000000000000
--- a/Component/data/LiionBattery/BAT1.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-    "input_efficiency": 0.99,
-    "output_efficiency": 0.99,
-    "cost": 1000,
-    "fix_cost_factor": 0.008,
-    "service_life": 20,
-    "e2p_in": 1,
-    "e2p_out": 1,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "max_soe": 1,
-    "min_soe": 0,
-    "init_soe": 0.5
-}
\ No newline at end of file
diff --git a/Component/data/LiionBattery/Sonnen9.json b/Component/data/LiionBattery/Sonnen9.json
deleted file mode 100644
index b5fb74f9045419bb197c9195d94d971de83360bc..0000000000000000000000000000000000000000
--- a/Component/data/LiionBattery/Sonnen9.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    "efficiency1": 0.99,
-    "efficiency2": 0.99,
-    "init_soe": 0.5
-}
\ No newline at end of file
diff --git a/Component/data/PEMElectrolyzer/ELEC1.json b/Component/data/PEMElectrolyzer/ELEC1.json
deleted file mode 100644
index ccfd5cde63f5a749e0ba8c9822e237d6852ba8c4..0000000000000000000000000000000000000000
--- a/Component/data/PEMElectrolyzer/ELEC1.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-    "efficiency": 0.75,
-    "cost": 4000,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "fix_cost_factor": 0.025,
-    "service_life": 20
-}
\ No newline at end of file
diff --git a/Component/data/PEMFuelCell/FCELL1.json b/Component/data/PEMFuelCell/FCELL1.json
deleted file mode 100644
index 1c449539b63683091dcdaf3ed66debe22db33c5f..0000000000000000000000000000000000000000
--- a/Component/data/PEMFuelCell/FCELL1.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-    "elec_eff": 0.38,
-    "heat_eff": 0.54,
-    "cost": 4000,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "fix_cost_factor": 0.025,
-    "service_life": 20
-}
\ No newline at end of file
diff --git a/Component/data/PEMFuelCell/FCELL2.json b/Component/data/PEMFuelCell/FCELL2.json
deleted file mode 100644
index be40cee9a5350918516de6d81e0230c3252d9588..0000000000000000000000000000000000000000
--- a/Component/data/PEMFuelCell/FCELL2.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-    "elec_eff": 0.57,
-    "heat_eff": 0.33,
-    "cost": 4000,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "fix_cost_factor": 0.025,
-    "service_life": 20
-}
\ No newline at end of file
diff --git a/Component/data/PVGenerator/PV2.json b/Component/data/PVGenerator/PV2.json
deleted file mode 100644
index e91a6d183ade693dddaba258a7e5ac6f425b73a1..0000000000000000000000000000000000000000
--- a/Component/data/PVGenerator/PV2.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-    "temp_coefficient": 0.0043,
-    "NOCT": 45.5,
-    "panel_size": 0.325,
-    "cost": 1000,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "fix_cost_factor": 0.025,
-    "service_life": 20
-}
\ No newline at end of file
diff --git a/Component/data/PVGenerator/Vitovolt300.json b/Component/data/PVGenerator/Vitovolt300.json
deleted file mode 100644
index c406414973958a5699861876a5bf49c09b15ea19..0000000000000000000000000000000000000000
--- a/Component/data/PVGenerator/Vitovolt300.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-    "temp_coefficient": 0.0039,
-    "NOCT": 45,
-    "panel_size": 1.87644,
-    "cost": 1168,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "fix_cost_factor": 0.025,
-    "service_life": 20
-}
\ No newline at end of file
diff --git a/Component/data/PressureStorage/PST1.json b/Component/data/PressureStorage/PST1.json
deleted file mode 100644
index ecd6d78a1d77e459741b966b5030948f23340921..0000000000000000000000000000000000000000
--- a/Component/data/PressureStorage/PST1.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-    "input_efficiency": 0.8,
-    "output_efficiency": 1,
-    "cost": 100,
-    "fix_cost_factor": 0.008,
-    "service_life": 20,
-    "e2p_in": 1,
-    "e2p_out": 1,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "max_soe": 1,
-    "min_soe": 0,
-    "init_soe": 0.5
-}
\ No newline at end of file
diff --git a/Component/data/PublicChargingStation/INVBAT.csv b/Component/data/PublicChargingStation/INVBAT.csv
deleted file mode 100644
index 77a1ddb5703db042c5ded7904257b090ce21543c..0000000000000000000000000000000000000000
--- a/Component/data/PublicChargingStation/INVBAT.csv
+++ /dev/null
@@ -1,2 +0,0 @@
-efficiency,factor repair effort,factor servicing effort,servicing effort hours,service life,cost
-0.95,0.01,0.015,20,20,250
\ No newline at end of file
diff --git a/Component/data/SolarThermalCollector/ST1.json b/Component/data/SolarThermalCollector/ST1.json
deleted file mode 100644
index ef03a08560b5a10e039d8c93e729d8f90200fd82..0000000000000000000000000000000000000000
--- a/Component/data/SolarThermalCollector/ST1.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-    "efficiency": 0.5,
-    "Specific_Nominal_Capacity": 0.672,
-    "service_life": 20,
-    "cost": 1000,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "Unnamed:_7": NaN
-}
\ No newline at end of file
diff --git a/Component/data/SolarThermalCollector/Vitosol200_TypSV2G.json b/Component/data/SolarThermalCollector/Vitosol200_TypSV2G.json
deleted file mode 100644
index 71d0e9cc9be439bf7234adefa479a09a576baca0..0000000000000000000000000000000000000000
--- a/Component/data/SolarThermalCollector/Vitosol200_TypSV2G.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-    "optimal_efficiency": 0.75,
-    "panel_size": 2.56,
-    "cost": 1125,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "fix_cost_factor": 0.025,
-    "service_life": 20
-}
\ No newline at end of file
diff --git a/Component/data/StaticBiInverter/INV2.json b/Component/data/StaticBiInverter/INV2.json
deleted file mode 100644
index 06e4ae85476b8223325c251c53d1cbfe4b262a6c..0000000000000000000000000000000000000000
--- a/Component/data/StaticBiInverter/INV2.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-    "efficiency": 0.975,
-    "inv_cost": 200,
-    "fix_cost_factor": 0.01,
-    "service_life": 20
-}
\ No newline at end of file
diff --git a/Component/data/StaticBiInverter/INVBAT.json b/Component/data/StaticBiInverter/INVBAT.json
deleted file mode 100644
index 7088a9e8c1f7828b479019a9d9faf0de1afd7dc4..0000000000000000000000000000000000000000
--- a/Component/data/StaticBiInverter/INVBAT.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-    "efficiency": 0.95,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "service_life": 20,
-    "cost": 250
-}
\ No newline at end of file
diff --git a/Component/data/StaticBiInverter/INVPV.json b/Component/data/StaticBiInverter/INVPV.json
deleted file mode 100644
index 1ea3912fe8a18eff3970f1130c13e0854794f07c..0000000000000000000000000000000000000000
--- a/Component/data/StaticBiInverter/INVPV.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-    "efficiency": 0.9694,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "service_life": 20,
-    "cost": 242
-}
\ No newline at end of file
diff --git a/Component/data/StaticInverter/INV2.json b/Component/data/StaticInverter/INV2.json
deleted file mode 100644
index 06e4ae85476b8223325c251c53d1cbfe4b262a6c..0000000000000000000000000000000000000000
--- a/Component/data/StaticInverter/INV2.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-    "efficiency": 0.975,
-    "inv_cost": 200,
-    "fix_cost_factor": 0.01,
-    "service_life": 20
-}
\ No newline at end of file
diff --git a/Component/data/StaticInverter/INVBAT.json b/Component/data/StaticInverter/INVBAT.json
deleted file mode 100644
index 7088a9e8c1f7828b479019a9d9faf0de1afd7dc4..0000000000000000000000000000000000000000
--- a/Component/data/StaticInverter/INVBAT.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-    "efficiency": 0.95,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "service_life": 20,
-    "cost": 250
-}
\ No newline at end of file
diff --git a/Component/data/StaticInverter/INVPV.json b/Component/data/StaticInverter/INVPV.json
deleted file mode 100644
index 1ea3912fe8a18eff3970f1130c13e0854794f07c..0000000000000000000000000000000000000000
--- a/Component/data/StaticInverter/INVPV.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-    "efficiency": 0.9694,
-    "factor_repair_effort": 0.01,
-    "factor_servicing_effort": 0.015,
-    "servicing_effort_hours": 20,
-    "service_life": 20,
-    "cost": 242
-}
\ No newline at end of file
diff --git a/Component/load_component_library.py b/Component/load_component_library.py
deleted file mode 100644
index ac5740e3fe5c9d0c81983cffa893f022f94bc02f..0000000000000000000000000000000000000000
--- a/Component/load_component_library.py
+++ /dev/null
@@ -1,46 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import importlib
-import inspect
-import os
-import sys
-
-def load_component_library(root):
-    package = 'Model_Library.Component.model'
-    sys.path.append(root)
-    module_paths = []
-    classes = {}
-    for directory, _, files in os.walk(root):
-        for file in files:
-            if file.endswith('.py') and file != '__init__.py':
-                module_path = directory
-                module_path = module_path.replace(root, '')
-                module_path = module_path.replace(os.sep, '.')
-                if module_path != '' and module_path not in module_paths:
-                    module_paths.append(module_path)
-                    module = importlib.import_module(module_path, package=package)
-                    for name, klass in inspect.getmembers(module, inspect.isclass):
-                        classes[name] = klass
-    return classes
diff --git a/Component/model/AbstractComponent.py b/Component/model/AbstractComponent.py
deleted file mode 100644
index 61adcc352d06deb38c20236e66a9b19efe475010..0000000000000000000000000000000000000000
--- a/Component/model/AbstractComponent.py
+++ /dev/null
@@ -1,115 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-import pandas as pd
-from enum import Enum
-
-class ComponentKind(Enum):
-    ALL = 1,
-    BASE = 2,
-    BUSBAR = 3,
-    CONSUMPTION = 4,
-    GENERATION = 5,
-    GRID = 6,
-    STORAGE = 7
-
-class ComponentCommodity(Enum):
-    ALL = 1,
-    ELECTRICITY = 2,
-    HEAT = 3,
-    GAS = 4,
-    COLD = 5,
-    HYDROGEN = 6
-
-class AbstractComponent:
-
-    def __init__(self, name, type, commodity_1, commodity_2, commodity_3, commodity_4, min_size, max_size, flexible, dynamic):
-        self.name = name
-        self.type = type
-        self.input_commodity_1 = commodity_1
-        self.input_commodity_2 = commodity_2
-        self.output_commodity_1 = commodity_3
-        self.output_commodity_2 = commodity_4
-        self.min_size = min_size
-        self.max_size = max_size
-        self.flexible = flexible
-        self.dynamic = dynamic
-        self.flex = pd.DataFrame()
-        self.temp_flex = pd.DataFrame()
-
-    def get_input_output_commodities(self):
-        return (self.input_commodity_1, self.input_commodity_2, self.output_commodity_1, self.output_commodity_2)
-    
-    def get_base_variable_names(self):
-        pass
-
-    def build_model(self, model, configuration):
-        prefix = (self.name,)
-        self._add_variables(model, prefix)
-        self._add_constraints(model, prefix, configuration)
-
-    def _add_variables(self, model, prefix):
-        pass
-
-    def _add_constraints(sefl, model, prefix, configuration):
-        pass
-    
-    def add_capital_costs(self, model, prosumer_configuration):
-        return None
-    
-    def add_operating_costs(self, model, configuration):
-        return None
-
-    def add_co2_emissions(self, model, configuration):
-        # heat https://www.uni-goettingen.de/de/document/download/e778b3727c64ed6f962e4c1cea80fa2f.pdf/CO2%20Emissionen_2016.pdf
-        # https://www.umweltbundesamt.de/presse/pressemitteilungen/bilanz-2019-co2-emissionen-pro-kilowattstunde-strom
-        pass
-
-    def add_peak_power_costs(self, model):
-        return None
-
-    # Flexibility stuff
-
-    def calc_flex_comp(self, results, dynamic, init_results):
-        pass
-
-    def check_limits(self, input_flows, output_flows, results, dynamic):
-        pass
-
-    def adjust_flex_with_efficiency(self, results, dynamic):
-        """
-        Function is only called for not flexible components. BaseComponent overwrites this function. BaseConsumption and BaseGrid do not have a efficiency so the just copy over the values.
-        Adjust the DF values from other components.
-        The theoretically available DF has to be adjusted according to the components efficiency.
-
-        Parameters
-        ----------
-        results: result df from initial scheduling
-        input_profiles: not needed here
-        T: time steps of current RH interval
-        """
-        self.flex_pos_inc_old = self.temp_flex['flex_pos_inc'].copy()
-        self.flex_neg_dec_old = self.temp_flex['flex_neg_dec'].copy()
-
-    def calc_correction_factors(self, time_steps, c_f_df, results):
-        return c_f_df
diff --git a/Component/model/BaseBusBar.py b/Component/model/BaseBusBar.py
deleted file mode 100644
index 96d78981ce02b29c9d56f97f508e2b92448c58ba..0000000000000000000000000000000000000000
--- a/Component/model/BaseBusBar.py
+++ /dev/null
@@ -1,63 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import AbstractComponent, ComponentKind, ComponentCommodity
-
-import pyomo.environ as pyo
-
-class BaseBusBar(AbstractComponent):
-
-    def __init__(self, name, type, commodity, dynamic):
-        super().__init__(name=name,
-                         type=type,
-                         commodity_1=commodity,
-                         commodity_2=None,
-                         commodity_3=commodity,
-                         commodity_4=None,
-                         min_size=None,
-                         max_size=None,
-                         flexible=False,
-                         dynamic=dynamic)
-        
-        self.dynamic = dynamic
-
-    def match(self, kind=ComponentKind.ALL, commodity=ComponentCommodity.ALL):
-        match_kind = kind == ComponentKind.ALL or kind == ComponentKind.BUSBAR
-        match_commodity = commodity == ComponentCommodity.ALL or \
-            commodity == self.commodity or \
-            (isinstance(commodity, list) and self.commodity in commodity)
-        return match_kind and match_commodity
-    
-    def get_base_variable_names(self):
-        return [((self.name, 'input_1'), 'T'), ((self.name, 'output_1'), 'T')]
-
-    def _add_variables(self, model, prefix):
-        model.add(prefix + ('input_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_1',), pyo.Var(model.T, bounds=(0, None)))
-
-    def _add_constraints(self, model, prefix, configuration):
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] == model.component_dict[prefix + ('input_1',)][t]
-        model.add(prefix + ('conser',), pyo.Constraint(model.T, rule = rule))
diff --git a/Component/model/BaseComponent.py b/Component/model/BaseComponent.py
deleted file mode 100644
index bd72ee66ae07efdcd9584c120bfc2c103cedc048..0000000000000000000000000000000000000000
--- a/Component/model/BaseComponent.py
+++ /dev/null
@@ -1,292 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import AbstractComponent, ComponentKind, ComponentCommodity
-
-import warnings
-import math
-import pyomo.environ as pyo
-from Model_Library.Prosumer.scripts import calc_annuity_vdi2067
-import pandas as pd
-import json
-import os
-
-class BaseComponent(AbstractComponent):
-
-    def __init__(self, name, type, commodity_1, commodity_2, commodity_3, commodity_4, configuration, model_directory, dynamic):
-        super().__init__(name=name, 
-                         type=type,
-                         commodity_1=commodity_1,
-                         commodity_2=commodity_2,
-                         commodity_3=commodity_3,
-                         commodity_4=commodity_4,
-                         min_size=configuration['min_size'],
-                         max_size=configuration['max_size'],
-                         flexible=False,
-                         dynamic=dynamic)
-
-        with open((os.path.join(model_directory, configuration['type'], configuration['model'] + '.json'))) as f:
-            model = json.load(f)
-        if type not in ['BHKW', 'CHP', 'DynamicBiInverter', 'DynamicInverter', 'PEMFuelCell', 'PVGenerator', 'SolarThermalCollector']:
-            if 'efficiency' in model:
-                self.efficiency = model['efficiency']
-            else:
-                warnings.warn(f"No column for efficiency for component {self.name}!")
-        if 'service_life' in model:
-            self.life = model['service_life']
-        else:
-            warnings.warn(f"No column for service_life for component {self.name}!")
-        if 'cost' in model:
-            self.cost = model['cost']
-        else:
-            warnings.warn(f"No column for cost for component {self.name}!")
-        if 'factor_repair_effort' in model:
-            self.f_inst = model['factor_repair_effort']
-        else:
-            warnings.warn(f"No column for factor_repair_effort for component {self.name}!")
-        if 'factor_servicing_effort' in model:
-            self.f_w = model['factor_servicing_effort']
-        else:
-            warnings.warn(f"No column for factor_servicing_effort for component {self.name}!")
-        if 'servicing_effort_hours' in model:
-            self.f_op = model['servicing_effort_hours']
-        else:
-            warnings.warn(f"No column for servicing_effort_hours for component {self.name}!")
-
-    def match(self, kind=ComponentKind.ALL, commodity=ComponentCommodity.ALL):
-        match_kind = kind == ComponentKind.ALL or kind == ComponentKind.BASE
-        match_commodity = commodity == ComponentCommodity.ALL or \
-            commodity == self.input_commodity_1 or \
-            commodity == self.input_commodity_2 or \
-            commodity == self.output_commodity_1 or \
-            commodity == self.output_commodity_2 or \
-            (isinstance(commodity, list) and \
-                (self.input_commodity_1 in commodity or \
-                self.input_commodity_2 in commodity or \
-                self.output_commodity_1 in commodity or \
-                self.output_commodity_2 in commodity))
-        return match_kind and match_commodity
-            
-    def get_base_variable_names(self):
-        return self.get_power_variable_names() + self.get_input_output_variable_names()
-    
-    def get_power_variable_names(self):
-        return [((self.name, 'capacity'), None)]
-    
-    def get_input_output_variable_names(self):
-        return [((self.name, 'input_1'), 'T'), ((self.name, 'output_1'), 'T')]
-
-    def _add_variables(self, model, prefix):
-        self.add_power_variables(model, prefix)
-        self.add_input_output_variables(model, prefix)
-
-    def add_power_variables(self, model, prefix):
-        lb_capacity = self.min_size
-        ub_capacity = self.max_size
-        if math.isinf(self.min_size):
-            lb_capacity = None
-        if math.isinf(self.max_size):
-            ub_capacity = None
-
-        model.add(prefix + ('capacity',), pyo.Var(bounds=(lb_capacity, ub_capacity)))
-    
-    def add_input_output_variables(self, model, prefix):
-        model.add(prefix + ('input_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_1',), pyo.Var(model.T, bounds=(0, None)))
-    
-    def _add_constraints(self, model, prefix, configuration):
-        self._constraint_capacity(model, prefix)
-        self._constraint_conser(model, prefix, configuration)
-
-        if 'fix_sizing' in configuration:
-            model.add(prefix + ('fix_capacity',), pyo.Constraint(expr = model.component_dict[prefix + ('capacity',)] == configuration['fix_sizing']['values'][self.name]))
-
-    def _constraint_capacity(self, model, prefix):
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_1',)][t] <= model.component_dict[prefix + ('capacity',)]
-        model.add(prefix + ('capacity_cons',), pyo.Constraint(model.T, rule = rule))
-
-    def _constraint_conser(self, model, prefix, configuration):
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] == model.component_dict[prefix + ('input_1',)][t] * self.efficiency
-        model.add(prefix + ('conser',), pyo.Constraint(model.T, rule = rule))
-    
-    def add_capital_costs(self, model, prosumer_configuration):
-        prefix = (self.name,)
-        
-        model.add(prefix + ('capital_cost',), pyo.Var(bounds=(0, None)))
-        
-        capital_cost = calc_annuity_vdi2067.run(prosumer_configuration['planning_horizon'],
-                                               self.life,
-                                               self.cost,
-                                               model.component_dict[prefix + ('capacity',)],
-                                               self.f_inst,
-                                               self.f_w,
-                                               self.f_op,
-                                               prosumer_configuration['yearly_interest'])
-        model.add(prefix + ('capital_cost_cons',), pyo.Constraint(expr = model.component_dict[prefix + ('capital_cost',)] == capital_cost))
-        return prefix + ('capital_cost',)
-
-    # Flexibility stuff
-
-    def check_limits(self, input_flows, output_flows, results, dynamic):
-        """
-        ToDO JBR: clean
-        calculate the consequences of maximal positive and negative flexibility. Positive flexibility can
-        increase  (pos_flex_inc) and decrease (pos_flex_dec) the inputs of an Inverter.
-        The negative flexibility increases/decreases the outputs of the Inverter.
-        Therefore, they have to be transformed to input values.
-
-        Parameters
-        ----------
-        input_flows: incoming power flows
-        output_flows: outgoing power flows
-        results: df with iniital prosumer results. Needed for obtaining the initial component size.
-        time_steps: time steps of current RH interval
-        """
-
-        if not hasattr(self, 'efficiency'):
-            return
-        if (self.name, 'capacity') not in results:
-            return
-
-        # flexibility that was already translated to the input side by def adjust_with_efficiency
-        pos_flex_dec = self.temp_flex['flex_pos_dec'][dynamic.time_steps()]
-        neg_flex_inc = self.temp_flex['flex_neg_inc'][dynamic.time_steps()]
-
-        power_limit = 1 * results[(self.name, 'capacity')]  # excluding the excess power for safety
-
-        # ----------------CHECK POWER--------------------
-
-        # upper limit for pos-inc df
-        upper_limit_pos_inc = power_limit - input_flows
-        upper_limit_pos_inc.loc[upper_limit_pos_inc < 0] = 0  # account for the case input_flows > power_limit (excess)
-
-        # max pos-inc df on input side of converter
-        max_pos_inc = self.flex_pos_inc_old.combine(upper_limit_pos_inc, min)
-
-        # transferring the maximal allower pos-inc flex to the output side as it lays nearer to the grid
-        max_pos_inc_out = max_pos_inc * self.efficiency
-        max_pos_inc_out.loc[max_pos_inc_out < 0] = 0
-
-        # upper limit for neg-inc df
-        upper_limit_neg_inc = power_limit - input_flows
-        upper_limit_neg_inc.loc[upper_limit_neg_inc < 0] = 0  # account for the case input_flows > power_limit (excess)
-
-        # max neg-inc df on input side of converter
-        max_neg_inc = neg_flex_inc.combine(upper_limit_neg_inc, min)
-
-        # negative flex doesn't need conversion as it already has been
-        max_neg_inc_out = max_neg_inc
-
-        self.temp_flex['flex_pos_inc'] = max_pos_inc_out  # excess has to be transformed to inv output value
-        # self.temp_flex['flex_pos_inc'].loc[self.temp_flex['flex_pos_inc'] < 0] = 0
-        self.temp_flex.loc[self.temp_flex['flex_pos_inc'] < 0, 'flex_pos_inc'] = 0
-        self.temp_flex['flex_neg_inc'] = max_neg_inc_out
-        # self.temp_flex['flex_neg_inc'].loc[self.temp_flex['flex_neg_inc'] < 0] = 0
-        self.temp_flex.loc[self.temp_flex['flex_neg_inc'] < 0, 'flex_neg_inc'] = 0
-
-        self.temp_flex['flex_pos'] = self.temp_flex['flex_pos_inc'] + self.temp_flex['flex_pos_dec']
-        self.temp_flex['flex_neg'] = self.temp_flex['flex_neg_inc'] + self.temp_flex['flex_neg_dec']
-
-    def adjust_flex_with_efficiency(self, results, dynamic):
-        """
-        ToDO JBR: clean
-        Adjust the DF values from other components.
-        The theoretically available DF has to be adjusted according to the components efficiency.
-
-        Parameters
-        ----------
-        results: result df from initial scheduling
-        input_profiles: not needed here
-        T: time steps of current RH interval
-        """
-        if not hasattr(self, 'efficiency'):
-            self.flex_pos_inc_old = self.temp_flex['flex_pos_inc'].copy()
-            self.flex_neg_dec_old = self.temp_flex['flex_neg_dec'].copy()
-            return
-
-        # ToDo: Is this correct? Efficiency effects energy as well because in the end the community has to know how much
-        # energy is still available after each flex activation. e.g.: If the efficiency would not affect the energy,
-        # the community would always underestimate the effect of a positive flexibility activation on the energy amount
-        # left
-
-        self.flex_pos_inc_old = self.temp_flex['flex_pos_inc'].copy()
-        self.flex_neg_dec_old = self.temp_flex['flex_neg_dec'].copy()
-
-        # ------------EFFICIENCY--------------
-        # the idea here is that the flexibility that comes out of the inverter is
-        # related to the efficiency of the inverter.
-        # So considering the power flow of the flexibility type,
-        # the net flexibility gets higher or lower compared to the flexibility at the origin component
-
-        # ------POSITIVE--------------
-        # increasing positive flex means lower flex at grid
-        self.temp_flex['flex_pos_inc'] = self.temp_flex['flex_pos_inc'] * self.efficiency
-        # decreasing positive flex means higher flex at grid
-        self.temp_flex['flex_pos_dec'] = self.temp_flex['flex_pos_dec'] / self.efficiency
-
-        self.temp_flex['flex_pos'] = self.temp_flex['flex_pos_inc'] + self.temp_flex['flex_pos_dec']
-
-        # ------NEGATIVE--------------
-        # increasing negative flex means higher flex at grid
-        self.temp_flex['flex_neg_inc'] = self.temp_flex['flex_neg_inc'] / self.efficiency
-        # decreasing neg flex means lower flex at grid
-        self.temp_flex['flex_neg_dec'] = self.temp_flex['flex_neg_dec'] * self.efficiency
-
-        self.temp_flex['flex_neg'] = (self.temp_flex['flex_neg_inc'] + self.temp_flex['flex_neg_dec'])
-
-    def calc_correction_factors(self, time_steps, c_f_df, results):
-        """
-        Calculates the correction factors (CF).
-        A general component will only have an input and output efficiency.
-        If not, no calculation will be done and the cf df will not be changed.
-
-        Parameters
-        ----------
-        T: time steps of current RH interval
-        c_f_df: correction factors of all components that have aleary been searched
-        results: results of initial scheduling to get comp. size
-        input_profiles: not necessary in this method.
-
-        Returns
-        -------
-        c_f_df: updated CF dataframe
-        """
-        try:
-            c_f_dch = pd.Series(data=1/self.output_efficiency, index=time_steps)
-            c_static_dch =pd.Series(data=0, index=time_steps)
-            c_f_cha = pd.Series(data=self.input_efficiency, index=time_steps)
-            c_static_cha = pd.Series(data=0, index=time_steps)
-
-            c_f_df['c_f_dch'] = c_f_dch * c_f_df['c_f_dch'],
-            c_f_df['c_static_dch'] = c_static_dch + c_f_df['c_static_dch']
-
-            c_f_df['c_f_cha'] = c_f_cha * c_f_df['c_f_cha']
-            c_f_df['c_static_cha'] = c_static_cha + c_f_df['c_static_cha']
-        except:
-            pass
-
-        return c_f_df
diff --git a/Component/model/BaseConsumption.py b/Component/model/BaseConsumption.py
deleted file mode 100644
index f2c1e6ae5bc579714b3b6c532201a6095f63b673..0000000000000000000000000000000000000000
--- a/Component/model/BaseConsumption.py
+++ /dev/null
@@ -1,92 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import AbstractComponent, ComponentKind, ComponentCommodity
-from Tooling.predictor.Predictor import Predictor
-from Tooling.dynamics.Dynamic import resample
-
-import pyomo.environ as pyo
-
-class BaseConsumption(AbstractComponent):
-
-    def __init__(self, name, type, commodity, configuration, profiles, dynamic):
-        super().__init__(name=name,
-                         type=type,
-                         commodity_1=commodity,
-                         commodity_2=None,
-                         commodity_3=None,
-                         commodity_4=None,
-                         min_size=None,
-                         max_size=None,
-                         flexible=None,
-                         dynamic=dynamic)
-        
-        self.commodity = commodity
-
-        if configuration['type'] != 'DrivingConsumption':
-            if isinstance(configuration['consumption'], str):
-                self.consumption = resample(profiles[configuration['consumption']][0], profiles[configuration['consumption']][1], dynamic)
-            elif isinstance(configuration['consumption'], dict):
-                self.consumption = Predictor(resample(profiles[configuration['consumption']['profile']][0], profiles[configuration['consumption']['profile']][1], dynamic), configuration['consumption']['type'], configuration['consumption']['method'], dynamic)
-
-    def match(self, kind=ComponentKind.ALL, commodity=ComponentCommodity.ALL):
-        match_kind = kind == ComponentKind.ALL or kind == ComponentKind.CONSUMPTION
-        match_commodity = commodity == ComponentCommodity.ALL or \
-            commodity == self.commodity or \
-            (isinstance(commodity, list) and self.commodity in commodity)
-        return match_kind and match_commodity
-
-    def get_base_variable_names(self):
-        return [((self.name, 'input_1'), 'T')]
-
-    def _add_variables(self, model, prefix):
-        model.add(prefix + ('input_1',), pyo.Var(model.T, bounds=(0, None)))
-
-    def _add_constraints(self, model, prefix, configuration):
-        model.add(prefix + ('consumption',), pyo.Param(model.T, mutable = True))
-
-        if isinstance(self.consumption, Predictor):
-            if 'predict' in configuration:
-                consumption = self.consumption.predict(list(model.T))
-            else:
-                consumption = resample(self.consumption.profile, self.dynamic, model.dynamic)
-        else:
-            consumption = resample(self.consumption, self.dynamic, model.dynamic)
-
-        model.set_value(prefix + ('consumption',), consumption)
-
-        if 'consumption_slack' in configuration and self.match(commodity=configuration['consumption_slack']['commodity']):
-            model.add(prefix + ('s_consumption',), pyo.Var(model.T, bounds=(0, None)))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('input_1',)][t] + model.component_dict[prefix + ('s_consumption',)][t] == model.component_dict[prefix + ('consumption',)][t]
-            model.add(prefix + ('consumption_cons',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('s_consumption',)][t] * configuration["consumption_slack"]["strategy_factor"]
-            model.add_objective_term(pyo.Expression(model.T, rule=rule))
-        else:
-            def rule(m, t):
-                return model.component_dict[prefix + ('input_1',)][t] == model.component_dict[prefix + ('consumption',)][t]
-            model.add(prefix + ('consumption_cons',), pyo.Constraint(model.T, rule = rule))
\ No newline at end of file
diff --git a/Component/model/BaseGeneration.py b/Component/model/BaseGeneration.py
deleted file mode 100644
index 021758f3006b4433a0e742f494e5d90bf7a9da8b..0000000000000000000000000000000000000000
--- a/Component/model/BaseGeneration.py
+++ /dev/null
@@ -1,81 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import AbstractComponent, ComponentKind, ComponentCommodity
-from Tooling.predictor.Predictor import Predictor
-from Tooling.dynamics.Dynamic import resample
-
-import pyomo.environ as pyo
-
-class BaseGeneration(AbstractComponent):
-
-    def __init__(self, name, type, commodity, configuration, profiles, dynamic):
-        super().__init__(name=name,
-                         type=type,
-                         commodity_1=None,
-                         commodity_2=None,
-                         commodity_3=commodity,
-                         commodity_4=None,
-                         min_size=None,
-                         max_size=None,
-                         flexible=None,
-                         dynamic=dynamic)
-        
-        self.commodity = commodity
-
-        if configuration['type'] != 'ElectricalGeneration':
-            if isinstance(configuration['generation'], str):
-                self.generation = resample(profiles[configuration['generation']][0], profiles[configuration['generation']][1], dynamic)
-            elif isinstance(configuration['generation'], dict):
-                self.generation = Predictor(resample(profiles[configuration['generation']['profile']][0], profiles[configuration['generation']['profile']][1], dynamic), configuration['generation']['type'], configuration['generation']['method'], dynamic)
-
-    def match(self, kind=ComponentKind.ALL, commodity=ComponentCommodity.ALL):
-        match_kind = kind == ComponentKind.ALL or kind == ComponentKind.GENERATION
-        match_commodity = commodity == ComponentCommodity.ALL or \
-            commodity == self.commodity or \
-            (isinstance(commodity, list) and self.commodity in commodity)
-        return match_kind and match_commodity
-
-    def get_base_variable_names(self):
-        return [((self.name, 'output_1'), 'T')]
-    
-    def _add_variables(self, model, prefix):
-        model.add(prefix + ('output_1',), pyo.Var(model.T, bounds=(0, None)))
-
-    def _add_constraints(self, model, prefix, configuration):
-        model.add(prefix + ('generation',), pyo.Param(model.T, mutable = True))
-
-        if isinstance(self.generation, Predictor):
-            if 'predict' in configuration:
-                generation = self.generation.predict(list(model.T))
-            else:
-                generation = resample(self.generation.profile, self.dynamic, model.dynamic)
-        else:
-            generation = resample(self.generation, self.dynamic, model.dynamic)
-
-        model.set_value(prefix + ('generation',), generation)
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] == model.component_dict[prefix + ('generation',)][t]
-        model.add(prefix + ('generation_cons',), pyo.Constraint(model.T, rule = rule))
diff --git a/Component/model/BaseGrid.py b/Component/model/BaseGrid.py
deleted file mode 100644
index 48fad7e01a5d140b47978470acdb037e9dfdf64a..0000000000000000000000000000000000000000
--- a/Component/model/BaseGrid.py
+++ /dev/null
@@ -1,228 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import AbstractComponent, ComponentKind, ComponentCommodity
-from Tooling.predictor.Predictor import Predictor
-from Tooling.dynamics.Dynamic import resample
-
-import warnings
-import math
-import pyomo.environ as pyo
-from Model_Library.Prosumer.scripts import calc_annuity_vdi2067
-import json
-import os
-
-class BaseGrid(AbstractComponent):
-
-    def __init__(self, name, type, commodity, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type=type,
-                         commodity_1=commodity,
-                         commodity_2=None,
-                         commodity_3=commodity,
-                         commodity_4=None,
-                         min_size=configuration['min_size'],
-                         max_size=configuration['max_size'],
-                         flexible=False,
-                         dynamic=dynamic)
-        
-        self.commodity = commodity
-        
-        with open((os.path.join(model_directory, configuration['type'], configuration['model'] + '.json'))) as f:
-            model = json.load(f)
-        if 'service_life' in model:
-            self.life = model['service_life']
-        else:
-            warnings.warn(f"No column for service_life for component {self.name}!")
-        if 'cost' in model:
-            self.cost = model['cost']
-        else:
-            warnings.warn(f"No column for cost for component {self.name}!")
-        if 'factor_repair_effort' in model:
-            self.f_inst = model['factor_repair_effort']
-        else:
-            warnings.warn(f"No column for factor_repair_effort for component {self.name}!")
-        if 'factor_servicing_effort' in model:
-            self.f_w = model['factor_servicing_effort']
-        else:
-            warnings.warn(f"No column for factor_servicing_effort for component {self.name}!")
-        if 'servicing_effort_hours' in model:
-            self.f_op = model['servicing_effort_hours']
-        else:
-            warnings.warn(f"No column for servicing_effort_hours for component {self.name}!")
-        
-        if 'price' in configuration:
-            if isinstance(configuration['price'], float) or isinstance(configuration['injection_price'], int):
-                self.price = configuration['price']
-            elif isinstance(configuration['price'], str):
-                self.price = resample(profiles[configuration['price']][0], profiles[configuration['price']][1], dynamic)
-            elif isinstance(configuration['price'], dict):
-                self.price = Predictor(resample(profiles[configuration['price']['profile']][0], profiles[configuration['price']['profile']][1], dynamic), configuration['price']['type'], configuration['price']['method'], dynamic)
-        else:
-            self.price = 0
-        if 'injection_price' in configuration:
-            if isinstance(configuration['injection_price'], float) or isinstance(configuration['injection_price'], int):
-                self.injection_price = configuration['injection_price']
-            elif isinstance(configuration['injection_price'], str):
-                self.injection_price = resample(profiles[configuration['injection_price']][0], profiles[configuration['injection_price']][1], dynamic)
-            elif isinstance(configuration['injection_price'], dict):
-                self.injection_price = Predictor(resample(profiles[configuration['injection_price']['profile']][0], profiles[configuration['injection_price']['profile']][1], dynamic), configuration['injection_price']['type'], configuration['injection_price']['method'], dynamic)
-        else:
-            self.injection_price = 0
-
-    def match(self, kind=ComponentKind.ALL, commodity=ComponentCommodity.ALL):
-        match_kind = kind == ComponentKind.ALL or kind == ComponentKind.GRID
-        match_commodity = commodity == ComponentCommodity.ALL or \
-            commodity == self.commodity or \
-            (isinstance(commodity, list) and self.commodity in commodity)
-        return match_kind and match_commodity
-    
-    def get_base_variable_names(self):
-        return [((self.name, 'capacity'), None), ((self.name, 'input_1'), 'T'), ((self.name, 'output_1'), 'T')]
-    
-    def _add_variables(self, model, prefix):
-        lb_capacity = self.min_size
-        ub_capacity = self.max_size
-        if math.isinf(self.min_size):
-            lb_capacity = None
-        if math.isinf(self.max_size):
-            ub_capacity = None
-
-        model.add(prefix + ('capacity',), pyo.Var(bounds=(lb_capacity, ub_capacity)))
-
-        model.add(prefix + ('input_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_1',), pyo.Var(model.T, bounds=(0, None)))
-
-    def _add_constraints(self, model, prefix, configuration):
-        if 'fix_sizing' in configuration:
-            model.add(prefix + ('fix_capacity',), pyo.Constraint(expr = model.component_dict[prefix + ('capacity',)] == configuration['fix_sizing']['values'][self.name]))
-        
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_1',)][t] <= model.component_dict[prefix + ('capacity',)]
-        model.add(prefix + ('capacity_input_cons',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] <= model.component_dict[prefix + ('capacity',)]
-        model.add(prefix + ('capacity_output_cons',), pyo.Constraint(model.T, rule = rule))
-
-        if 'grid_slack' in configuration and self.match(commodity=configuration['grid_slack']['commodity']):
-            model.add(prefix + ('s_p_export',), pyo.Var(model.T, bounds=(0, None)))
-            model.add(prefix + ('s_n_export',), pyo.Var(model.T, bounds=(0, None)))
-            model.add(prefix + ('s_p_import',), pyo.Var(model.T, bounds=(0, None)))
-            model.add(prefix + ('s_n_import',), pyo.Var(model.T, bounds=(0, None)))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('input_1',)][t] == configuration['grid_slack']['values'][self.name][0][t] + model.component_dict[prefix + ('s_p_export',)][t] - model.component_dict[prefix + ('s_n_export',)][t]
-            model.add(prefix + ('s_export',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('output_1',)][t] == configuration['grid_slack']['values'][self.name][1][t] + model.component_dict[prefix + ('s_p_import',)][t] - model.component_dict[prefix + ('s_n_import',)][t]
-            model.add(prefix + ('s_import',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('s_p_export',)][t] * configuration['grid_slack']['strategy_factor']
-            model.add_objective_term(pyo.Expression(model.T, rule=rule))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('s_n_export',)][t] * configuration['grid_slack']['strategy_factor']
-            model.add_objective_term(pyo.Expression(model.T, rule=rule))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('s_p_import',)][t] * configuration['grid_slack']['strategy_factor']
-            model.add_objective_term(pyo.Expression(model.T, rule=rule))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('s_n_import',)][t] * configuration['grid_slack']['strategy_factor']
-            model.add_objective_term(pyo.Expression(model.T, rule=rule))
-
-        if 'grid_fix' in configuration and self.match(commodity=configuration['grid_fix']['commodity']):
-            def rule(m, t):
-                return model.component_dict[prefix + ('input_1',)][t] == configuration['grid_fix']['values'][self.name][0][t]
-            model.add(prefix + ('fix_export',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('output_1',)][t] == configuration['grid_fix']['values'][self.name][1][t]
-            model.add(prefix + ('fix_import',), pyo.Constraint(model.T, rule = rule))
-    
-    def add_capital_costs(self, model, prosumer_configuration):
-        prefix = (self.name,)
-
-        model.add(prefix + ('capital_cost',), pyo.Var(bounds=(0, None)))
-        
-        capital_cost = calc_annuity_vdi2067.run(prosumer_configuration['planning_horizon'],
-                                               self.life,
-                                               self.cost,
-                                               model.component_dict[prefix + ('capacity',)],
-                                               self.f_inst,
-                                               self.f_w,
-                                               self.f_op,
-                                               prosumer_configuration['yearly_interest'])
-        model.add(prefix + ('capital_cost_cons',), pyo.Constraint(expr = model.component_dict[prefix + ('capital_cost',)] == capital_cost))
-        return prefix + ('capital_cost',)
-
-    def add_operating_costs(self, model, configuration):
-        prefix = (self.name,)
-        
-        if isinstance(self.price, float) or isinstance(self.price, int):
-            cost_function = lambda t: model.component_dict[prefix + ('output_1',)][t] * self.price
-        else:
-            model.add(prefix + ('price',), pyo.Param(model.T, mutable = True))
-
-            if isinstance(self.price, Predictor):
-                if 'predict' in configuration:
-                    price = self.price.predict(list(model.T))
-                else:
-                    price = resample(self.price.profile, self.dynamic, model.dynamic)
-            else :
-                price = resample(self.price, self.dynamic, model.dynamic)
-            
-            model.set_value(prefix + ('price',), price)
-        
-            cost_function = lambda t: model.component_dict[prefix + ('output_1',)][t] * model.component_dict[prefix + ('price',)][t]
-        
-        if isinstance(self.injection_price, float) or isinstance(self.injection_price, int):
-            revenue_function = lambda t: model.component_dict[prefix + ('input_1',)][t] * self.injection_price
-        else:
-            model.add(prefix + ('injection_price',), pyo.Param(model.T, mutable = True))
-
-            if isinstance(self.injection_price, Predictor):
-                if 'predict' in configuration:
-                    injection_price = self.injection_price.predict(list(model.T))
-                else:
-                    injection_price = resample(self.injection_price.profile, self.dynamic, model.dynamic)
-            else :
-                injection_price = resample(self.injection_price, self.dynamic, model.dynamic)
-            
-            model.set_value(prefix + ('injection_price',), injection_price)
-        
-            revenue_function = lambda t: model.component_dict[prefix + ('input_1',)][t] * model.component_dict[prefix + ('injection_price',)][t]
-
-        model.add(prefix + ('operating_cost',), pyo.Var(model.T))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('operating_cost',)][t] == (cost_function(t) - revenue_function(t)) * model.step_size(t)
-        model.add(prefix + ('operation_cost_cons',), pyo.Constraint(model.T, rule = rule))
-
-        return prefix + ('operating_cost',)
diff --git a/Component/model/BaseStorage.py b/Component/model/BaseStorage.py
deleted file mode 100644
index 49535b4643dc1addd9bdbe98d4dd6ee94a7bd6fa..0000000000000000000000000000000000000000
--- a/Component/model/BaseStorage.py
+++ /dev/null
@@ -1,380 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-from Model_Library.Component.model.AbstractComponent import AbstractComponent, ComponentKind, ComponentCommodity
-
-import warnings
-import math
-import pyomo.environ as pyo
-from Model_Library.Prosumer.scripts import calc_annuity_vdi2067
-import pandas as pd
-import json
-import os
-
-class BaseStorage(AbstractComponent):
-
-    def __init__(self, name, type, commodity, configuration, model_directory, dynamic):
-        super().__init__(name=name,
-                         type=type,
-                         commodity_1=commodity,
-                         commodity_2=None,
-                         commodity_3=commodity,
-                         commodity_4=None,
-                         min_size=configuration['min_size'],
-                         max_size=configuration['max_size'],
-                         flexible=True,
-                         dynamic=dynamic)
-        
-        self.commodity = commodity
-
-        with open((os.path.join(model_directory, configuration['type'], configuration['model'] + '.json'))) as f:
-            model = json.load(f)
-        if 'input_efficiency' in model:
-            self.input_efficiency = model['input_efficiency']
-        else:
-            warnings.warn(f"No column for input_efficiency for component {self.name}!")
-        if 'e2p_in' in model:
-            self.e2p_in = model['e2p_in']
-        else:
-            warnings.warn(f"No column for e2p_in for component {self.name}!")
-        if 'output_efficiency' in model:
-            self.output_efficiency = model['output_efficiency']
-        else:
-            warnings.warn(f"No column for output_efficiency for component {self.name}!")
-        if 'e2p_out' in model:
-            self.e2p_out = model['e2p_out']
-        else:
-            warnings.warn(f"No column for e2p_out for component {self.name}!")
-        if 'min_soe' in model:
-            self.min_soe = model['min_soe']
-        else:
-            warnings.warn(f"No column for min_soe for component {self.name}!")
-        if 'max_soe' in model:
-            self.max_soe = model['max_soe']
-        else:
-            warnings.warn(f"No column for max_soe for component {self.name}!")
-        if 'init_soe' in model:
-            self.init_soe = model['init_soe']
-        else:
-            warnings.warn(f"No column for init_soe for component {self.name}!")
-        if 'service_life' in model:
-            self.life = model['service_life']
-        else:
-            warnings.warn(f"No column for service_life for component {self.name}!")
-        if 'cost' in model:
-            self.cost = model['cost']
-        else:
-            warnings.warn(f"No column for cost for component {self.name}!")
-        if 'factor_repair_effort' in model:
-            self.f_inst = model['factor_repair_effort']
-        else:
-            warnings.warn(f"No column for factor_repair_effort for component {self.name}!")
-        if 'factor_servicing_effort' in model:
-            self.f_w = model['factor_servicing_effort']
-        else:
-            warnings.warn(f"No column for factor_servicing_effort for component {self.name}!")
-        if 'servicing_effort_hours' in model:
-            self.f_op = model['servicing_effort_hours']
-        else:
-            warnings.warn(f"No column for servicing_effort_hours for component {self.name}!")
-
-    def match(self, kind=ComponentKind.ALL, commodity=ComponentCommodity.ALL):
-        match_kind = kind == ComponentKind.ALL or kind == ComponentKind.STORAGE
-        match_commodity = commodity == ComponentCommodity.ALL or \
-            commodity == self.commodity or \
-            (isinstance(commodity, list) and self.commodity in commodity)
-        return match_kind and match_commodity
-    
-    def get_base_variable_names(self):
-        return [((self.name, 'capacity'), None), ((self.name, 'energy'), 'T_prime'), ((self.name, 'input_1'), 'T'), ((self.name, 'output_1'), 'T')]
-
-    def _add_variables(self, model, prefix):
-        lb_cap = self.min_size
-        ub_cap = self.max_size
-        if math.isinf(lb_cap):
-            lb_cap = None
-        if math.isinf(ub_cap):
-            ub_cap = None
-
-        model.add(prefix + ('capacity',), pyo.Var(bounds=(lb_cap, ub_cap)))
-
-        model.add(prefix + ('energy',), pyo.Var(model.T_prime, bounds=(0, None)))
-
-        model.add(prefix + ('input_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_1',), pyo.Var(model.T, bounds=(0, None)))
-
-    def _add_constraints(self, model, prefix, configuration):
-        self._constraint_capacity(model, prefix, configuration)
-        self._constraint_conser(model, prefix, configuration)
-        self._constraint_bi_flow(model, prefix)
-
-        if 'fix_sizing' in configuration:
-            model.add(prefix + ('fix_capacity',), pyo.Constraint(expr = model.component_dict[prefix + ('capacity',)] == configuration['fix_sizing']['values'][self.name]))
-
-        if 'storage_slack' in configuration and self.match(commodity=configuration['storage_slack']['commodity']):
-            model.add(prefix + ('s_p_energy',), pyo.Var(model.T, bounds=(0, None)))
-            model.add(prefix + ('s_n_energy',), pyo.Var(model.T, bounds=(0, None)))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('energy',)][t] == configuration['storage_slack']['values'][self.name][t] + model.component_dict[prefix + ('s_p_energy',)][t] - model.component_dict[prefix + ('s_n_energy',)][t]
-            model.add(prefix + ('s_energy',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('s_p_energy',)][t] * configuration['storage_slack']['strategy_factor']
-            model.add_objective_term(pyo.Expression(model.T, rule=rule))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('s_n_energy',)][t] * configuration['storage_slack']['strategy_factor']
-            model.add_objective_term(pyo.Expression(model.T, rule=rule))
-
-    def _constraint_capacity(self, model, prefix, configuration):
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_1',)][t] * self.input_efficiency <= model.component_dict[prefix + ('capacity',)] / self.e2p_in
-        model.add(prefix + ('capacity_input_cons',), pyo.Constraint(model.T, rule = rule))
-        
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] / self.output_efficiency <= model.component_dict[prefix + ('capacity',)] / self.e2p_out
-        model.add(prefix + ('capacity_output_cons',), pyo.Constraint(model.T, rule = rule))
-
-        if 'storage_boundaries' in configuration and self.match(commodity=configuration['storage_boundaries']['commodity']):
-            def rule(m, t):
-                return configuration['storage_boundaries']['min'] * model.component_dict[prefix + ('capacity',)] <= model.component_dict[prefix + ('energy',)][t]
-            model.add(prefix + ('energy_lb',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('energy',)][t] <= configuration['storage_boundaries']['max'] * model.component_dict[prefix + ('capacity',)]
-            model.add(prefix + ('energy_ub',), pyo.Constraint(model.T, rule = rule))
-        else:
-            def rule(m, t):
-                return self.min_soe * model.component_dict[prefix + ('capacity',)] <= model.component_dict[prefix + ('energy',)][t]
-            model.add(prefix + ('energy_lb',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('energy',)][t] <= self.max_soe * model.component_dict[prefix + ('capacity',)]
-            model.add(prefix + ('energy_ub',), pyo.Constraint(model.T, rule = rule))
-
-    def _constraint_conser(self, model, prefix, configuration):
-        if 'storage_connect' in configuration and self.match(commodity=configuration['storage_connect']['commodity']):
-            last_energy = configuration['storage_connect']['values'][self.name]
-            model.add(prefix + ('fix_first_energy',), pyo.Constraint(expr = model.component_dict[prefix + ('energy',)][model.T_prime.first()] == last_energy))
-        else:
-            model.add(prefix + ('fix_first_energy',), pyo.Constraint(expr = model.component_dict[prefix + ('energy',)][model.T_prime.first()] == self.init_soe * model.component_dict[prefix + ('capacity',)]))
-        
-        def rule(m, t):
-            return model.component_dict[prefix + ('energy',)][t] == model.component_dict[prefix + ('energy',)][model.T_prime[model.T_prime.ord(t) - 1]] + model.component_dict[prefix + ('input_1',)][t] * self.input_efficiency * model.step_size(t) - model.component_dict[prefix + ('output_1',)][t] / self.output_efficiency * model.step_size(t)
-        model.add(prefix + ('conser',), pyo.Constraint(model.T, rule = rule))
-
-    def _constraint_bi_flow(self, model, prefix):
-        model.add(prefix + ('z_input',), pyo.Var(model.T, domain=pyo.Binary))
-        model.add(prefix + ('z_output',), pyo.Var(model.T, domain=pyo.Binary))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('z_input',)][t] + model.component_dict[prefix + ('z_output',)][t] <= 1
-        model.add(prefix + ('bi_flow_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_1',)][t] <= model.component_dict[prefix + ('z_input',)][t] * 100000
-        model.add(prefix + ('bi_flow_2',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] <= model.component_dict[prefix + ('z_output',)][t] * 100000
-        model.add(prefix + ('bi_flow_3',), pyo.Constraint(model.T, rule = rule))
-    
-    def add_capital_costs(self, model, prosumer_configuration):
-        prefix = (self.name,)
-        
-        model.add(prefix + ('capital_cost',), pyo.Var(bounds=(0, None)))
-        
-        capital_cost = calc_annuity_vdi2067.run(prosumer_configuration['planning_horizon'],
-                                               self.life,
-                                               self.cost,
-                                               model.component_dict[prefix + ('capacity',)],
-                                               self.f_inst,
-                                               self.f_w,
-                                               self.f_op,
-                                               prosumer_configuration['yearly_interest'])
-        model.add(prefix + ('capital_cost_cons',), pyo.Constraint(expr = model.component_dict[prefix + ('capital_cost',)] == capital_cost))
-        return prefix + ('capital_cost',)
-
-    # Flexibility stuff
-
-    def calc_flex_comp(self, results, dynamic, init_results):
-        """
-        This method calculates the maximal possible flexibility from the battery.
-        This is done by comparing two values:
-            1) The maximal possible CHA/DCH power
-            2) The maximal allowed CHA/DCH power regarding the available power BEFORE the PLANNED CHA/DCH
-        The minimal value is chosen to be the maximal possible CHA/DCH power. To calaculate the available flexibility from this,
-        the PLANNED CHA/DCH rates are added/substracted depending on the type of flexibility.
-        E.g. positive flex: the current DCH power is substracted and the current CHA power is added
-        Parameters
-        ----------
-        flows: flow variables in the prosumer
-        results: result df to get capacity and energy values
-        T: time steps of current RH step
-        """
-
-        cap_storage = results[(self.name, 'capacity')]  # the total theoretical capacity of the comm_batt
-        e_storage = results[(self.name, 'energy')]
-
-        # maximal in/out powers that the comm_batt is capable of
-        max_power_out = cap_storage / self.e2p_out * self.output_efficiency
-        max_power_in = cap_storage / self.e2p_in / self.input_efficiency
-
-        # get the total output/input powers
-        total_dch = pd.Series(data=0.0, index=dynamic.time_steps())
-        total_dch += results[(self.name, 'output_1')] #  self.output_efficiency #here not necessary
-
-        total_cha = pd.Series(data=0.0, index=dynamic.time_steps())
-        total_cha += results[(self.name, 'input_1')] # self.input_efficiency
-
-        # these are the power values that are theoretically available
-        # for flexibility through CHA and DCH (missing curtailment)
-        max_power_dch = max_power_out
-        max_power_cha = max_power_in
-
-        # now the energy restriction is calculated
-        # ToDO @jbr: in the future here should be an extra restriction if some SOE values
-        #  have to be reached in future timesteps
-
-        #e_cha = (self.max_soe * cap_storage - e_storage_shift) / self.input_efficiency
-        #e_dch = (e_storage_shift - self.min_soe * cap_storage) * self.output_efficiency
-        buffer_cha = 0.0
-        buffer_dch = 0.0
-        e_cha = (0.8 * cap_storage - e_storage)  # / self.input_efficiency
-        e_dch = (e_storage - 0.2 * cap_storage)  # * self.output_efficiency
-
-        for t in dynamic.time_steps():
-            if e_dch[t] < -1 * e_cha[t]:
-                print('Fehler zum Zeitpunkt ' + str(t))
-
-        # problem with these ones is: a negative energy means that the energy level is in "restricted" levels.
-        # This may lead to infeasailities as this constraint requires the Prosumer to provide charge/discharge
-        # even if this is not possible. But this should be correct
-
-        e_cha.loc[e_cha < 0] = 0
-        e_dch.loc[e_dch < 0] = 0
-
-        # pos flex:
-        self.temp_flex['flex_pos_inc'] = max_power_dch - total_dch
-        self.temp_flex.loc[self.temp_flex['flex_pos_inc'] < 0, 'flex_pos_inc'] = 0
-        self.temp_flex['flex_pos_dec'] = total_cha
-        self.temp_flex['flex_pos'] = max_power_dch - total_dch + total_cha
-        self.temp_flex.loc[self.temp_flex['flex_pos'] < 0, 'flex_pos'] = 0
-
-        self.temp_flex['e_dch'] = e_dch
-
-        # neg flex: negative signs due to negative flexibility
-        self.temp_flex['flex_neg_inc'] = max_power_cha - total_cha
-        self.temp_flex.loc[self.temp_flex['flex_neg_inc'] < 0, 'flex_neg_inc'] = 0
-        self.temp_flex['flex_neg_dec'] = total_dch
-        self.temp_flex['flex_neg'] = max_power_cha - total_cha + total_dch
-        self.temp_flex.loc[self.temp_flex['flex_neg'] < 0, 'flex_neg'] = 0
-
-        self.temp_flex['e_cha'] = e_cha
-
-    def check_limits(self, input_flows, output_flows, results, dynamic):
-        """
-        calculate the consequences of maximal positive and negative flexibility. Positive flexibility can
-        increase  (pos_flex_inc) and decrease (pos_flex_dec) the outputs.
-        The negative flexibility increases/outputs the inputs of the Storage.
-        Therefore, they have to be transformed to input values.
-        Parameters
-        ----------
-        path_comps
-        comp
-
-        Returns
-        -------
-
-        """
-        i_start = len(self.temp_flex['flex_pos']) - len(dynamic.time_steps())
-
-        pos_flex = self.temp_flex['flex_pos'][dynamic.time_steps()]
-        pos_flex_inc = self.temp_flex['flex_pos_inc'][dynamic.time_steps()]  # increasing of comp output
-        pos_flex_dec = self.temp_flex['flex_pos_dec'] [dynamic.time_steps()] # curtailment of comp input
-
-        neg_flex = self.temp_flex['flex_neg'][dynamic.time_steps()]
-        neg_flex_inc = self.temp_flex['flex_neg_inc'][dynamic.time_steps()]  # increasing of comp input
-        neg_flex_dec = self.temp_flex['flex_neg_dec'][dynamic.time_steps()]  # curtailment of comp output
-
-        if not hasattr(self, 'limits'):
-            self.limits = pd.DataFrame()
-
-            self.limits['planned_input'] = input_flows
-            self.limits['planned_output'] = output_flows
-            self.limits['flex_input'] = input_flows
-            self.limits['flex_output'] = output_flows
-
-
-        # maximal in/out powers that the comm_batt is capable of
-        cap_storage = results[(self.name, 'capacity')]
-        power_limit_out = cap_storage / self.e2p_out
-        power_limit_in = cap_storage / self.e2p_in
-
-        # ----------------MAX FLEX RESULTS--------------------
-
-        extra_output = (pos_flex_inc - pos_flex_dec) / self.output_efficiency
-        extra_input = (neg_flex_inc - neg_flex_dec) * self.input_efficiency
-
-        # get the maximum value of inputs and outputs. This is enough information because
-        # the grid is just able to deliver increasing flexibility of only one type in each timestep
-        max_output = output_flows + extra_output
-        max_input = input_flows + extra_input
-        self.limits['max_usage'] = max_input
-
-        diff = max_input - power_limit_in
-        diff[diff < 0] = 0
-
-        return diff
-
-    def calc_correction_factors(self, time_steps, c_f_df, results):
-        """
-        Calculates the correction factors (CF).
-        A general component will only have an input and output efficiency.
-        If not, no calculation will be done and the cf df will not be changed.
-
-        Parameters
-        ----------
-        T: time steps of current RH interval
-        c_f_df: correction factors of all components that have aleary been searched
-        results: results of initial scheduling to get comp. size
-        input_profiles: not necessary in this method.
-
-        Returns
-        -------
-        c_f_df: updated CF dataframe
-        """
-        c_f_dch = pd.Series(data=1/self.output_efficiency, index=time_steps)
-        c_static_dch = pd.Series(data=0, index=time_steps)
-        c_f_cha = pd.Series(data=self.input_efficiency, index=time_steps)
-        c_static_cha = pd.Series(data=0, index=time_steps)
-
-        c_f_df['c_f_dch'] = c_f_dch * c_f_df['c_f_dch']
-        c_f_df['c_static_dch'] = c_static_dch + c_f_df['c_static_dch']
-
-        c_f_df['c_f_cha'] = c_f_cha * c_f_df['c_f_cha']
-        c_f_df['c_static_cha'] = c_static_cha + c_f_df['c_static_cha']
-
-        return c_f_df
diff --git a/Component/model/EMS_components/Coordinator.py b/Component/model/EMS_components/Coordinator.py
deleted file mode 100644
index 8d4b2a56aea966b20aaddc9e57c2164c701825a2..0000000000000000000000000000000000000000
--- a/Component/model/EMS_components/Coordinator.py
+++ /dev/null
@@ -1,216 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import pyomo.environ as pyo
-from datetime import timedelta
-from Tooling.dynamics.Dynamic import resample
-
-def add_power_variables_sizing(community, model):
-    """
-    ToDo JBR: Can be substituted by the normal add_power_variables method?
-    Introduces all variables necessary for the max_operational_profit and max_wholesale_profit
-    objective functions for the sizing of the community assets.
-    P_internal_demand is the internally matched demand. P_external_demand is the externally procurred electricity.
-    Vice versa, the internal and external supply variables are introduced.
-
-    Parameters
-    ----------
-    time_steps: time steps of whole problem size
-    var_dict: variable dict
-    model: pyomo model
-    """
-    model.add(('internal_exchange',), pyo.Var(model.T, bounds=(0, None)))
-    model.add(('community_export',), pyo.Var(model.T, bounds=(0, None)))
-    model.add(('community_import',), pyo.Var(model.T, bounds=(0, None)))
-
-def add_peak_shaving_variables(community, model):
-    """
-    Introduction of peak power that will be used for taking on the peak demanded power value.
-
-    Parameters
-    ----------
-    time_steps: time steps (not necessary)
-    var_dict: dict with all variables
-    model: pyomo model
-    """
-    model.add(('peak_community_import',), pyo.Var())
-
-def add_residual_supply_variables(community, model):
-    """
-    Introduces the variables for the residual supply of the community. The residual supply takes on
-    positive values when the aggregated load profiles of all agents result in surpluses and negative values when
-    it results in overall demand.
-    These variables are used for objective functions that only focus on the exchange with the environment of the EC.
-    This is computational less expensive than distinguish also between external and internal exchange.
-
-    Parameters
-    ----------
-    time_steps: time steps of current RH-interval
-    var_dict: dict with all decision variables
-    model: pyomo model
-    """
-    model.add(('agg_balance',), pyo.Var(model.T, bounds=(None, None)))
-
-def add_power_constraints_sizing(community, model):
-    """
-    Similar to the "add_power_const"-method. However, here the new community's grid exchange depends on
-    the CAs grid exchanges and not on DF activation.
-
-    Parameters
-    ----------
-    time_steps: time steps of the whole problem size
-    var_dict: dict constains all decision variables
-    model: pyomo model
-    """
-    export_vars = []
-    import_vars = []
-    for ca in community.community_assets.values():
-        ca_export_expression, ca_import_expression = ca.get_export_import_expressions(model.blocks[(ca._name,)])
-        model.lift_expression((ca._name, 'export'), model.blocks[(ca._name,)], ca_export_expression)
-        model.lift_expression((ca._name, 'import'), model.blocks[(ca._name,)], ca_import_expression)
-        export_vars.append((ca._name, 'export'))
-        import_vars.append((ca._name, 'import'))
-
-    # distinguish between internal and external demand
-    def rule(m, t):
-        return model.component_dict[('internal_exchange',)][t] <= community.result['initial']['agg_export'][t] + pyo.quicksum(model.component_dict[export_var][t] for export_var in export_vars)
-    model.add(('internal_exchange_1',), pyo.Constraint(model.T, rule = rule))
-
-    def rule(m, t):
-        return model.component_dict[('internal_exchange',)][t] <= community.result['initial']['agg_import'][t] + pyo.quicksum(model.component_dict[import_var][t] for import_var in import_vars)
-    model.add(('internal_exchange_2',), pyo.Constraint(model.T, rule = rule))
-
-    def rule(m, t):
-        return model.component_dict[('community_export',)][t] == community.result['initial']['agg_export'][t] + pyo.quicksum(model.component_dict[export_var][t] for export_var in export_vars) - model.component_dict[('internal_exchange',)][t]
-    model.add(('community_export_cons',), pyo.Constraint(model.T, rule = rule))
-
-    def rule(m, t):
-        return model.component_dict[('community_import',)][t] == community.result['initial']['agg_import'][t] + pyo.quicksum(model.component_dict[import_var][t] for import_var in import_vars) - model.component_dict[('internal_exchange',)][t]
-    model.add(('community_import_cons',), pyo.Constraint(model.T, rule = rule))
-
-def add_peak_shaving_constraints(community, model):
-    """
-    The peak power variable is set as the maximal value of all external demand variables to set it as
-    the peak power demand.
-
-    Parameters
-    ----------
-    time_steps: time steps of current RH-interval or whole problem size (if used for CA sizing)
-    var_dict: dict cotains all decision variables
-    model: pyomo model
-    """
-    def rule(m, t):
-        return model.component_dict[('community_import',)][t] <= model.component_dict[('peak_community_import',)]
-    model.add(('peak_community_import_cons',), pyo.Constraint(model.T, rule = rule))
-
-def add_balance_constraints_sizing(community, model):
-    """
-    Similar to the "add_res_supply_constr" method but not depending on DF activations
-    but on supply and demand variables of the CAs.
-    """
-    export_vars = []
-    import_vars = []
-    for ca in community.community_assets.values():
-        ca_export_expression, ca_import_expression = ca.get_export_import_expressions(model.blocks[(ca._name,)])
-        model.lift_expression((ca._name, 'export'), model.blocks[(ca._name,)], ca_export_expression)
-        model.lift_expression((ca._name, 'import'), model.blocks[(ca._name,)], ca_import_expression)
-        export_vars.append((ca._name, 'export'))
-        import_vars.append((ca._name, 'import'))
-
-    def rule(m, t):
-        return model.component_dict[('agg_balance',)][t] == community.result['initial']['agg_balance'][t] + pyo.quicksum(model.component_dict[export_var][t] for export_var in export_vars) - pyo.quicksum(model.component_dict[import_var][t] for import_var in import_vars)
-    model.add(('agg_balance_cons',), pyo.Constraint(model.T, rule = rule))
-
-def add_total_community_import_constraints_sizing(community, model):
-    """
-    The sum of the external demands after DF activations are not allowed to be higher than before.
-    This aims to prohibit arbitrage trading.
-    """
-    community_import = community.result['initial']['agg_balance'][model.time_steps] * -1
-    community_import.loc[community_import < 0] = 0
-    total_community_import = community_import.sum()
-
-    model.add(('total_community_import_cons',), pyo.Constraint(expr = pyo.quicksum(model.component_dict[('community_import',)][t] for t in model.time_steps) <= total_community_import))
-
-def add_sizing_objective(community, model, strategy_name):
-    if 'max_operational_profit' == strategy_name:
-        # factor that converts the simulation to ONE year
-        annual_factor = timedelta(days=365) / timedelta(hours=sum(model.dynamic.step_size_p(position) for position in range(model.dynamic.number_of_steps())))
-
-        # capital related costs and operating related costs
-        capital_costs = []
-        for ca in community.community_assets.values():
-            for component in ca._components.values():
-                capital_cost = component.add_capital_costs(model.blocks[(ca._name,)], ca._configuration)
-                if capital_cost is not None:
-                    capital_costs.append(((ca._name,), capital_cost))
-
-        model.block.f1 = pyo.Var()
-        elec_price_int = resample(community.configuration['elec_price_int'], community.dynamic, model.dynamic)
-        elec_price_ext = resample(community.configuration['elec_price_ext'], community.dynamic, model.dynamic)
-        injection_price = resample(community.configuration['injection_price'], community.dynamic, model.dynamic)
-        model.block.C_f1 = pyo.Constraint(expr=model.block.f1 == - pyo.quicksum(model.blocks[block].component_dict[var] for block, var in capital_costs)
-                                                                - (pyo.quicksum(model.component_dict[('internal_exchange',)][t] * elec_price_int[t] * model.step_size(t) for t in model.time_steps)
-                                                                 + pyo.quicksum(model.component_dict[('community_import',)][t] * elec_price_ext[t] * model.step_size(t) for t in model.time_steps)
-                                                                 - pyo.quicksum(model.component_dict[('internal_exchange',)][t] * injection_price[t] * model.step_size(t) for t in model.time_steps)
-                                                                 - pyo.quicksum(model.component_dict[('community_export',)][t] * injection_price[t] * model.step_size(t) for t in model.time_steps)
-                                                                  ) * annual_factor
-                                                                - model.component_dict[('peak_community_import',)] * community.configuration['network_usage_capacity_fee'])
-
-        model.block.O_f1 = pyo.Objective(expr=model.block.f1, sense=pyo.maximize)
-
-    if 'max_wholesale_profit' == strategy_name:
-        # factor that converts the simulation to ONE year
-        annual_factor = timedelta(days=365) / timedelta(hours=sum(model.dynamic.step_size_p(position) for position in range(model.dynamic.number_of_steps())))
-
-        # capital related costs and operating related costs
-        capital_costs = []
-        for ca in community.community_assets.values():
-            for component in ca._components.values():
-                capital_cost = component.add_capital_costs(model, ca._configuration)
-                if capital_cost is not None:
-                    capital_costs.append(((ca._name,), capital_cost))
-
-        model.block.f1 = pyo.Var()
-        model.block.C_f1 = pyo.Constraint(expr=model.block.f1 == - pyo.quicksum(model.blocks[block].component_dict[var] for block, var in capital_costs)
-                                                                 + pyo.quicksum(model.component_dict[('agg_balance',)][t] * community.configuration['spot_price'][t] * model.step_size(t) for t in model.time_steps) * annual_factor)
-
-        model.block.O_f1 = pyo.Objective(expr=model.block.f1, sense=pyo.maximize)
-
-def implement_sizing_strategy(community, model, strategy_name):
-    if 'max_operational_profit' == strategy_name:
-        add_power_variables_sizing(community, model)
-        add_peak_shaving_variables(community, model)
-
-        add_power_constraints_sizing(community, model)
-        add_peak_shaving_constraints(community, model)
-
-        add_total_community_import_constraints_sizing(community, model)
-
-    if 'max_wholesale_profit' == strategy_name:
-        add_residual_supply_variables(community, model)
-
-        add_balance_constraints_sizing(community, model)
-
-    add_sizing_objective(community, model, strategy_name)
diff --git a/Component/model/EMS_components/EnergyManagementSystem.py b/Component/model/EMS_components/EnergyManagementSystem.py
deleted file mode 100644
index 7cba8df0e3bd400338634d8453e8a8f114af8177..0000000000000000000000000000000000000000
--- a/Component/model/EMS_components/EnergyManagementSystem.py
+++ /dev/null
@@ -1,88 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import pyomo.environ as pyo
-from Model_Library.Prosumer.scripts import calc_annuity_vdi2067
-from datetime import timedelta
-
-def implement_strategy(prosumer, model, configuration):
-    if 'strategy' not in configuration:
-        implement_single_strategy(prosumer, model, configuration, [])
-    else:
-        if isinstance(configuration['strategy'], list):
-            implement_single_strategy(prosumer, model, configuration, configuration['strategy'])
-        elif isinstance(configuration['strategy'], dict):
-            for strategy_name, single_strategy in configuration['strategy'].items():
-                implement_single_strategy(prosumer, model, configuration, single_strategy, strategy_name)
-
-def implement_single_strategy(prosumer, model, configuration, strategy, name='objective'):
-    objective_expression = []
-    if 'annuity' in strategy:
-        capital_costs = []
-        for component in prosumer._components.values():
-            capital_cost = component.add_capital_costs(model, prosumer._configuration)
-            if capital_cost is not None:
-                capital_costs.append(capital_cost)
-
-        operating_costs = []
-        for component in prosumer._components.values():
-            operating_cost = component.add_operating_costs(model, configuration)
-            if operating_cost is not None:
-                operating_costs.append(operating_cost)
-
-        # The factor that converts the simulation to ONE year
-        annual_factor = timedelta(days=365) / timedelta(hours=sum(model.dynamic.step_size_p(position) for position in range(model.dynamic.number_of_steps())))
-        
-        objective_expression.append(pyo.quicksum(model.component_dict[capital_cost] for capital_cost in capital_costs) + (pyo.quicksum(model.component_dict[operation_cost][t] for operation_cost in operating_costs for t in model.time_steps) * annual_factor))
-
-    if 'peak_power_costs' in strategy:
-        peak_power_costs = []
-        for component in prosumer._components.values():
-            peak_power_cost = component.add_peak_power_costs(model)
-            if peak_power_cost is not None:
-                peak_power_costs.append(peak_power_cost)
-
-        # net present value factor (PREISDYNAMISCHER Barwertfaktor) and annuity factor (Annuitätsfaktor)
-        dynamic_cash_value = calc_annuity_vdi2067.dynamic_cash_value(prosumer._configuration['planning_horizon'],
-                                                                     q=1 + prosumer._configuration['yearly_interest'], r=1)
-        annuity_factor = calc_annuity_vdi2067.annuity_factor(prosumer._configuration['planning_horizon'],
-                                                             q=1 + prosumer._configuration['yearly_interest'])
-
-        objective_expression.append(pyo.quicksum(model.component_dict[peak_power_cost] for peak_power_cost in peak_power_costs) * dynamic_cash_value * annuity_factor)
-    
-    if 'c02' in strategy:
-        co2_emmisions = []
-        for component in prosumer._components.values():
-            co2_emmision = component.add_co2_emissions(model, configuration)
-            if co2_emmision is not None:
-                co2_emmisions.append(co2_emmision)
-                
-        objective_expression.append(pyo.quicksum(model.component_dict[co2_emmision][t] for co2_emmision in co2_emmisions for t in model.time_steps))
-    
-    objective_expression.append(pyo.quicksum(term[t] for term in model.objective_terms for t in model.time_steps))
-
-    setattr(model.block, 'f_' + name, pyo.Var())
-    setattr(model.block, 'c_' + name, pyo.Constraint(expr=getattr(model.block, 'f_' + name) == pyo.quicksum(term for term in objective_expression)))
-    setattr(model.block, 'O_' + name, pyo.Objective(expr=getattr(model.block, 'f_' + name), sense=pyo.minimize))
-    
\ No newline at end of file
diff --git a/Component/model/__init__.py b/Component/model/__init__.py
deleted file mode 100644
index 78a9cb8f243b23ccf8106457a48205fdd567b618..0000000000000000000000000000000000000000
--- a/Component/model/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .electrical_components import *
-from .gas_components import *
-from .heat_components import *
-from .hydrogen_components import *
-from .EMS_components import *
-from .AbstractComponent import AbstractComponent
diff --git a/Component/model/cool_components/__init__.py b/Component/model/cool_components/__init__.py
deleted file mode 100644
index 019fe1c98f1816557ccbebc6d61b9dd7cf5a35d8..0000000000000000000000000000000000000000
--- a/Component/model/cool_components/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .chillers import *
\ No newline at end of file
diff --git a/Component/model/cool_components/chillers/AbsorptionChiller.py b/Component/model/cool_components/chillers/AbsorptionChiller.py
deleted file mode 100644
index 2ac435677b8a49ad0746df46e1f760372b264890..0000000000000000000000000000000000000000
--- a/Component/model/cool_components/chillers/AbsorptionChiller.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-class AbsorptionChiller(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="AbsorptionChiller",
-                         commodity_1=ComponentCommodity.HEAT,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.COLD,
-                         commodity_4=None,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
diff --git a/Component/model/cool_components/chillers/CompressorChiller.py b/Component/model/cool_components/chillers/CompressorChiller.py
deleted file mode 100644
index 30b918d88bb79a4e0e2534cf37e7e91d307658bd..0000000000000000000000000000000000000000
--- a/Component/model/cool_components/chillers/CompressorChiller.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-
-class CompressorChiller(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="CompressorChiller",
-                         commodity_1=ComponentCommodity.ELECTRICITY,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.COLD,
-                         commodity_4=None,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
diff --git a/Component/model/cool_components/chillers/__init__.py b/Component/model/cool_components/chillers/__init__.py
deleted file mode 100644
index a93b3c580be36725a1447464dd6cee235e9dbeb5..0000000000000000000000000000000000000000
--- a/Component/model/cool_components/chillers/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .AbsorptionChiller import AbsorptionChiller
-from .CompressorChiller import CompressorChiller
diff --git a/Component/model/cool_components/cool_consumption/CoolConsumption.py b/Component/model/cool_components/cool_consumption/CoolConsumption.py
deleted file mode 100644
index 65a0724740b7d383f5439ab5c55a70f1b44f354c..0000000000000000000000000000000000000000
--- a/Component/model/cool_components/cool_consumption/CoolConsumption.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseConsumption import BaseConsumption
-from Tooling.predictor.Predictor import Predictor
-from Tooling.dynamics.Dynamic import resample
-
-import pyomo.environ as pyo
-
-class CoolConsumption(BaseConsumption):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="CoolConsumption",
-                         commodity=ComponentCommodity.COLD,
-                         configuration=configuration,
-                         profiles=profiles,
-                         dynamic=dynamic)
diff --git a/Component/model/cool_components/cool_consumption/__init__.py b/Component/model/cool_components/cool_consumption/__init__.py
deleted file mode 100644
index 8b137891791fe96927ad78e64b0aad7bded08bdc..0000000000000000000000000000000000000000
--- a/Component/model/cool_components/cool_consumption/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/Component/model/cool_components/external_sources/CoolGrid.py b/Component/model/cool_components/external_sources/CoolGrid.py
deleted file mode 100644
index b701518c781880ff234e4f7850a2c15fbe9fcf87..0000000000000000000000000000000000000000
--- a/Component/model/cool_components/external_sources/CoolGrid.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseGrid import BaseGrid
-
-class CoolGrid(BaseGrid):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='CoolGrid',
-                         commodity=ComponentCommodity.COLD,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         profiles=profiles,
-                         dynamic=dynamic)
diff --git a/Component/model/cool_components/external_sources/__init__.py b/Component/model/cool_components/external_sources/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/Component/model/electrical_components/__init__.py b/Component/model/electrical_components/__init__.py
deleted file mode 100644
index 8c21f348f2dda44952fb534cf92a878f296459e3..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .inflexible_generators import *
-from .electrical_consumption import *
-from .power_electronics import *
-from .external_sources import *
-from .storages import *
diff --git a/Component/model/electrical_components/electrical_bus_bar/ElectricalBusBar.py b/Component/model/electrical_components/electrical_bus_bar/ElectricalBusBar.py
deleted file mode 100644
index 06cb399af55d6577fc3950d204d5f26dba9b22e6..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/electrical_bus_bar/ElectricalBusBar.py
+++ /dev/null
@@ -1,33 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseBusBar import BaseBusBar
-
-class ElectricalBusBar(BaseBusBar):
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='ElectricalBusBar',
-                         commodity=ComponentCommodity.ELECTRICITY,
-                         dynamic=dynamic)
diff --git a/Component/model/electrical_components/electrical_bus_bar/__init__.py b/Component/model/electrical_components/electrical_bus_bar/__init__.py
deleted file mode 100644
index a65a062b49ecce38d21eff47ce74bb21a5127df6..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/electrical_bus_bar/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .ElectricalBusBar import ElectricalBusBar
\ No newline at end of file
diff --git a/Component/model/electrical_components/electrical_consumption/DrivingConsumption.py b/Component/model/electrical_components/electrical_consumption/DrivingConsumption.py
deleted file mode 100644
index a41997f037a6f40bb326a809ec9ff0933a410efe..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/electrical_consumption/DrivingConsumption.py
+++ /dev/null
@@ -1,49 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-import pandas
-import pandas as pd
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseConsumption import BaseConsumption
-
-import pyomo.environ as pyo
-
-class DrivingConsumption(BaseConsumption):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='DrivingConsumption',
-                         commodity=ComponentCommodity.ELECTRICITY,
-                         configuration=configuration,
-                         profiles=profiles,
-                         dynamic=dynamic)
-
-        if isinstance(configuration['consumption'], str):
-            if profiles[configuration['consumption']][0].ndim == 1: # object is pandas series, no column names
-                self.consumption = profiles[configuration['consumption']][0]
-            else: # object is pandas dataframe, column names can be used
-                self.consumption = profiles[configuration['consumption']][0].loc[:, 'Power']
-
-    def _add_constraints(self, model, prefix, configuration):
-        super()._add_constraints(model, prefix, configuration)
\ No newline at end of file
diff --git a/Component/model/electrical_components/electrical_consumption/ElectricalConsumption.py b/Component/model/electrical_components/electrical_consumption/ElectricalConsumption.py
deleted file mode 100644
index 23bc3d1362b41689b73dc82152e78d66619ebfe0..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/electrical_consumption/ElectricalConsumption.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseConsumption import BaseConsumption
-
-class ElectricalConsumption(BaseConsumption):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='ElectricalConsumption',
-                         commodity=ComponentCommodity.ELECTRICITY,
-                         configuration=configuration,
-                         profiles=profiles,
-                         dynamic=dynamic)
diff --git a/Component/model/electrical_components/electrical_consumption/__init__.py b/Component/model/electrical_components/electrical_consumption/__init__.py
deleted file mode 100644
index eddb1962e02463fa17faec1518068711c8cb2302..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/electrical_consumption/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .DrivingConsumption import DrivingConsumption
-from .ElectricalConsumption import ElectricalConsumption
-
diff --git a/Component/model/electrical_components/electrical_generation/ElectricalGeneration.py b/Component/model/electrical_components/electrical_generation/ElectricalGeneration.py
deleted file mode 100644
index 798c85c891c56bdb141f5b59ca4218b4aa767b55..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/electrical_generation/ElectricalGeneration.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseGeneration import BaseGeneration
-
-class ElectricalGeneration(BaseGeneration):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='ElectricalGeneration',
-                         commodity=ComponentCommodity.ELECTRICITY,
-                         configuration=configuration,
-                         profiles=profiles,
-                         dynamic=dynamic)
-
-        if isinstance(configuration['generation'], str):
-            if profiles[configuration['generation']][0].ndim == 1: # object is pandas series, no column names
-                self.generation = profiles[configuration['generation']][0]
-            else: # object is pandas dataframe, column names (Power,Driving)
-                self.generation = profiles[configuration['generation']][0].loc[:, 'Power']
diff --git a/Component/model/electrical_components/electrical_generation/__init__.py b/Component/model/electrical_components/electrical_generation/__init__.py
deleted file mode 100644
index 9cef75a06584a0dab74b13950c2541665dac98b6..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/electrical_generation/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .ElectricalGeneration import ElectricalGeneration
-
diff --git a/Component/model/electrical_components/external_sources/ElectricalGrid.py b/Component/model/electrical_components/external_sources/ElectricalGrid.py
deleted file mode 100644
index f7b87f2eb8d4b67f1deaad2e7a4382fb15ac78f6..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/external_sources/ElectricalGrid.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseGrid import BaseGrid
-from Tooling.predictor.Predictor import Predictor
-from Tooling.dynamics.Dynamic import resample
-
-import pyomo.environ as pyo
-
-class ElectricalGrid(BaseGrid):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='ElectricalGrid',
-                         commodity=ComponentCommodity.ELECTRICITY,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         profiles=profiles,
-                         dynamic=dynamic)
-        
-        if 'emmision' in configuration:
-            if isinstance(configuration['emission'], float) or isinstance(configuration['injection_price'], int):
-                self.emission = configuration['emission']
-            elif isinstance(configuration['emission'], str):
-                self.emission = resample(profiles[configuration['emission']][0], profiles[configuration['emission']][1], dynamic)
-            elif isinstance(configuration['emission'], dict):
-                self.emission = Predictor(resample(profiles[configuration['emission']['profile']][0], profiles[configuration['emission']['profile']][1], dynamic), configuration['emission']['type'], configuration['emission']['method'], dynamic)
-        else:
-            self.emission = 0
-        if 'peak_power_cost' in configuration:
-            self.peak_power_cost = configuration['peak_power_cost']
-        else:
-            self.peak_power_cost = 0
-
-    def add_co2_emissions(self, model, configuration):
-        prefix = (self.name,)
-
-        if isinstance(self.emission, float) or isinstance(self.emission, int):
-            def rule(m, t):
-                return model.component_dict[prefix + ('co2_emission',)][t] == model.component_dict[prefix + ('output_1',)][t] * self.emission * model.step_size(t)
-            model.add(prefix + ('co2_emission_cons',), pyo.Constraint(model.T, rule = rule))
-        else:
-            model.add(prefix + ('emission',), pyo.Param(model.T, mutable = True))
-
-            if isinstance(self.emission, Predictor):
-                if 'predict' in configuration:
-                    emission = self.emission.predict(list(model.T))
-                else:
-                    emission = resample(self.emission.profile, self.dynamic, model.dynamic)
-            else:
-                emission = resample(self.emission, self.dynamic, model.dynamic)
-
-            model.set_value(prefix + ('emission',), emission)
-
-            model.add(prefix + ('co2_emission',), pyo.Var(model.T))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('co2_emission',)][t] == model.component_dict[prefix + ('output_1',)][t] * model.component_dict[prefix + ('emission',)][t] * model.step_size(t)
-            model.add(prefix + ('co2_emission_cons',), pyo.Constraint(model.T, rule = rule))
-
-        return prefix + ('co2_emission',)
-
-    def add_peak_power_costs(self, model):
-        prefix = (self.name,)
-
-        model.add(prefix + ('peak_import',), pyo.Var())
-
-        model.add(prefix + ('peak_power_cost',), pyo.Var())
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] <= model.component_dict[prefix + ('peak_import',)]
-        model.add(prefix + ('peak_import_cons',), pyo.Constraint(model.T, rule = rule))
-
-        model.add(prefix + ('peak_power_cost_cons',), pyo.Constraint(expr = model.component_dict[prefix + ('peak_import',)] * self.peak_power_cost - model.component_dict[prefix + ('peak_power_cost',)] == 0))
-        return prefix + ('peak_power_cost',)
diff --git a/Component/model/electrical_components/external_sources/__init__.py b/Component/model/electrical_components/external_sources/__init__.py
deleted file mode 100644
index a628c78815f9fb80f56ec2bb0cba02a9cdf1aca0..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/external_sources/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .ElectricalGrid import ElectricalGrid
diff --git a/Component/model/electrical_components/inflexible_generators/PVGenerator.py b/Component/model/electrical_components/inflexible_generators/PVGenerator.py
deleted file mode 100644
index 5959c52fba467782cf3928e67e37846fa21d6d57..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/inflexible_generators/PVGenerator.py
+++ /dev/null
@@ -1,122 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-from Tooling.predictor.Predictor import Predictor
-from Tooling.dynamics.Dynamic import resample
-
-import warnings
-import pyomo.environ as pyo
-import json
-import os
-
-class PVGenerator(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='PVGenerator',
-                         commodity_1=None,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.ELECTRICITY,
-                         commodity_4=None,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
-
-        with open((os.path.join(model_directory, configuration['type'], configuration['model'] + '.json'))) as f:
-            model = json.load(f)
-        if 'NOCT' in model:
-            self.noct = model['NOCT']
-        else:
-            warnings.warn(f"No column for NOCT for component {self.name}!")
-        if 'temp_coefficient' in model:
-            self.temp_coefficient = float(model['temp_coefficient'])
-        else:
-            warnings.warn(f"No column for temp_coefficient for component {self.name}!")
-
-        if 'irradiance' in configuration:
-            if isinstance(configuration['irradiance'], str):
-                self.irradiance = resample(profiles[configuration['irradiance']][0], profiles[configuration['irradiance']][1], dynamic)
-            elif isinstance(configuration['irradiance'], dict):
-                self.irradiance = Predictor(resample(profiles[configuration['irradiance']['profile']][0], profiles[configuration['irradiance']['profile']][1], dynamic), configuration['irradiance']['type'], configuration['irradiance']['method'], dynamic)
-        if 'temperature' in configuration:
-            if isinstance(configuration['temperature'], str):
-                self.temperature = resample(profiles[configuration['temperature']][0], profiles[configuration['temperature']][1], dynamic)
-            elif isinstance(configuration['temperature'], dict):
-                self.temperature = Predictor(resample(profiles[configuration['temperature']['profile']][0], profiles[configuration['temperature']['profile']][1], dynamic), configuration['temperature']['type'], configuration['temperature']['method'], dynamic)
-        if 'power_factors' in configuration:
-            if isinstance(configuration['power_factors'], str):
-                self.power_factors = resample(profiles[configuration['power_factors']][0], profiles[configuration['power_factors']][1], dynamic)
-            elif isinstance(configuration['power_factors'], dict):
-                self.power_factors = Predictor(resample(profiles[configuration['power_factors']['profile']][0], profiles[configuration['power_factors']['profile']][1], dynamic), configuration['power_factors']['type'], configuration['power_factors']['method'], dynamic)
-
-    def calculate_power_factors(self, irradiance, temperature):
-        cell_temp = temperature + (irradiance / 800) * (self.noct - 20)
-        return (irradiance / 1000) * (1 - self.temp_coefficient * (cell_temp - 25))
-    
-    def get_input_output_variable_names(self):
-        return [((self.name, 'output_1'), 'T')]
-
-    def add_input_output_variables(self, model, prefix):
-        model.add(prefix + ('output_1',), pyo.Var(model.T, bounds=(0, None)))
-
-    def _constraint_capacity(self, model, prefix):
-        pass
-
-    def _constraint_conser(self, model, prefix, configuration):
-        model.add(prefix + ('power_factor',), pyo.Param(model.T, mutable = True))
-
-        if hasattr(self, 'power_factors'):
-            if isinstance(self.power_factors, Predictor):
-                if 'predict' in configuration:
-                    power_factors = self.power_factors.predict(list(model.T))
-                else:
-                    power_factors = resample(self.power_factors.profile, self.dynamic, model.dynamic)
-            else:
-                power_factors = resample(self.power_factors, self.dynamic, model.dynamic)
-        else:
-            if isinstance(self.irradiance, Predictor):
-                if 'predict' in configuration:
-                    irradiance = self.irradiance.predict(list(model.T))
-                else:
-                    irradiance = resample(self.irradiance.profile, self.dynamic, model.dynamic)
-            else:
-                irradiance = resample(self.irradiance, self.dynamic, model.dynamic)
-            if isinstance(self.temperature, Predictor):
-                if 'predict' in configuration:
-                    temperature = self.temperature.predict(list(model.T))
-                else:
-                    temperature = resample(self.temperature.profile, self.dynamic, model.dynamic)
-            else:
-                temperature = resample(self.temperature, self.dynamic, model.dynamic)
-            power_factors = self.calculate_power_factors(irradiance, temperature)
-            power_factors.loc[lambda power_factor: power_factor > 1] = 1
-            power_factors.loc[lambda power_factor: power_factor < 0] = 1
-
-        model.set_value(prefix + ('power_factor',), power_factors)
-            
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] <= model.component_dict[prefix + ('capacity',)] * model.component_dict[prefix + ('power_factor',)][t]
-        model.add(prefix + ('conser',), pyo.Constraint(model.T, rule = rule))
diff --git a/Component/model/electrical_components/inflexible_generators/__init__.py b/Component/model/electrical_components/inflexible_generators/__init__.py
deleted file mode 100644
index cc52544ed820c64275f9340a6ee40f532d41608c..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/inflexible_generators/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .PVGenerator import PVGenerator
diff --git a/Component/model/electrical_components/power_electronics/DcDcConverter.py b/Component/model/electrical_components/power_electronics/DcDcConverter.py
deleted file mode 100644
index 4c47fdca381a92ccce9d3aceed3092d1c32497d5..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/power_electronics/DcDcConverter.py
+++ /dev/null
@@ -1,151 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-import pyomo.environ as pyo
-
-class DcDcConverter(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='DcDcConverter',
-                         commodity_1=ComponentCommodity.ELECTRICITY,
-                         commodity_2=ComponentCommodity.ELECTRICITY,
-                         commodity_3=ComponentCommodity.ELECTRICITY,
-                         commodity_4=ComponentCommodity.ELECTRICITY,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
-    
-    def get_input_output_variable_names(self):
-        return [((self.name, 'input_1'), 'T'), ((self.name, 'input_2'), 'T'), ((self.name, 'output_1'), 'T'), ((self.name, 'output_2'), 'T')]
-    
-    def add_input_output_variables(self, model, prefix):
-        model.add(prefix + ('input_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('input_2',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_2',), pyo.Var(model.T, bounds=(0, None)))
-
-    def _add_constraints(self, model, prefix, configuration):
-        super()._add_constraints(model, prefix, configuration)
-        self._constraint_bi_flow(model, prefix, configuration)
-
-    def _constraint_capacity(self, model, prefix):
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_1',)][t] <= model.component_dict[prefix + ('capacity',)]
-        model.add(prefix + ('capacity_cons_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] <= model.component_dict[prefix + ('capacity',)]
-        model.add(prefix + ('capacity_cons_2',), pyo.Constraint(model.T, rule = rule))
-
-    def _constraint_conser(self, model, prefix, configuration):
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_2',)][t] == model.component_dict[prefix + ('input_1',)][t] * self.efficiency
-        model.add(prefix + ('conser_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] == model.component_dict[prefix + ('input_2',)][t] * self.efficiency
-        model.add(prefix + ('conser_2',), pyo.Constraint(model.T, rule = rule))
-
-    def _constraint_bi_flow(self, model, prefix, configuration):
-        model.add(prefix + ('z_1',), pyo.Var(model.T, domain=pyo.Binary))
-        model.add(prefix + ('z_2',), pyo.Var(model.T, domain=pyo.Binary))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('z_1',)][t] + model.component_dict[prefix + ('z_2',)][t] <= 1
-        model.add(prefix + ('bi_flow_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_1',)][t] <= model.component_dict[prefix + ('z_1',)][t] * 100000
-        model.add(prefix + ('bi_flow_2',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] <= model.component_dict[prefix + ('z_2',)][t] * 100000
-        model.add(prefix + ('bi_flow_3',), pyo.Constraint(model.T, rule = rule))
-
-    # Flexibility stuff
-
-    def check_limits(self, input_flows, output_flows, results, time_steps):
-        """
-        ToDO JBR: clean
-        calculate the consequences of maximal positive and negative flexibility. Positive flexibility can
-        increase  (pos_flex_inc) and decrease (pos_flex_dec) the inputs of an Inverter.
-        The negative flexibility increases/decreases the outputs of the Inverter.
-        Therefore, they have to be transformed to input values.
-
-        Parameters
-        ----------
-        input_flows: incoming power flows
-        output_flows: outgoing power flows
-        results: df with iniital prosumer results. Needed for obtaining the initial component size.
-        time_steps: time steps of current RH interval
-        """
-        # flexibility that was already translated to the input side by def adjust_with_efficiency
-        pos_flex_dec = self.temp_flex['flex_pos_dec'][time_steps]
-        neg_flex_inc = self.temp_flex['flex_neg_inc'][time_steps]
-
-        power_limit = 1 * results[(self.name, 'capacity')]  # excluding the excess power for safety
-
-        # ----------------CHECK POWER--------------------
-
-        # additional input power from positive flexibility
-        # the increasing part has to be retransformed to the 'other' side of the comp becaus ethat ois  the value that is really increased
-        # the decreasing part stays the same because the input flow of this flex type comes from the grid
-        max_input_1 = input_flows + self.flex_pos_inc_old - pos_flex_dec
-        # the decreasing part has to be retransformed to the 'other' side of the comp becaus ethat ois  the value that is really increased
-        # the increasing part stays the same because the input flow of this flex type comes from the grid
-        # max_input_2 = neg_flex_inc.combine(input_flows - self.flex_neg_dec_old,  max)  # additional input power from negative flexibility
-        max_input_2 = input_flows + neg_flex_inc - self.flex_neg_dec_old  # additional input power from negative flexibility
-
-        # upper limit for pos-inc df
-        upper_limit = power_limit - input_flows + pos_flex_dec
-        upper_limit.loc[upper_limit < 0] = 0  # account for the case input_flows > power_limit (excess)
-
-        # max pos-inc df on input side of converter
-        max_pos_inc = self.flex_pos_inc_old.combine(upper_limit, min)
-
-        excess_input_1 = max_input_1 - power_limit
-        excess_input_2 = max_input_2 - power_limit
-        excess_input_1.loc[excess_input_1 < 0] = 0
-        excess_input_2.loc[excess_input_2 < 0] = 0
-
-        # adjust the flexibility values
-        # the energy values are not affected because they are still availabele even if inter_comps can not handle the amount
-        trans_excess_1 = excess_input_1 * self.efficiency
-        trans_excess_1.loc[trans_excess_1 < 0] = 0
-
-        self.temp_flex['flex_pos_inc'] -= trans_excess_1  # excess has to be transformed to inv output value
-        self.temp_flex.loc[self.temp_flex['flex_pos_inc'] < 0, 'flex_pos_inc'] = 0
-
-        self.temp_flex['flex_neg_inc'] -= excess_input_2
-        self.temp_flex.loc[self.temp_flex['flex_neg_inc'] < 0, 'flex_neg_inc'] = 0
-
-        self.temp_flex['flex_pos'] = self.temp_flex['flex_pos_inc'] + self.temp_flex['flex_pos_dec']
-        self.temp_flex['flex_neg'] = self.temp_flex['flex_neg_inc'] + self.temp_flex['flex_neg_dec']
diff --git a/Component/model/electrical_components/power_electronics/DynamicBiInverter.py b/Component/model/electrical_components/power_electronics/DynamicBiInverter.py
deleted file mode 100644
index 4c6eabfc427af83a8e4fa5a75a765455414d5c06..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/power_electronics/DynamicBiInverter.py
+++ /dev/null
@@ -1,559 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-import warnings
-from scipy.optimize import curve_fit
-import numpy as np
-import pyomo.environ as pyo
-import copy
-import json
-import pandas as pd
-import os
-
-class DynamicBiInverter(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='DynamicBiInverter',
-                         commodity_1=ComponentCommodity.ELECTRICITY,
-                         commodity_2=ComponentCommodity.ELECTRICITY,
-                         commodity_3=ComponentCommodity.ELECTRICITY,
-                         commodity_4=ComponentCommodity.ELECTRICITY,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
-
-        with open((os.path.join(model_directory, configuration['type'], configuration['model'] + '.json'))) as f:
-            model = json.load(f)
-        self.eff_vars = dict()
-        if 'p1' in model:
-            self.eff_vars['p1'] = model['p1']
-        else:
-            warnings.warn(f"No column for p1 for component {self.name}!")
-        if 'p2' in model:
-            self.eff_vars['p2'] = model['p2']
-        else:
-            warnings.warn(f"No column for p2 for component {self.name}!")
-        if 'p3' in model:
-            self.eff_vars['p3'] = model['p3']
-        else:
-            warnings.warn(f"No column for p3 for component {self.name}!")
-        if 'eta1' in model:
-            self.eff_vars['eta1'] = model['eta1']
-        else:
-            warnings.warn(f"No column for eta1 for component {self.name}!")
-        if 'eta2' in model:
-            self.eff_vars['eta2'] = model['eta2']
-        else:
-            warnings.warn(f"No column for eta2 for component {self.name}!")
-        if 'eta3' in model:
-            self.eff_vars['eta3'] = model['eta3']
-        else:
-            warnings.warn(f"No column for eta3 for component {self.name}!")
-        if 'nominal_efficiency' in model:
-            self.eta_norm = model['nominal_efficiency']
-        else:
-            warnings.warn(f"No column for nominal_efficiency for component {self.name}!")
-        if 'curve_type' in model:
-            self.curve_type = model['curve_type']
-        else:
-            self.curve_type = 'input_power'
-            warnings.warn(f"No column for curve_type for component {self.name}!")
-        # todo: replace the bigM with max capacity given in the input matrix
-        self.bigM = 100
-
-    def calculate_input_curve_parameters(self):
-        """
-        The PV inverter model based on D.Sauer's DA.
-        The return value e, f are fitted parameters for equation:
-        p_out[p.u.] = e * p_in[p.u.] - f
-        """
-
-        # based on sauer (diplomarbeit) and own calculations to aproximate a linear relationship
-        p1 = self.eff_vars['p1']
-        p2 = self.eff_vars['p2']
-        p3 = self.eff_vars['p3']
-        eta1 = self.eff_vars['eta1']
-        eta2 = self.eff_vars['eta2']
-        eta3 = self.eff_vars['eta3']
-
-        def get_p_self_in():
-            p_self_in = (p1 * p2 * p3 * (
-                    eta1 * eta1 * p1 * (eta2 - eta3) + eta1 * (eta3 * eta3 * p3 - eta2 * eta2 * p2) + eta2 * eta3 *
-                    (eta2 * p2 - eta3 * p3))) / \
-                        ((eta1 * eta1 * p1 * p1 - eta1 * p1 * (eta2 * p2 + eta3 * p3) + eta2 * eta3 * p2 * p3) * (
-                                eta2 * p2 - eta3 * p3))
-            return p_self_in
-
-        def get_v_loss_in():
-            v_loss_in = (eta1 * eta1 * p1 * p1 * (eta2 * p2 - eta3 * p3 - p2 + p3) + eta1 * p1 * (
-                    eta3 * eta3 * p3 * p3 - eta2 * eta2 * p2 * p2) +
-                         eta2 * eta2 * p2 * p2 * (
-                                 eta3 * p3 + p1 - p3) - eta2 * eta3 * eta3 * p2 * p3 * p3 + eta3 * eta3 * p3 * p3 *
-                         (p2 - p1)) / ((eta1 * p1 - eta2 * p2) * (eta1 * p1 - eta3 * p3) * (eta3 * p3 - eta2 * p2))
-            return v_loss_in
-
-        def get_r_loss():
-            r_loss_in = (eta1 * p1 * (p2 - p3) + eta2 * p2 * (p3 - p1) + eta3 * p3 * (p1 - p2)) / \
-                        ((eta1 * eta1 * p1 * p1 - eta1 * p1 * (eta2 * p2 + eta3 * p3) + eta2 * eta3 * p2 * p3) * (
-                                eta3 * p3 - eta2 * p2))
-            return r_loss_in
-
-        return [get_p_self_in(), get_v_loss_in(), get_r_loss()]
-
-    def calculate_output_curve_parameters(self):
-        p1 = self.eff_vars['p1']
-        p2 = self.eff_vars['p2']
-        p3 = self.eff_vars['p3']
-        eta1 = self.eff_vars['eta1']
-        eta2 = self.eff_vars['eta2']
-        eta3 = self.eff_vars['eta3']
-
-        def get_p_self_out():
-            p_self_out = (p1 * p2 * p3 * (
-                    eta1 * eta2 * (p1 - p2) + eta1 * eta3 * (p3 - p1) + eta2 * eta3 * (p2 - p3))) / \
-                         (eta1 * eta2 * eta3 * (p1 - p2) * (p1 - p3) * (p2 - p3))
-            return p_self_out
-
-        def get_v_loss_out():
-            v_loss_out = (eta1 * eta2 * (p1 - p2) * (
-                    eta3 * (p2 - p3) * (p1 - p3) + p3 * (p1 + p2)) + eta1 * eta3 * p2 * (
-                                  p3 * p3 - p1 * p1) + eta2 * eta3 * p1 * (p2 * p2 - p3 * p3)) / \
-                         (eta1 * eta2 * eta3 * (p1 - p2) * (p1 - p3) * (p3 - p2))
-            return v_loss_out
-
-        def get_r_loss_out():
-            r_loss_out = (eta1 * (eta2 * p3 * (p1 - p2) + eta3 * p2 * (p3 - p1)) + eta2 * eta3 * p1 * (p2 - p3)) / \
-                         (eta1 * eta2 * eta3 * (p1 * p1 - p1 * (p2 + p3) + p2 * p3) * (p2 - p3))
-            return r_loss_out
-
-        return [get_p_self_out(), get_v_loss_out(), get_r_loss_out()]
-
-    def transform_output_curve_parameters(self):
-        vals = self.calculate_output_curve_parameters()
-        p_self_in = vals[0] * self.eta_norm
-        v_loss_in = vals[1]
-        r_loss_in = vals[2] / self.eta_norm
-        return [p_self_in, v_loss_in, r_loss_in]
-
-    def calculate_efficiency_curve(self):
-
-        assert self.curve_type == 0 or self.curve_type == 1, \
-            'The curve type must be either input_power or output_power'
-        if self.curve_type == 0:
-            vals = self.calculate_input_curve_parameters()
-        elif self.curve_type == 1:
-            vals = self.transform_output_curve_parameters()
-
-        a = -(1 + vals[1]) / (2 * vals[2])
-        b = ((1 + vals[1]) ** 2) / (4 * vals[2] ** 2)
-        c = 1 / vals[2]
-        d = vals[0] / vals[2]
-
-        # fit a linear curve to the original function
-        # https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.curve_fit.html
-        # cite scipy and Levenberg-Marquardt. Uses least squares
-
-        def ydata(x):
-            return a + np.power((b - d + c * x), 0.5)
-
-        def func(x, e, f):
-            return x * e - f
-
-        xdata = np.linspace(0, 1, 1000)
-        y = ydata(xdata)
-        popt, pcov = curve_fit(func, xdata, y)
-        e = popt[0]
-        f = popt[1]
-        return e, f
-    
-    def get_base_variable_names(self):
-        return super().get_base_variable_names() + self.get_power_flow_variable_names()
-    
-    def get_input_output_variable_names(self):
-        return [((self.name, 'input_1'), 'T'), ((self.name, 'input_2'), 'T'), ((self.name, 'output_1'), 'T'), ((self.name, 'output_2'), 'T')]
-    
-    def get_power_flow_variable_names(self):
-        return [((self.name, 'pin1_1'), 'T'), ((self.name, 'pin1_2'), 'T'), ((self.name, 'pin2_1'), 'T'), ((self.name, 'pin2_2'), 'T'), ((self.name, 'z_pin_1'), 'T'), ((self.name, 'z_pin_2'), 'T')]
-
-    def _add_variables(self, model, prefix):
-        super()._add_variables(model, prefix)
-        self._add_power_flow_variables(model, prefix)
-    
-    def add_input_output_variables(self, model, prefix):
-        model.add(prefix + ('input_1',), pyo.Var(model.T, bounds=(0, None)))
-        
-        model.add(prefix + ('input_2',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_2',), pyo.Var(model.T, bounds=(0, None)))
-
-    def _add_power_flow_variables(self, model, prefix):
-        model.add(prefix + ('pin1_1',), pyo.Var(model.T, bounds=(None, None)))
-
-        model.add(prefix + ('pin1_2',), pyo.Var(model.T, bounds=(None, None)))
-
-        model.add(prefix + ('pin2_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('pin2_2',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('z_pin_1',), pyo.Var(model.T, domain=pyo.Binary))
-
-        model.add(prefix + ('z_pin_2',), pyo.Var(model.T, domain=pyo.Binary))
-
-    def _add_constraints(self, model, prefix, configuration):
-        super()._add_constraints(model, prefix, configuration)
-        self._constraint_bi_flow(model, prefix, configuration)
-
-    def _constraint_capacity(self, model, prefix):
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_1',)][t] <= model.component_dict[prefix + ('capacity',)]
-        model.add(prefix + ('capacity_cons_1',), pyo.Constraint(model.T, rule = rule))
-        
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_2',)][t] <= model.component_dict[prefix + ('capacity',)]
-        model.add(prefix + ('capacity_cons_2',), pyo.Constraint(model.T, rule = rule))
-
-    def _constraint_conser(self, model, prefix, configuration):
-        # find out the component in flow dictionary according to name
-        bigM = self.max_size  # * 10
-        # Add power flow constraints
-        e, f = self.calculate_efficiency_curve()
-        # g = e / self.max_power  # avoid this step if we want to dimensionate the inverter, use of big m
-        '''
-        # non linear! because a continuous is multiplied by a binary
-        model.cons.add(
-            pyo.quicksum(var_dict[i][t] * e for i in input_powers) - f*self.max_power == var_dict[prefix + ('pin1',)][t])
-
-        model.cons.add(var_dict[prefix + ('pin1',)][t] * var_dict[prefix + ('z_pin',)][t] ==
-                        var_dict[prefix + ('pin2',)][t])
-
-        model.cons.add(var_dict[prefix + ('pin1',)][t]/bigM + 1 >= var_dict[prefix + ('z_pin',)][t])
-
-        model.cons.add(var_dict[prefix + ('z_pin',)][t] >= var_dict[prefix + ('pin1',)][t]/bigM)
-
-        model.cons.add(var_dict[prefix + ('pin2',)][t] == pyo.quicksum(
-            var_dict[i][t] for i in output_powers))
-        # name=self.name + '_' + str(t)
-        '''
-        # True linear model
-
-        # pin1 is continuous and unbounded (can be negative)
-        # pin2 is continuous and bounded as positive (0,None)
-        # z_pin is binary --> z_pin = 0 when pin1 < 0; z_pin = 1 when pin1 > 0
-
-        # constraint 1: ∑_(𝑖∈𝐼𝑛𝑝𝑢𝑡𝑃𝑜𝑤𝑒𝑟𝑠)▒[𝑒∗𝑃_𝑖𝑛𝑝𝑢𝑡 [𝑖]] −𝑓∗𝑃_𝑖𝑛𝑣𝑒𝑟𝑡𝑒𝑟_𝑛𝑜𝑚=𝑝_𝑖𝑛′ = pin1   (1)
-
-        # constraint 2: The following constraint ensures that 𝑝_𝑖𝑛′′ (continuous) be zero
-        # if 𝑧_(𝑝_𝑖𝑛)(binary) is zero and bounds 𝑝_𝑖𝑛′′ to positive and negative big M if 𝑧_(𝑝_𝑖𝑛) is one.
-        # −big𝑀∗𝑧_(𝑝_𝑖𝑛) ≤ 𝑝_𝑖𝑛′′ ≤ big𝑀∗𝑧_(𝑝_𝑖𝑛)           (2) --> p_in'' = 0 when z_pin = 0
-
-        # constraint 3: The following constraint ensures that 𝑝_𝑖𝑛′′ (continuous) be
-        # equal to 𝑝_𝑖𝑛′ (continuous) if 𝑧_(𝑝_𝑖𝑛)(binary) is one.
-        # 𝑝_𝑖𝑛′−(1−𝑧_(𝑝_𝑖𝑛))∗big𝑀 ≤ 𝑝_𝑖𝑛′′ ≤ 𝑝_𝑖𝑛′−(1−𝑧_(𝑝_𝑖𝑛))∗(−big𝑀)     (3) --> p_in'' = p_in' when z_pin = 1
-
-        # From (2) and (3) and the fact that 𝑝_𝑖𝑛^′′ is bounded as positive follows that 𝑧_(𝑝_𝑖𝑛 )
-        # will be forced by (3) to be zero in the case of 𝑝_𝑖𝑛^′ being negative since otherwise 𝑝_𝑖𝑛^′′
-        # would be equal to 𝑝_𝑖𝑛^′ (negative), which violates the bounds of 𝑝_𝑖𝑛^′′. With 𝑧_(𝑝_𝑖𝑛 )
-        # set to 0, (2) will ensure that 𝑝_𝑖𝑛^′′ be 0.
-
-        # The following constraint sets the final output of the inverter linearization
-        # model to the sum of power outputs of the inverter (4).
-        # 𝑝_𝑖𝑛′′=∑_(𝑖∈𝑂𝑢𝑡𝑝𝑢𝑡𝑃𝑜𝑤𝑒𝑟𝑠)[𝑃_𝑜𝑢𝑡𝑝𝑢𝑡 [𝑖]]
-
-        # This constraint is the representation of a linear approximation of the original function, where e and f are
-        # parameters of the linear relationship fitted to the simplification of the original non linear function
-        # by means of a least squares regression implemented in self.calculate_efficiency_curve(). (1)
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_1',)][t] * e - model.component_dict[prefix + ('capacity',)] * f == model.component_dict[prefix + ('pin1_1',)][t]
-        model.add(prefix + ('conser_1_1',), pyo.Constraint(model.T, rule = rule))
-        
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_2',)][t] * e - model.component_dict[prefix + ('capacity',)] * f == model.component_dict[prefix + ('pin1_2',)][t]
-        model.add(prefix + ('conser_1_2',), pyo.Constraint(model.T, rule = rule))
-
-        # The following constraint (split in two due to expression syntax) ensures that pin2 (continuous) be zero if
-        # z_pin (binary) is zero and bounds pin2 to positive and negative bigM if z_pin is one (2)
-        def rule(m, t):
-            return model.component_dict[prefix + ('z_pin_1',)][t] * -bigM <= model.component_dict[prefix + ('pin2_1',)][t]
-        model.add(prefix + ('conser_2_1',), pyo.Constraint(model.T, rule = rule))
-        
-        def rule(m, t):
-            return model.component_dict[prefix + ('z_pin_2',)][t] * -bigM <= model.component_dict[prefix + ('pin2_2',)][t]
-        model.add(prefix + ('conser_2_2',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin2_1',)][t] <= model.component_dict[prefix + ('z_pin_1',)][t] * bigM
-        model.add(prefix + ('conser_3_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin2_2',)][t] <= model.component_dict[prefix + ('z_pin_2',)][t] * bigM
-        model.add(prefix + ('conser_3_2',), pyo.Constraint(model.T, rule = rule))
-
-        # The following constraint (split in two due to expression syntax) ensures that pin2 (continuous) be equal
-        # to pin1 (continuous) if z_pin (binary) is one. (3)
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin1_1',)][t] - (1 - model.component_dict[prefix + ('z_pin_1',)][t]) * bigM <= model.component_dict[prefix + ('pin2_1',)][t]
-        model.add(prefix + ('conser_4_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin1_2',)][t] - (1 - model.component_dict[prefix + ('z_pin_2',)][t]) * bigM <= model.component_dict[prefix + ('pin2_2',)][t]
-        model.add(prefix + ('conser_4_2',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin1_1',)][t] + (1 - model.component_dict[prefix + ('z_pin_1',)][t]) * bigM >= model.component_dict[prefix + ('pin2_1',)][t]
-        model.add(prefix + ('conser_5_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin1_2',)][t] + (1 - model.component_dict[prefix + ('z_pin_2',)][t]) * bigM >= model.component_dict[prefix + ('pin2_2',)][t]
-        model.add(prefix + ('conser_5_2',), pyo.Constraint(model.T, rule = rule))
-
-        # from (2) and (3) and the fact that pin2 is bounded as positive follows that z_pin will be forced by (3) to
-        # be zero in the case of pin1 being negative since otherwise pin2 would be equal to pin1 (negative), which
-        # violates the bounds of pin2. With z_pin set to 0, (2) will ensure that pin2 be 0.
-
-        # The following constraint sets the final output of the inverter linearization model to the sum of power
-        # outputs of the inverter (4)
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin2_1',)][t] == model.component_dict[prefix + ('output_1',)][t]
-        model.add(prefix + ('conser_6_1',), pyo.Constraint(model.T, rule = rule))
-        
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin2_2',)][t] == model.component_dict[prefix + ('output_2',)][t]
-        model.add(prefix + ('conser_6_2',), pyo.Constraint(model.T, rule = rule))
-
-        # The following constraints are not strictly necessary but they shrink the feasible space, allowing for
-        # faster solving
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin2_1',)][t] >= -bigM
-        model.add(prefix + ('conser_7_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin2_2',)][t] >= -bigM
-        model.add(prefix + ('conser_7_2',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin2_1',)][t] <= bigM
-        model.add(prefix + ('conser_8_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin2_2',)][t] <= bigM
-        model.add(prefix + ('conser_8_2',), pyo.Constraint(model.T, rule = rule))
-
-        # model.cons.add(var_dict[prefix + ('pin2',)][t]<=var_dict[prefix + ('pin1',)][t]+(1 - var_dict[prefix + ('z_pin',)][t]) * bigM)
-
-        # Explicitly enforces z = 0 for negative pin1 and z = 1 for positive, before implicit in variable bounds of
-        # pin2
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin1_1',)][t] / bigM + 1 >= model.component_dict[prefix + ('z_pin_1',)][t]
-        model.add(prefix + ('conser_9_1',), pyo.Constraint(model.T, rule = rule))
-        
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin1_2',)][t] / bigM + 1 >= model.component_dict[prefix + ('z_pin_2',)][t]
-        model.add(prefix + ('conser_9_2',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('z_pin_1',)][t] >= model.component_dict[prefix + ('pin1_1',)][t] / bigM
-        model.add(prefix + ('conser_10_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('z_pin_2',)][t] >= model.component_dict[prefix + ('pin1_2',)][t] / bigM
-        model.add(prefix + ('conser_10_2',), pyo.Constraint(model.T, rule = rule))
-
-    def _constraint_bi_flow(self, model, prefix, configuration):
-        model.add(prefix + ('z_1',), pyo.Var(model.T, domain=pyo.Binary))
-        model.add(prefix + ('z_2',), pyo.Var(model.T, domain=pyo.Binary))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('z_1',)][t] + model.component_dict[prefix + ('z_2',)][t] <= 1
-        model.add(prefix + ('bi_flow_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_1',)][t] <= model.component_dict[prefix + ('z_1',)][t] * 100000
-        model.add(prefix + ('bi_flow_2',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_2',)][t] <= model.component_dict[prefix + ('z_2',)][t] * 100000
-        model.add(prefix + ('bi_flow_3',), pyo.Constraint(model.T, rule = rule))
-
-    # Flexibility stuff
-
-    def check_limits(self, input_flows, output_flows, results, dynamic):
-        """
-        ToDO JBR: clean
-        calculate the consequences of maximal positive and negative flexibility. Positive flexibility can
-        increase  (pos_flex_inc) and decrease (pos_flex_dec) the inputs of an Inverter.
-        The negative flexibility increases/decreases the outputs of the Inverter.
-        Therefore, they have to be transformed to input values.
-
-        Parameters
-        ----------
-        input_flows: incoming power flows
-        output_flows: outgoing power flows
-        results: df with iniital prosumer results. Needed for obtaining the initial component size.
-        time_steps: time steps of current RH interval
-        """
-        pos_flex_dec = self.temp_flex['flex_pos_dec'][dynamic.time_steps()]  # curtailment of comp input
-
-        neg_flex_inc = self.temp_flex['flex_neg_inc'][dynamic.time_steps()]  # increasing of comp input
-
-        if not hasattr(self, 'limits'):
-            self.limits = pd.DataFrame()
-
-            self.limits['planned_input'] = input_flows
-            self.limits['planned_output'] = output_flows
-            self.limits['flex_input'] = input_flows
-            self.limits['flex_output'] = output_flows
-
-        power_limit = 1 * results[(self.name, 'capacity')]  # excluding the excess power for safety
-        e, f = self.calculate_efficiency_curve()
-        # ----------------CHECK POWER--------------------
-        # Power of all input flows <= maximal power + excess power
-        # ToDO. The possibility of rerouting the power flow from the PV into storage instead into the grid
-        #  is not yet implemented --> missing negative flexibility
-
-        # additional input power from positive flexibility
-        # the increasing part has to be retransformed to the 'other' side of the comp becaus ethat ois  the value that is really increased
-        # the decreasing part stays the same because the input flow of this flex type comes from the grid
-        max_input_1 = input_flows + self.flex_pos_inc_old - pos_flex_dec
-        # the decreasing part has to be retransformed to the 'other' side of the comp becaus ethat ois  the value that is really increased
-        # the increasing part stays the same because the input flow of this flex type comes from the grid
-        # max_input_2 = neg_flex_inc.combine(input_flows - self.flex_neg_dec_old,  max)  # additional input power from negative flexibility
-        max_input_2 = input_flows + neg_flex_inc - self.flex_neg_dec_old  # additional input power from negative flexibility
-
-        # upper limit for pos-inc df
-        upper_limit_pos_inc = power_limit - input_flows
-        upper_limit_pos_inc.loc[upper_limit_pos_inc < 0] = 0  # account for the case input_flows > power_limit (excess)
-
-        # max pos-inc df on input side of converter
-        max_pos_inc = self.flex_pos_inc_old.combine(upper_limit_pos_inc, min)
-
-        # transferring the maximal allower pos-inc flex to the output side as it lays nearer to the grid
-        max_pos_inc_out = max_pos_inc * e - f * results[(self.name, 'capacity')]
-        max_pos_inc_out.loc[max_pos_inc_out < 0] = 0
-
-        # upper limit for neg-inc df
-        upper_limit_neg_inc = power_limit - input_flows
-        upper_limit_neg_inc.loc[upper_limit_neg_inc < 0] = 0  # account for the case input_flows > power_limit (excess)
-
-        # max neg-inc df on input side of converter
-        max_neg_inc = neg_flex_inc.combine(upper_limit_neg_inc, min)
-
-        # negative flex doesn't need conversion as it already has been
-        max_neg_inc_out = max_neg_inc
-
-        self.temp_flex['flex_pos_inc'] = max_pos_inc_out  # excess has to be transformed to inv output value
-        self.temp_flex.loc[self.temp_flex['flex_pos_inc'] < 0, 'flex_pos_inc'] = 0
-
-        self.temp_flex['flex_neg_inc'] = max_neg_inc_out
-        self.temp_flex.loc[self.temp_flex['flex_neg_inc'] < 0, 'flex_neg_inc'] = 0
-
-        self.temp_flex['flex_pos'] = self.temp_flex['flex_pos_inc'] + self.temp_flex['flex_pos_dec']
-        self.temp_flex['flex_neg'] = self.temp_flex['flex_neg_inc'] + self.temp_flex['flex_neg_dec']
-
-    def adjust_flex_with_efficiency(self, results, dynamic):
-        """
-        ToDO JBR: clean
-        Adjust the DF values from other components.
-        The theoretically available DF has to be adjusted according to the components efficiency.
-
-        Parameters
-        ----------
-        results: result df from initial scheduling
-        flows: all power flow variables of this prosumer
-        input_profiles: not needed here
-        T: time steps of current RH interval
-        """
-        # energy is still available after each flex activation. e.g.: If the efficiency would not affect the energy,
-        # the community would always underestimate the effect of a positive flexibility activation on the energy amount
-        # left
-
-        self.flex_pos_inc_old = copy.deepcopy(self.temp_flex['flex_pos_inc'])
-        self.flex_neg_dec_old = copy.deepcopy(self.temp_flex['flex_neg_dec'])
-
-        # ------------EFFICIENCY--------------
-        # the idea here is that the flexibility that comes out of the inverter is
-        # related to the efficiency of the inverter.
-        # So considering the power flow of the flexibility type,
-        # the net flexibility gets higher or lower compared to the flexibility at the origin component
-
-        e, f = self.calculate_efficiency_curve()
-        # ------POSITIVE--------------
-        # increasing positive flex means lower flex at grid
-        self.temp_flex['flex_pos_inc'] = self.temp_flex['flex_pos_inc'] * e - f * results[(self.name, 'capacity')]
-        self.temp_flex.loc[self.temp_flex['flex_pos_inc'] < 0, 'flex_pos_inc'] = 0
-
-        # decreasing positive flex means higher flex at grid
-        self.temp_flex.loc[self.temp_flex['flex_pos_dec'] > 0, 'flex_pos_dec'] = \
-            (self.temp_flex['flex_pos_dec'] + f * results[(self.name, 'capacity')]) / e
-
-        self.temp_flex['flex_pos'] = self.temp_flex['flex_pos_inc'] + self.temp_flex['flex_pos_dec']
-
-        # ------NEGATIVE--------------
-        # increasing negative flex means higher flex at grid
-        self.temp_flex.loc[self.temp_flex['flex_neg_inc'] > 0, 'flex_neg_inc'] = \
-            (self.temp_flex['flex_neg_inc'] + f * results[(self.name, 'capacity')]) /e
-
-        # decreasing neg flex means lower flex at grid
-        self.temp_flex['flex_neg_dec'] = (self.temp_flex['flex_neg_dec']) * e - f * results[(self.name, 'capacity')]
-        self.temp_flex.loc[self.temp_flex['flex_neg_dec'] < 0, 'flex_neg_dec'] = 0  # fix error from efficiency error
-
-        self.temp_flex['flex_neg'] = (self.temp_flex['flex_neg_inc'] + self.temp_flex['flex_neg_dec'])
-
-    def calc_correction_factors(self, time_steps, c_f_df, results):
-        """
-        Calculates the correction factors (CF).
-        For the inverter, with the dynamic behaviour, there exists a static part (only depending on comp. size) and
-        a dynamic part (depending on power flow).
-
-        Parameters
-        ----------
-        T: time steps of current RH interval
-        c_f_df: correction factors of all components that have aleary been searched
-        results: results of initial scheduling to get comp. size
-        input_profiles: not necessary in this method.
-
-        Returns
-        -------
-        c_f_df: updated CF dataframe
-        """
-        capacity = results[(self.name, 'capacity')]
-        e, f = self.calculate_efficiency_curve()
-
-        c_f_dch = pd.Series(data=1 / e, index=time_steps)
-        c_static_dch = pd.Series(data=f * capacity / e, index=time_steps)
-        c_f_cha = pd.Series(data=e, index=time_steps)
-        c_static_cha = pd.Series(data=f*capacity, index=time_steps)
-
-        c_f_df['c_f_dch'] = c_f_dch * c_f_df['c_f_dch']
-        c_f_df['c_static_dch'] = c_static_dch + c_f_df['c_static_dch']
-
-        c_f_df['c_f_cha'] = c_f_cha * c_f_df['c_f_cha']
-        c_f_df['c_static_cha'] = c_static_cha + c_f_df['c_static_cha']
-
-        return c_f_df
\ No newline at end of file
diff --git a/Component/model/electrical_components/power_electronics/DynamicInverter.py b/Component/model/electrical_components/power_electronics/DynamicInverter.py
deleted file mode 100644
index f0eefa395ffb7a7be9bd2af0ad3f58c59fb6bb32..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/power_electronics/DynamicInverter.py
+++ /dev/null
@@ -1,472 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-import warnings
-from scipy.optimize import curve_fit
-import numpy as np
-import pyomo.environ as pyo
-import copy
-import json
-import pandas as pd
-import os
-
-class DynamicInverter(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='DynamicInverter',
-                         commodity_1=ComponentCommodity.ELECTRICITY,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.ELECTRICITY,
-                         commodity_4=None,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
-
-        with open((os.path.join(model_directory, configuration['type'], configuration['model'] + '.json'))) as f:
-            model = json.load(f)
-        self.eff_vars = dict()
-        if 'p1' in model:
-            self.eff_vars['p1'] = model['p1']
-        else:
-            warnings.warn(f"No column for p1 for component {self.name}!")
-        if 'p2' in model:
-            self.eff_vars['p2'] = model['p2']
-        else:
-            warnings.warn(f"No column for p2 for component {self.name}!")
-        if 'p3' in model:
-            self.eff_vars['p3'] = model['p3']
-        else:
-            warnings.warn(f"No column for p3 for component {self.name}!")
-        if 'eta1' in model:
-            self.eff_vars['eta1'] = model['eta1']
-        else:
-            warnings.warn(f"No column for eta1 for component {self.name}!")
-        if 'eta2' in model:
-            self.eff_vars['eta2'] = model['eta2']
-        else:
-            warnings.warn(f"No column for eta2 for component {self.name}!")
-        if 'eta3' in model:
-            self.eff_vars['eta3'] = model['eta3']
-        else:
-            warnings.warn(f"No column for eta3 for component {self.name}!")
-        if 'nominal_efficiency' in model:
-            self.eta_norm = model['nominal_efficiency']
-        else:
-            warnings.warn(f"No column for nominal_efficiency for component {self.name}!")
-        if 'curve_type' in model:
-            self.curve_type = model['curve_type']
-        else:
-            self.curve_type = 'input_power'
-            warnings.warn(f"No column for curve_type for component {self.name}!")
-        # todo: replace the bigM with max capacity given in the input matrix
-        self.bigM = 100
-
-    def calculate_input_curve_parameters(self):
-        """
-        The PV inverter model based on D.Sauer's DA.
-        The return value e, f are fitted parameters for equation:
-        p_out[p.u.] = e * p_in[p.u.] - f
-        """
-
-        # based on sauer (diplomarbeit) and own calculations to aproximate a linear relationship
-        p1 = self.eff_vars['p1']
-        p2 = self.eff_vars['p2']
-        p3 = self.eff_vars['p3']
-        eta1 = self.eff_vars['eta1']
-        eta2 = self.eff_vars['eta2']
-        eta3 = self.eff_vars['eta3']
-
-        def get_p_self_in():
-            p_self_in = (p1 * p2 * p3 * (
-                    eta1 * eta1 * p1 * (eta2 - eta3) + eta1 * (eta3 * eta3 * p3 - eta2 * eta2 * p2) + eta2 * eta3 *
-                    (eta2 * p2 - eta3 * p3))) / \
-                        ((eta1 * eta1 * p1 * p1 - eta1 * p1 * (eta2 * p2 + eta3 * p3) + eta2 * eta3 * p2 * p3) * (
-                                eta2 * p2 - eta3 * p3))
-            return p_self_in
-
-        def get_v_loss_in():
-            v_loss_in = (eta1 * eta1 * p1 * p1 * (eta2 * p2 - eta3 * p3 - p2 + p3) + eta1 * p1 * (
-                    eta3 * eta3 * p3 * p3 - eta2 * eta2 * p2 * p2) +
-                         eta2 * eta2 * p2 * p2 * (
-                                 eta3 * p3 + p1 - p3) - eta2 * eta3 * eta3 * p2 * p3 * p3 + eta3 * eta3 * p3 * p3 *
-                         (p2 - p1)) / ((eta1 * p1 - eta2 * p2) * (eta1 * p1 - eta3 * p3) * (eta3 * p3 - eta2 * p2))
-            return v_loss_in
-
-        def get_r_loss():
-            r_loss_in = (eta1 * p1 * (p2 - p3) + eta2 * p2 * (p3 - p1) + eta3 * p3 * (p1 - p2)) / \
-                        ((eta1 * eta1 * p1 * p1 - eta1 * p1 * (eta2 * p2 + eta3 * p3) + eta2 * eta3 * p2 * p3) * (
-                                eta3 * p3 - eta2 * p2))
-            return r_loss_in
-
-        return [get_p_self_in(), get_v_loss_in(), get_r_loss()]
-
-    def calculate_output_curve_parameters(self):
-        p1 = self.eff_vars['p1']
-        p2 = self.eff_vars['p2']
-        p3 = self.eff_vars['p3']
-        eta1 = self.eff_vars['eta1']
-        eta2 = self.eff_vars['eta2']
-        eta3 = self.eff_vars['eta3']
-
-        def get_p_self_out():
-            p_self_out = (p1 * p2 * p3 * (
-                    eta1 * eta2 * (p1 - p2) + eta1 * eta3 * (p3 - p1) + eta2 * eta3 * (p2 - p3))) / \
-                         (eta1 * eta2 * eta3 * (p1 - p2) * (p1 - p3) * (p2 - p3))
-            return p_self_out
-
-        def get_v_loss_out():
-            v_loss_out = (eta1 * eta2 * (p1 - p2) * (
-                    eta3 * (p2 - p3) * (p1 - p3) + p3 * (p1 + p2)) + eta1 * eta3 * p2 * (
-                                  p3 * p3 - p1 * p1) + eta2 * eta3 * p1 * (p2 * p2 - p3 * p3)) / \
-                         (eta1 * eta2 * eta3 * (p1 - p2) * (p1 - p3) * (p3 - p2))
-            return v_loss_out
-
-        def get_r_loss_out():
-            r_loss_out = (eta1 * (eta2 * p3 * (p1 - p2) + eta3 * p2 * (p3 - p1)) + eta2 * eta3 * p1 * (p2 - p3)) / \
-                         (eta1 * eta2 * eta3 * (p1 * p1 - p1 * (p2 + p3) + p2 * p3) * (p2 - p3))
-            return r_loss_out
-
-        return [get_p_self_out(), get_v_loss_out(), get_r_loss_out()]
-
-    def transform_output_curve_parameters(self):
-        vals = self.calculate_output_curve_parameters()
-        p_self_in = vals[0] * self.eta_norm
-        v_loss_in = vals[1]
-        r_loss_in = vals[2] / self.eta_norm
-        return [p_self_in, v_loss_in, r_loss_in]
-
-    def calculate_efficiency_curve(self):
-
-        assert self.curve_type == 0 or self.curve_type == 1, \
-            'The curve type must be either input_power or output_power'
-        if self.curve_type == 0:
-            vals = self.calculate_input_curve_parameters()
-        elif self.curve_type == 1:
-            vals = self.transform_output_curve_parameters()
-
-        a = -(1 + vals[1]) / (2 * vals[2])
-        b = ((1 + vals[1]) ** 2) / (4 * vals[2] ** 2)
-        c = 1 / vals[2]
-        d = vals[0] / vals[2]
-
-        # fit a linear curve to the original function
-        # https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.curve_fit.html
-        # cite scipy and Levenberg-Marquardt. Uses least squares
-
-        def ydata(x):
-            return a + np.power((b - d + c * x), 0.5)
-
-        def func(x, e, f):
-            return x * e - f
-
-        xdata = np.linspace(0, 1, 1000)
-        y = ydata(xdata)
-        popt, pcov = curve_fit(func, xdata, y)
-        e = popt[0]
-        f = popt[1]
-        return e, f
-    
-    def get_base_variable_names(self):
-        return super().get_base_variable_names() + self.get_power_flow_variable_names()
-    
-    def get_power_flow_variable_names(self):
-        return [((self.name, 'pin1'), 'T'), ((self.name, 'pin2'), 'T'), ((self.name, 'z_pin'), 'T')]
-
-    def _add_variables(self, model, prefix):
-        super()._add_variables(model, prefix)
-        self._add_power_flow_variables(model, prefix)
-
-    def _add_power_flow_variables(self, model, prefix):
-        model.add(prefix + ('pin1',), pyo.Var(model.T, bounds=(None, None)))
-
-        model.add(prefix + ('pin2',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('z_pin',), pyo.Var(model.T, domain=pyo.Binary))
-
-    def _constraint_conser(self, model, prefix, configuration):
-        # find out the component in flow dictionary according to name
-        bigM = self.max_size  # * 10
-        # Add power flow constraints
-        e, f = self.calculate_efficiency_curve()
-        # g = e / self.max_power  # avoid this step if we want to dimensionate the inverter, use of big m
-        '''
-        # non linear! because a continuous is multiplied by a binary
-        model.cons.add(
-            pyo.quicksum(var_dict[i][t] * e for i in input_powers) - f*self.max_power == var_dict[prefix + ('pin1',)][t])
-
-        model.cons.add(var_dict[prefix + ('pin1',)][t] * var_dict[prefix + ('z_pin',)][t] ==
-                        var_dict[prefix + ('pin2',)][t])
-
-        model.cons.add(var_dict[prefix + ('pin1',)][t]/bigM + 1 >= var_dict[prefix + ('z_pin',)][t])
-
-        model.cons.add(var_dict[prefix + ('z_pin',)][t] >= var_dict[prefix + ('pin1',)][t]/bigM)
-
-        model.cons.add(var_dict[prefix + ('pin2',)][t] == pyo.quicksum(
-            var_dict[i][t] for i in output_powers))
-        # name=self.name + '_' + str(t)
-        '''
-        # True linear model
-
-        # pin1 is continuous and unbounded (can be negative)
-        # pin2 is continuous and bounded as positive (0,None)
-        # z_pin is binary --> z_pin = 0 when pin1 < 0; z_pin = 1 when pin1 > 0
-
-        # constraint 1: ∑_(𝑖∈𝐼𝑛𝑝𝑢𝑡𝑃𝑜𝑤𝑒𝑟𝑠)▒[𝑒∗𝑃_𝑖𝑛𝑝𝑢𝑡 [𝑖]] −𝑓∗𝑃_𝑖𝑛𝑣𝑒𝑟𝑡𝑒𝑟_𝑛𝑜𝑚=𝑝_𝑖𝑛′ = pin1   (1)
-
-        # constraint 2: The following constraint ensures that 𝑝_𝑖𝑛′′ (continuous) be zero
-        # if 𝑧_(𝑝_𝑖𝑛)(binary) is zero and bounds 𝑝_𝑖𝑛′′ to positive and negative big M if 𝑧_(𝑝_𝑖𝑛) is one.
-        # −big𝑀∗𝑧_(𝑝_𝑖𝑛) ≤ 𝑝_𝑖𝑛′′ ≤ big𝑀∗𝑧_(𝑝_𝑖𝑛)           (2) --> p_in'' = 0 when z_pin = 0
-
-        # constraint 3: The following constraint ensures that 𝑝_𝑖𝑛′′ (continuous) be
-        # equal to 𝑝_𝑖𝑛′ (continuous) if 𝑧_(𝑝_𝑖𝑛)(binary) is one.
-        # 𝑝_𝑖𝑛′−(1−𝑧_(𝑝_𝑖𝑛))∗big𝑀 ≤ 𝑝_𝑖𝑛′′ ≤ 𝑝_𝑖𝑛′−(1−𝑧_(𝑝_𝑖𝑛))∗(−big𝑀)     (3) --> p_in'' = p_in' when z_pin = 1
-
-        # From (2) and (3) and the fact that 𝑝_𝑖𝑛^′′ is bounded as positive follows that 𝑧_(𝑝_𝑖𝑛 )
-        # will be forced by (3) to be zero in the case of 𝑝_𝑖𝑛^′ being negative since otherwise 𝑝_𝑖𝑛^′′
-        # would be equal to 𝑝_𝑖𝑛^′ (negative), which violates the bounds of 𝑝_𝑖𝑛^′′. With 𝑧_(𝑝_𝑖𝑛 )
-        # set to 0, (2) will ensure that 𝑝_𝑖𝑛^′′ be 0.
-
-        # The following constraint sets the final output of the inverter linearization
-        # model to the sum of power outputs of the inverter (4).
-        # 𝑝_𝑖𝑛′′=∑_(𝑖∈𝑂𝑢𝑡𝑝𝑢𝑡𝑃𝑜𝑤𝑒𝑟𝑠)[𝑃_𝑜𝑢𝑡𝑝𝑢𝑡 [𝑖]]
-
-        # This constraint is the representation of a linear approximation of the original function, where e and f are
-        # parameters of the linear relationship fitted to the simplification of the original non linear function
-        # by means of a least squares regression implemented in self.calculate_efficiency_curve(). (1)
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_1',)][t] * e - model.component_dict[prefix + ('capacity',)] * f == model.component_dict[prefix + ('pin1',)][t]
-        model.add(prefix + ('conser_1',), pyo.Constraint(model.T, rule = rule))
-
-        # The following constraint (split in two due to expression syntax) ensures that pin2 (continuous) be zero if
-        # z_pin (binary) is zero and bounds pin2 to positive and negative bigM if z_pin is one (2)
-        def rule(m, t):
-            return model.component_dict[prefix + ('z_pin',)][t] * -bigM <= model.component_dict[prefix + ('pin2',)][t]
-        model.add(prefix + ('conser_2',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin2',)][t] <= model.component_dict[prefix + ('z_pin',)][t] * bigM
-        model.add(prefix + ('conser_3',), pyo.Constraint(model.T, rule = rule))
-
-        # The following constraint (split in two due to expression syntax) ensures that pin2 (continuous) be equal
-        # to pin1 (continuous) if z_pin (binary) is one. (3)
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin1',)][t] - (1 - model.component_dict[prefix + ('z_pin',)][t]) * bigM <= model.component_dict[prefix + ('pin2',)][t]
-        model.add(prefix + ('conser_4',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin1',)][t] + (1 - model.component_dict[prefix + ('z_pin',)][t]) * bigM >= model.component_dict[prefix + ('pin2',)][t]
-        model.add(prefix + ('conser_5',), pyo.Constraint(model.T, rule = rule))
-
-        # from (2) and (3) and the fact that pin2 is bounded as positive follows that z_pin will be forced by (3) to
-        # be zero in the case of pin1 being negative since otherwise pin2 would be equal to pin1 (negative), which
-        # violates the bounds of pin2. With z_pin set to 0, (2) will ensure that pin2 be 0.
-
-        # The following constraint sets the final output of the inverter linearization model to the sum of power
-        # outputs of the inverter (4)
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin2',)][t] == model.component_dict[prefix + ('output_1',)][t]
-        model.add(prefix + ('conser_6',), pyo.Constraint(model.T, rule = rule))
-
-        # The following constraints are not strictly necessary but they shrink the feasible space, allowing for
-        # faster solving
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin2',)][t] >= -bigM
-        model.add(prefix + ('conser_7',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin2',)][t] <= bigM
-        model.add(prefix + ('conser_8',), pyo.Constraint(model.T, rule = rule))
-
-        # model.cons.add(var_dict[prefix + ('pin2',)][t]<=var_dict[prefix + ('pin1',)][t]+(1 - var_dict[prefix + ('z_pin',)][t]) * bigM)
-
-        # Explicitly enforces z = 0 for negative pin1 and z = 1 for positive, before implicit in variable bounds of
-        # pin2
-        def rule(m, t):
-            return model.component_dict[prefix + ('pin1',)][t] / bigM + 1 >= model.component_dict[prefix + ('z_pin',)][t]
-        model.add(prefix + ('conser_9',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('z_pin',)][t] >= model.component_dict[prefix + ('pin1',)][t] / bigM
-        model.add(prefix + ('conser_10',), pyo.Constraint(model.T, rule = rule))
-
-    # Flexibility stuff
-
-    def check_limits(self, input_flows, output_flows, results, dynamic):
-        """
-        ToDO JBR: clean
-        calculate the consequences of maximal positive and negative flexibility. Positive flexibility can
-        increase  (pos_flex_inc) and decrease (pos_flex_dec) the inputs of an Inverter.
-        The negative flexibility increases/decreases the outputs of the Inverter.
-        Therefore, they have to be transformed to input values.
-
-        Parameters
-        ----------
-        input_flows: incoming power flows
-        output_flows: outgoing power flows
-        results: df with iniital prosumer results. Needed for obtaining the initial component size.
-        time_steps: time steps of current RH interval
-        """
-        pos_flex_dec = self.temp_flex['flex_pos_dec'][dynamic.time_steps()]  # curtailment of comp input
-
-        neg_flex_inc = self.temp_flex['flex_neg_inc'][dynamic.time_steps()]  # increasing of comp input
-
-        if not hasattr(self, 'limits'):
-            self.limits = pd.DataFrame()
-
-            self.limits['planned_input'] = input_flows
-            self.limits['planned_output'] = output_flows
-            self.limits['flex_input'] = input_flows
-            self.limits['flex_output'] = output_flows
-
-        power_limit = 1 * results[(self.name, 'capacity')]  # excluding the excess power for safety
-        e, f = self.calculate_efficiency_curve()
-        # ----------------CHECK POWER--------------------
-        # Power of all input flows <= maximal power + excess power
-        # ToDO. The possibility of rerouting the power flow from the PV into storage instead into the grid
-        #  is not yet implemented --> missing negative flexibility
-
-        # additional input power from positive flexibility
-        # the increasing part has to be retransformed to the 'other' side of the comp becaus ethat ois  the value that is really increased
-        # the decreasing part stays the same because the input flow of this flex type comes from the grid
-        max_input_1 = input_flows + self.flex_pos_inc_old - pos_flex_dec
-        # the decreasing part has to be retransformed to the 'other' side of the comp becaus ethat ois  the value that is really increased
-        # the increasing part stays the same because the input flow of this flex type comes from the grid
-        # max_input_2 = neg_flex_inc.combine(input_flows - self.flex_neg_dec_old,  max)  # additional input power from negative flexibility
-        max_input_2 = input_flows + neg_flex_inc - self.flex_neg_dec_old  # additional input power from negative flexibility
-
-        # upper limit for pos-inc df
-        upper_limit_pos_inc = power_limit - input_flows
-        upper_limit_pos_inc.loc[upper_limit_pos_inc < 0] = 0  # account for the case input_flows > power_limit (excess)
-
-        # max pos-inc df on input side of converter
-        max_pos_inc = self.flex_pos_inc_old.combine(upper_limit_pos_inc, min)
-
-        # transferring the maximal allower pos-inc flex to the output side as it lays nearer to the grid
-        max_pos_inc_out = max_pos_inc * e - f * results[(self.name, 'capacity')]
-        max_pos_inc_out.loc[max_pos_inc_out < 0] = 0
-
-        # upper limit for neg-inc df
-        upper_limit_neg_inc = power_limit - input_flows
-        upper_limit_neg_inc.loc[upper_limit_neg_inc < 0] = 0  # account for the case input_flows > power_limit (excess)
-
-        # max neg-inc df on input side of converter
-        max_neg_inc = neg_flex_inc.combine(upper_limit_neg_inc, min)
-
-        # negative flex doesn't need conversion as it already has been
-        max_neg_inc_out = max_neg_inc
-
-        self.temp_flex['flex_pos_inc'] = max_pos_inc_out  # excess has to be transformed to inv output value
-        self.temp_flex.loc[self.temp_flex['flex_pos_inc'] < 0, 'flex_pos_inc'] = 0
-
-        self.temp_flex['flex_neg_inc'] = max_neg_inc_out
-        self.temp_flex.loc[self.temp_flex['flex_neg_inc'] < 0, 'flex_neg_inc'] = 0
-
-        self.temp_flex['flex_pos'] = self.temp_flex['flex_pos_inc'] + self.temp_flex['flex_pos_dec']
-        self.temp_flex['flex_neg'] = self.temp_flex['flex_neg_inc'] + self.temp_flex['flex_neg_dec']
-
-    def adjust_flex_with_efficiency(self, results, dynamic):
-        """
-        ToDO JBR: clean
-        Adjust the DF values from other components.
-        The theoretically available DF has to be adjusted according to the components efficiency.
-
-        Parameters
-        ----------
-        results: result df from initial scheduling
-        flows: all power flow variables of this prosumer
-        input_profiles: not needed here
-        T: time steps of current RH interval
-        """
-        # energy is still available after each flex activation. e.g.: If the efficiency would not affect the energy,
-        # the community would always underestimate the effect of a positive flexibility activation on the energy amount
-        # left
-
-        self.flex_pos_inc_old = copy.deepcopy(self.temp_flex['flex_pos_inc'])
-        self.flex_neg_dec_old = copy.deepcopy(self.temp_flex['flex_neg_dec'])
-
-        # ------------EFFICIENCY--------------
-        # the idea here is that the flexibility that comes out of the inverter is
-        # related to the efficiency of the inverter.
-        # So considering the power flow of the flexibility type,
-        # the net flexibility gets higher or lower compared to the flexibility at the origin component
-
-        e, f = self.calculate_efficiency_curve()
-        # ------POSITIVE--------------
-        # increasing positive flex means lower flex at grid
-        self.temp_flex['flex_pos_inc'] = self.temp_flex['flex_pos_inc'] * e - f * results[(self.name, 'capacity')]
-        self.temp_flex.loc[self.temp_flex['flex_pos_inc'] < 0, 'flex_pos_inc'] = 0
-
-        # decreasing positive flex means higher flex at grid
-        self.temp_flex.loc[self.temp_flex['flex_pos_dec'] > 0, 'flex_pos_dec'] = \
-            (self.temp_flex['flex_pos_dec'] + f * results[(self.name, 'capacity')]) / e
-
-        self.temp_flex['flex_pos'] = self.temp_flex['flex_pos_inc'] + self.temp_flex['flex_pos_dec']
-
-        # ------NEGATIVE--------------
-        # increasing negative flex means higher flex at grid
-        self.temp_flex.loc[self.temp_flex['flex_neg_inc'] > 0, 'flex_neg_inc'] = \
-            (self.temp_flex['flex_neg_inc'] + f * results[(self.name, 'capacity')]) /e
-
-        # decreasing neg flex means lower flex at grid
-        self.temp_flex['flex_neg_dec'] = (self.temp_flex['flex_neg_dec']) * e - f * results[(self.name, 'capacity')]
-        self.temp_flex.loc[self.temp_flex['flex_neg_dec'] < 0, 'flex_neg_dec'] = 0  # fix error from efficiency error
-
-        self.temp_flex['flex_neg'] = (self.temp_flex['flex_neg_inc'] + self.temp_flex['flex_neg_dec'])
-
-    def calc_correction_factors(self, time_steps, c_f_df, results):
-        """
-        Calculates the correction factors (CF).
-        For the inverter, with the dynamic behaviour, there exists a static part (only depending on comp. size) and
-        a dynamic part (depending on power flow).
-
-        Parameters
-        ----------
-        T: time steps of current RH interval
-        c_f_df: correction factors of all components that have aleary been searched
-        results: results of initial scheduling to get comp. size
-        input_profiles: not necessary in this method.
-
-        Returns
-        -------
-        c_f_df: updated CF dataframe
-        """
-        capacity = results[(self.name, 'capacity')]
-        e, f = self.calculate_efficiency_curve()
-
-        c_f_dch = pd.Series(data=1 / e, index=time_steps)
-        c_static_dch = pd.Series(data=f * capacity / e, index=time_steps)
-        c_f_cha = pd.Series(data=e, index=time_steps)
-        c_static_cha = pd.Series(data=f*capacity, index=time_steps)
-
-        c_f_df['c_f_dch'] = c_f_dch * c_f_df['c_f_dch']
-        c_f_df['c_static_dch'] = c_static_dch + c_f_df['c_static_dch']
-
-        c_f_df['c_f_cha'] = c_f_cha * c_f_df['c_f_cha']
-        c_f_df['c_static_cha'] = c_static_cha + c_f_df['c_static_cha']
-
-        return c_f_df
\ No newline at end of file
diff --git a/Component/model/electrical_components/power_electronics/StaticBiInverter.py b/Component/model/electrical_components/power_electronics/StaticBiInverter.py
deleted file mode 100644
index 451fe3f10d7816197630668acb212b74e7b6d1c2..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/power_electronics/StaticBiInverter.py
+++ /dev/null
@@ -1,195 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-import pyomo.environ as pyo
-
-class StaticBiInverter(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='StaticBiInverter',
-                         commodity_1=ComponentCommodity.ELECTRICITY,
-                         commodity_2=ComponentCommodity.ELECTRICITY,
-                         commodity_3=ComponentCommodity.ELECTRICITY,
-                         commodity_4=ComponentCommodity.ELECTRICITY,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
-    
-    def get_input_output_variable_names(self):
-        return [((self.name, 'input_1'), 'T'), ((self.name, 'input_2'), 'T'), ((self.name, 'output_1'), 'T'), ((self.name, 'output_2'), 'T')]
-    
-    def add_input_output_variables(self, model, prefix):
-        model.add(prefix + ('input_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('input_2',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_2',), pyo.Var(model.T, bounds=(0, None)))
-
-    def _add_constraints(self, model, prefix, configuration):
-        super()._add_constraints(model, prefix, configuration)
-        self._constraint_bi_flow(model, prefix, configuration)
-
-    def _constraint_capacity(self, model, prefix):
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_1',)][t] <= model.component_dict[prefix + ('capacity',)]
-        model.add(prefix + ('capacity_cons_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] <= model.component_dict[prefix + ('capacity',)]
-        model.add(prefix + ('capacity_cons_2',), pyo.Constraint(model.T, rule = rule))
-
-    def _constraint_conser(self, model, prefix, configuration):
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_2',)][t] == model.component_dict[prefix + ('input_1',)][t] * self.efficiency
-        model.add(prefix + ('conser_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] == model.component_dict[prefix + ('input_2',)][t] * self.efficiency
-        model.add(prefix + ('conser_2',), pyo.Constraint(model.T, rule = rule))
-
-    def _constraint_bi_flow(self, model, prefix, configuration):
-        model.add(prefix + ('z_1',), pyo.Var(model.T, domain=pyo.Binary))
-        model.add(prefix + ('z_2',), pyo.Var(model.T, domain=pyo.Binary))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('z_1',)][t] + model.component_dict[prefix + ('z_2',)][t] <= 1
-        model.add(prefix + ('bi_flow_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_1',)][t] <= model.component_dict[prefix + ('z_1',)][t] * 100000
-        model.add(prefix + ('bi_flow_2',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] <= model.component_dict[prefix + ('z_2',)][t] * 100000
-        model.add(prefix + ('bi_flow_3',), pyo.Constraint(model.T, rule = rule))
-
-    def add_operating_costs(self, model, configuration):
-        # penalize fluctuation of charging curve
-        prefix = (self.name,)
-
-        P = self.max_size
-
-        model.add(prefix + ('z_increase',), pyo.Var(model.T, domain=pyo.Binary, initialize=0))
-        model.add(prefix + ('z_decrease',), pyo.Var(model.T, domain=pyo.Binary, initialize=0))
-
-        set_1 = set(list(model.T)[1:])
-        set_2 = set(list(model.T)[1:-1])
-
-        def rule_1(m):
-            return model.component_dict[prefix + ('z_increase',)][0] == 0
-        def rule_2(m):
-            return model.component_dict[prefix + ('z_decrease',)][0] == 0
-        def rule_3(m,t):
-            return model.component_dict[prefix + ('z_increase',)][t] >= (model.component_dict[prefix + ('output_1',)][t] - model.component_dict[prefix + ('output_1',)][t-1]) / P
-        def rule_4(m,t):
-            return model.component_dict[prefix + ('z_decrease',)][t] >= (model.component_dict[prefix + ('output_1',)][t-1] - model.component_dict[prefix + ('output_1',)][t]) / P
-        def rule_5(m, t):
-            return model.component_dict[prefix + ('z_increase',)][t] + model.component_dict[prefix + ('z_decrease',)][t] <= 1
-        def rule_6(m,t):
-            return model.component_dict[prefix + ('power_fluctuation',)][t] == (model.component_dict[prefix + ('output_1',)][t] - model.component_dict[prefix + ('output_1',)][t-1]) * model.component_dict[prefix + ('z_increase',)][t] + \
-                                                                             (model.component_dict[prefix + ('output_1',)][t-1] - model.component_dict[prefix + ('output_1',)][t]) * model.component_dict[prefix + ('z_decrease',)][t]
-        # t=0
-        model.add(prefix + ('penalty_1',), pyo.Constraint(rule=rule_1))
-        model.add(prefix + ('penalty_2',), pyo.Constraint(rule=rule_2))
-        # t=1
-        model.add(prefix + ('penalty_3',), pyo.Constraint(set_1, rule=rule_3))
-        model.add(prefix + ('penalty_4',), pyo.Constraint(set_1, rule=rule_4))
-        model.add(prefix + ('penalty_5',), pyo.Constraint(set_1, rule=rule_5))
-
-        model.add(prefix + ('power_fluctuation',), pyo.Var(model.T, within=pyo.NonNegativeReals, initialize=0))
-        model.add(prefix + ('penalty_6',), pyo.Constraint(set_2, rule=rule_6))
-
-        #penalty_cost = pyo.quicksum(model.component_dict[prefix + ('power_fluctuation',)][t] for t in model.T) * 0.001
-        model.add(prefix + ('operating_cost',), pyo.Var(model.T))
-        def rule_cost(m,t):
-            return model.component_dict[prefix + ('operating_cost',)][t] == model.component_dict[prefix + ('power_fluctuation',)][t] * 0.001
-        model.add(prefix + ('operating_cost_cons',), pyo.Constraint(model.T, rule=rule_cost))
-
-        return prefix + ('operating_cost',)
-
-    # Flexibility stuff
-
-    def check_limits(self, input_flows, output_flows, results, dynamic):
-        """
-        ToDO JBR: clean
-        calculate the consequences of maximal positive and negative flexibility. Positive flexibility can
-        increase  (pos_flex_inc) and decrease (pos_flex_dec) the inputs of an Inverter.
-        The negative flexibility increases/decreases the outputs of the Inverter.
-        Therefore, they have to be transformed to input values.
-
-        Parameters
-        ----------
-        input_flows: incoming power flows
-        output_flows: outgoing power flows
-        results: df with iniital prosumer results. Needed for obtaining the initial component size.
-        time_steps: time steps of current RH interval
-        """
-        # flexibility that was already translated to the input side by def adjust_with_efficiency
-        pos_flex_dec = self.temp_flex['flex_pos_dec'][dynamic.time_steps()]
-        neg_flex_inc = self.temp_flex['flex_neg_inc'][dynamic.time_steps()]
-
-        power_limit = 1 * results[(self.name, 'capacity')]  # excluding the excess power for safety
-
-        # ----------------CHECK POWER--------------------
-
-        # additional input power from positive flexibility
-        # the increasing part has to be retransformed to the 'other' side of the comp becaus ethat ois  the value that is really increased
-        # the decreasing part stays the same because the input flow of this flex type comes from the grid
-        max_input_1 = input_flows + self.flex_pos_inc_old - pos_flex_dec
-        # the decreasing part has to be retransformed to the 'other' side of the comp becaus ethat ois  the value that is really increased
-        # the increasing part stays the same because the input flow of this flex type comes from the grid
-        # max_input_2 = neg_flex_inc.combine(input_flows - self.flex_neg_dec_old,  max)  # additional input power from negative flexibility
-        max_input_2 = input_flows + neg_flex_inc - self.flex_neg_dec_old  # additional input power from negative flexibility
-
-        # upper limit for pos-inc df
-        upper_limit = power_limit - input_flows + pos_flex_dec
-        upper_limit.loc[upper_limit < 0] = 0  # account for the case input_flows > power_limit (excess)
-
-        # max pos-inc df on input side of converter
-        max_pos_inc = self.flex_pos_inc_old.combine(upper_limit, min)
-
-        excess_input_1 = max_input_1 - power_limit
-        excess_input_2 = max_input_2 - power_limit
-        excess_input_1.loc[excess_input_1 < 0] = 0
-        excess_input_2.loc[excess_input_2 < 0] = 0
-
-        # adjust the flexibility values
-        # the energy values are not affected because they are still availabele even if inter_comps can not handle the amount
-        trans_excess_1 = excess_input_1 * self.efficiency
-        trans_excess_1.loc[trans_excess_1 < 0] = 0
-
-        self.temp_flex['flex_pos_inc'] -= trans_excess_1  # excess has to be transformed to inv output value
-        self.temp_flex.loc[self.temp_flex['flex_pos_inc'] < 0, 'flex_pos_inc'] = 0
-
-        self.temp_flex['flex_neg_inc'] -= excess_input_2
-        self.temp_flex.loc[self.temp_flex['flex_neg_inc'] < 0, 'flex_neg_inc'] = 0
-
-        self.temp_flex['flex_pos'] = self.temp_flex['flex_pos_inc'] + self.temp_flex['flex_pos_dec']
-        self.temp_flex['flex_neg'] = self.temp_flex['flex_neg_inc'] + self.temp_flex['flex_neg_dec']
diff --git a/Component/model/electrical_components/power_electronics/StaticInverter.py b/Component/model/electrical_components/power_electronics/StaticInverter.py
deleted file mode 100644
index 9c2e6ef94758d0148e6047fd92c128044b7b1051..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/power_electronics/StaticInverter.py
+++ /dev/null
@@ -1,145 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-import pyomo.environ as pyo
-
-class StaticInverter(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='StaticInverter',
-                         commodity_1=ComponentCommodity.ELECTRICITY,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.ELECTRICITY,
-                         commodity_4=None,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
-
-    def add_operating_costs(self, model, configuration):
-        # penalize fluctuation of charging curve
-        prefix = (self.name,)
-
-        P = self.max_size # change to size of inverter
-
-        model.add(prefix + ('z_increase',), pyo.Var(model.T, domain=pyo.Binary, initialize=0))
-        model.add(prefix + ('z_decrease',), pyo.Var(model.T, domain=pyo.Binary, initialize=0))
-
-        set_1 = set(list(model.T)[1:])
-        set_2 = set(list(model.T)[1:-1])
-
-        def rule_1(m):
-            return model.component_dict[prefix + ('z_increase',)][0] == 0
-        def rule_2(m):
-            return model.component_dict[prefix + ('z_decrease',)][0] == 0
-        def rule_3(m,t):
-            return model.component_dict[prefix + ('z_increase',)][t] >= (model.component_dict[prefix + ('output_1',)][t] - model.component_dict[prefix + ('output_1',)][t-1]) / P
-        def rule_4(m,t):
-            return model.component_dict[prefix + ('z_decrease',)][t] >= (model.component_dict[prefix + ('output_1',)][t-1] - model.component_dict[prefix + ('output_1',)][t]) / P
-        def rule_5(m, t):
-            return model.component_dict[prefix + ('z_increase',)][t] + model.component_dict[prefix + ('z_decrease',)][t] <= 1
-        def rule_6(m,t):
-            return model.component_dict[prefix + ('power_fluctuation',)][t] == (model.component_dict[prefix + ('output_1',)][t] - model.component_dict[prefix + ('output_1',)][t-1]) * model.component_dict[prefix + ('z_increase',)][t] + \
-                                                                             (model.component_dict[prefix + ('output_1',)][t-1] - model.component_dict[prefix + ('output_1',)][t]) * model.component_dict[prefix + ('z_decrease',)][t]
-        # t=0
-        model.add(prefix + ('penalty_1',), pyo.Constraint(rule=rule_1))
-        model.add(prefix + ('penalty_2',), pyo.Constraint(rule=rule_2))
-        # t=1
-        model.add(prefix + ('penalty_3',), pyo.Constraint(set_1, rule=rule_3))
-        model.add(prefix + ('penalty_4',), pyo.Constraint(set_1, rule=rule_4))
-        model.add(prefix + ('penalty_5',), pyo.Constraint(set_1, rule=rule_5))
-
-        model.add(prefix + ('power_fluctuation',), pyo.Var(model.T, within=pyo.NonNegativeReals, initialize=0))
-        model.add(prefix + ('penalty_6',), pyo.Constraint(set_2, rule=rule_6))
-
-        #penalty_cost = pyo.quicksum(model.component_dict[prefix + ('power_fluctuation',)][t] for t in model.T) * 0.001
-        model.add(prefix + ('operating_cost',), pyo.Var(model.T))
-        def rule_cost(m,t):
-            return model.component_dict[prefix + ('operating_cost',)][t] == model.component_dict[prefix + ('power_fluctuation',)][t] * 0.001
-        model.add(prefix + ('operating_cost_cons',), pyo.Constraint(model.T, rule=rule_cost))
-
-        return prefix + ('operating_cost',)
-
-    # Flexibility stuff
-
-    def check_limits(self, input_flows, output_flows, results, dynamic):
-        """
-        ToDO JBR: clean
-        calculate the consequences of maximal positive and negative flexibility. Positive flexibility can
-        increase  (pos_flex_inc) and decrease (pos_flex_dec) the inputs of an Inverter.
-        The negative flexibility increases/decreases the outputs of the Inverter.
-        Therefore, they have to be transformed to input values.
-
-        Parameters
-        ----------
-        input_flows: incoming power flows
-        output_flows: outgoing power flows
-        results: df with iniital prosumer results. Needed for obtaining the initial component size.
-        time_steps: time steps of current RH interval
-        """
-        # flexibility that was already translated to the input side by def adjust_with_efficiency
-        pos_flex_dec = self.temp_flex['flex_pos_dec'][dynamic.time_steps()]
-        neg_flex_inc = self.temp_flex['flex_neg_inc'][dynamic.time_steps()]
-
-        power_limit = 1 * results[(self.name, 'capacity')]  # excluding the excess power for safety
-
-        # ----------------CHECK POWER--------------------
-
-        # additional input power from positive flexibility
-        # the increasing part has to be retransformed to the 'other' side of the comp becaus ethat ois  the value that is really increased
-        # the decreasing part stays the same because the input flow of this flex type comes from the grid
-        max_input_1 = input_flows + self.flex_pos_inc_old - pos_flex_dec
-        # the decreasing part has to be retransformed to the 'other' side of the comp becaus ethat ois  the value that is really increased
-        # the increasing part stays the same because the input flow of this flex type comes from the grid
-        # max_input_2 = neg_flex_inc.combine(input_flows - self.flex_neg_dec_old,  max)  # additional input power from negative flexibility
-        max_input_2 = input_flows + neg_flex_inc - self.flex_neg_dec_old  # additional input power from negative flexibility
-
-        # upper limit for pos-inc df
-        upper_limit = power_limit - input_flows + pos_flex_dec
-        upper_limit.loc[upper_limit < 0] = 0  # account for the case input_flows > power_limit (excess)
-
-        # max pos-inc df on input side of converter
-        max_pos_inc = self.flex_pos_inc_old.combine(upper_limit, min)
-
-        excess_input_1 = max_input_1 - power_limit
-        excess_input_2 = max_input_2 - power_limit
-        excess_input_1.loc[excess_input_1 < 0] = 0
-        excess_input_2.loc[excess_input_2 < 0] = 0
-
-        # adjust the flexibility values
-        # the energy values are not affected because they are still availabele even if inter_comps can not handle the amount
-        trans_excess_1 = excess_input_1 * self.efficiency
-        trans_excess_1.loc[trans_excess_1 < 0] = 0
-
-        self.temp_flex['flex_pos_inc'] -= trans_excess_1  # excess has to be transformed to inv output value
-        self.temp_flex.loc[self.temp_flex['flex_pos_inc'] < 0, 'flex_pos_inc'] = 0
-
-        self.temp_flex['flex_neg_inc'] -= excess_input_2
-        self.temp_flex.loc[self.temp_flex['flex_neg_inc'] < 0, 'flex_neg_inc'] = 0
-
-        self.temp_flex['flex_pos'] = self.temp_flex['flex_pos_inc'] + self.temp_flex['flex_pos_dec']
-        self.temp_flex['flex_neg'] = self.temp_flex['flex_neg_inc'] + self.temp_flex['flex_neg_dec']
diff --git a/Component/model/electrical_components/power_electronics/__init__.py b/Component/model/electrical_components/power_electronics/__init__.py
deleted file mode 100644
index b2d56a909d9a838bde6b26b56774f19c454c57c4..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/power_electronics/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .DcDcConverter import DcDcConverter
-from .DynamicBiInverter import DynamicBiInverter
-from .DynamicInverter import DynamicInverter
-from .StaticBiInverter import StaticBiInverter
-from .StaticInverter import StaticInverter
\ No newline at end of file
diff --git a/Component/model/electrical_components/storages/ElecBus.py b/Component/model/electrical_components/storages/ElecBus.py
deleted file mode 100644
index e5b9e89e666fa206df751ed3d28982cbb3a69250..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/storages/ElecBus.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.electrical_components.storages.ElecVehicle import ElecVehicle
-
-import pyomo.environ as pyo
-
-class ElecBus(ElecVehicle):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic, type='ElecBus'):
-        super().__init__(name=name,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         profiles=profiles,
-                         dynamic=dynamic,
-                         type=type)
-
-    def _add_variables(self, model, prefix):
-        super()._add_variables(model, prefix)
-
-    def _add_constraints(self, model, prefix, configuration):
-        super()._add_constraints(model, prefix, configuration)
-        self._constraint_final_soe(model, prefix)
-        self._constraint_SOC_above_below_x(model, prefix, 0.5)
-        #self._constraint_min_soe(model, prefix)
-
-    def _constraint_conser(self, model, prefix, configuration):
-        if 'storage_connect' in configuration and self.match(commodity=configuration['storage_connect']['commodity']):
-            last_energy = configuration['storage_connect']['values'][self.name]
-            model.add(prefix + ('fix_first_energy',), pyo.Constraint(expr = model.component_dict[prefix + ('energy',)][model.T_prime.first()] == last_energy))
-        else:
-            model.add(prefix + ('fix_first_energy',), pyo.Constraint(expr=model.component_dict[prefix + ('energy',)][model.T_prime.first()] <= model.component_dict[prefix + ('capacity',)]))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('energy',)][t] == model.component_dict[prefix + ('energy',)][model.T_prime[model.T_prime.ord(t) - 1]] + model.component_dict[prefix + ('input_1',)][t] * self.input_efficiency * model.step_size(t) - model.component_dict[prefix + ('output_1',)][t] / self.output_efficiency * model.step_size(t)
-        model.add(prefix + ('conser',), pyo.Constraint(model.T, rule=rule))
-
-    def _constraint_final_soe(self, model, prefix):
-        model.add(prefix + ('fix_final_energy',), pyo.Constraint(expr=model.component_dict[prefix + ('energy',)][model.T.last()] >= model.component_dict[prefix + ('energy',)][model.T_prime.first()]))
-
-    def _constraint_min_soe(self, model, prefix):
-        def rule(m,t):
-            return model.component_dict[prefix + ('energy',)][t] >= 0.3 * model.component_dict[prefix + ('capacity',)] * model.component_dict[prefix + ('z_input',)][t]
-        model.add(prefix + ('min_SOE_while_CHA',), pyo.Constraint(model.T, rule=rule))
-
-    def add_operating_costs(self, model, configuration):
-        # opportunity costs for battery_energy_throughput and for SOC > 50%
-        prefix = (self.name,)
-
-        cost_function = lambda t: model.component_dict[prefix + ('input_1',)][t] * 0.2 + model.component_dict[prefix + ('z_SOC_above_50',)][t] * 0.0
-
-        model.add(prefix + ('operating_cost',), pyo.Var(model.T))
-
-        def rule(m,t ):
-            return model.component_dict[prefix + ('operating_cost',)][t] == cost_function(t) * model.step_size(t)
-        model.add(prefix + ('operating_cost_cons',), pyo.Constraint(model.T, rule=rule))
-
-        return prefix + ('operating_cost',)
diff --git a/Component/model/electrical_components/storages/ElecVehicle.py b/Component/model/electrical_components/storages/ElecVehicle.py
deleted file mode 100644
index fe9076a80088b56893adb21af2515e2e29e3b99b..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/storages/ElecVehicle.py
+++ /dev/null
@@ -1,65 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.electrical_components.storages.LiionBattery import LiionBattery
-
-import pyomo.environ as pyo
-from pyomo.environ import value
-
-class ElecVehicle(LiionBattery):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic, type='ElecVehicle'):
-        super().__init__(name=name,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         profiles=profiles,
-                         dynamic=dynamic,
-                         type=type)
-
-    def _add_variables(self, model, prefix):
-        super()._add_variables(model, prefix)
-
-    def _add_constraints(self, model, prefix, configuration):
-        super()._add_constraints(model, prefix, configuration)
-        self._constraint_SOC_above_below_x(model, prefix, 0.3)
-        self._constraint_SOC_above_below_x(model, prefix, 0.99)
-
-    # constraint that introduces two binaries for the given SOC threshold, only 1 can be true at the same time for each respective threshold
-    # binary = 1 indicates that condition is true
-    # SOC should be passed as decimal number, not as percentage
-    def _constraint_SOC_above_below_x(self, model, prefix, soc):
-        model.add(prefix + ('z_SOC_below_' + str(int(soc*100)),), pyo.Var(model.T, within=pyo.Binary))
-        model.add(prefix + ('z_SOC_above_' + str(int(soc*100)),), pyo.Var(model.T, within=pyo.Binary))
-
-        def rule(m,t):
-            return 1000 * self.max_size * model.component_dict[prefix + ('z_SOC_below_' + str(int(soc*100)),)][t] >= soc * 0.9999 * model.component_dict[prefix + ('capacity',)] - model.component_dict[prefix + ('energy',)][t]
-        model.add(prefix + ('SOC_below_' + str(int(soc*100)) + '_cons',), pyo.Constraint(model.T, rule=rule))
-        def rule(m,t):
-            return 1000 * self.max_size * model.component_dict[prefix + ('z_SOC_above_' + str(int(soc*100)),)][t] >= model.component_dict[prefix + ('energy',)][t] - soc * model.component_dict[prefix + ('capacity',)]
-        model.add(prefix + ('SOC_above_' + str(int(soc*100)) + '_cons',), pyo.Constraint(model.T, rule=rule))
-        def rule(m,t):
-            return model.component_dict[prefix + ('z_SOC_above_' + str(int(soc*100)),)][t] + model.component_dict[prefix + ('z_SOC_below_' + str(int(soc*100)),)][t] <= 1
-        model.add(prefix + ('SOC_below_above_' + str(int(soc*100)) + '_cons',), pyo.Constraint(model.T, rule=rule))
-
-
diff --git a/Component/model/electrical_components/storages/LiionBattery.py b/Component/model/electrical_components/storages/LiionBattery.py
deleted file mode 100644
index b2d7ef5d9e3d86c03a1bc0724ba3c9c35b70efba..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/storages/LiionBattery.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseStorage import BaseStorage
-
-class LiionBattery(BaseStorage):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic, type='LiionBattery'):
-        super().__init__(name=name,
-                         type=type,
-                         commodity=ComponentCommodity.ELECTRICITY,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
diff --git a/Component/model/electrical_components/storages/__init__.py b/Component/model/electrical_components/storages/__init__.py
deleted file mode 100644
index d765d399c1bdf0ddc1359ff2bec6928e4a71ef33..0000000000000000000000000000000000000000
--- a/Component/model/electrical_components/storages/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .LiionBattery import LiionBattery
-from .ElecVehicle import ElecVehicle
-from .ElecBus import ElecBus
diff --git a/Component/model/gas_components/BHKW/BHKW.py b/Component/model/gas_components/BHKW/BHKW.py
deleted file mode 100644
index c269bbbbb55df46b23ceafc12d6f078960a26b3a..0000000000000000000000000000000000000000
--- a/Component/model/gas_components/BHKW/BHKW.py
+++ /dev/null
@@ -1,187 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-import warnings
-import pyomo.environ as pyo
-import json
-import os
-
-class BHKW(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="BHKW",
-                         commodity_1=ComponentCommodity.GAS,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.HEAT,
-                         commodity_4=ComponentCommodity.ELECTRICITY,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
-
-        with open((os.path.join(model_directory, configuration['type'], configuration['model'] + '.json'))) as f:
-            model = json.load(f)
-        if 'efficiency_elec_under50' in model:
-            self.efficiency_elec_under50 = model['efficiency_elec_under50']
-        else:
-            warnings.warn(f"No column for efficiency_elec_under50 for component {self.name}!")
-        if 'efficiency_heat_under50' in model:
-            self.efficiency_heat_under50 = model['efficiency_heat_under50']
-        else:
-            warnings.warn(f"No column for efficiency_heat_under50 for component {self.name}!")
-        if 'efficiency_elec_over50' in model:
-            self.efficiency_elec_over50 = model['efficiency_elec_over50']
-        else:
-            warnings.warn(f"No column for efficiency_elec_over50 for component {self.name}!")
-        if 'efficiency_heat_over50' in model:
-            self.efficiency_heat_over50 = model['efficiency_heat_over50']
-        else:
-            warnings.warn(f"No column for efficiency_heat_over50 for component {self.name}!")
-    
-    def get_base_variable_names(self):
-        return super().get_base_variable_names() + self.get_power_flow_variable_names()
-    
-    def get_input_output_variable_names(self):
-        return [((self.name, 'input_1'), 'T'), ((self.name, 'output_1'), 'T'), ((self.name, 'output_2'), 'T')]
-    
-    def get_power_flow_variable_names(self):
-        return [((self.name, 'p_el'), 'T'), ((self.name, 'p_th'), 'T'), ((self.name, 'p_g'), 'T'), ((self.name, 'z_50perc'), 'T')]
-
-    def _add_variables(self, model, prefix):
-        super()._add_variables(self, model, prefix)
-        self._add_power_flow_variables(model, prefix)
-        
-    def add_input_output_variables(self, model, prefix):
-        model.add(prefix + ('input_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_2',), pyo.Var(model.T, bounds=(0, None)))
-
-    def _add_power_flow_variables(self, model, prefix):
-        model.add(prefix + ('p_el',), pyo.Var(model.T))
-
-        model.add(prefix + ('p_th',), pyo.Var(model.T))
-
-        model.add(prefix + ('p_g',), pyo.Var(model.T))
-
-        model.add(prefix + ('z_50perc',), pyo.Var(model.T, domain=pyo.Binary))
-
-    def _constraint_capacity(self, model, prefix):
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_2',)][t] <= model.component_dict[prefix + ('capacity',)]
-        model.add(prefix + ('capacity_cons',), pyo.Constraint(model.T, rule = rule))
-
-    def _constraint_conser(self, model, prefix, configuration):
-        # p_g is the total gas input of this bhkw (continuous and unbounded)
-        # constraint (1)
-        # p_el is the total electrical output (continuous and unbounded)
-        # constraint (2)
-        # p_th is the total thermal output (continuous and unbounded)
-        # constraint (3)
-        # z_50perc is binary --> z_50perc = 0 when p_el <= 50% capacity; z_50perc = 1 when p_el >= 50% capacity
-        # (p_g - 0.5*gas_power)/bigM <= z_50perc <= (p_g - 0.5*gas_power)/bigM + 1
-        # constraint (4) (5)
-        # eta_el_under50 * p_g - bigM * z_50perc <= p_el <= eta_el_under50 * p_g + bigM * z_50perc
-        # p_el = eta_el_under50 * p_g if z_50perc = 0; p_el open, if z_50perc = 1
-        # constraint (6) (7)
-        # eta_heat_under50 * p_g - bigM * z_50perc <= p_th <= eta_heat_under50 * p_g + bigM * z_50perc
-        # p_th = eta_heat_under50 * p_g if z_50perc = 0; p_th open, if z_50perc = 1
-        # constraint (8) (9)
-        # eta_el_over50 * p_g - bigM * (1-z_50perc) <= p_el <= eta_el_over50 * p_g + bigM * (1-z_50perc)
-        # p_el = eta_el_over50 * p_g if z_50perc = 1; p_el open, if z_50perc = 0
-        # constraint (10) (11)
-        # eta_heat_over50 * p_g - bigM * (1-z_50perc) <= p_th <= eta_heat_over50 * p_g + bigM * (1-z_50perc)
-        # p_th = eta_heat_over50 * p_g if z_50perc = 1; p_th open, if z_50perc = 0
-        # constraint (12) (13)
-
-        bigM = 1e10
-
-            # constraint (1)
-        def rule(m, t):
-            return model.component_dict[prefix + ('input_1',)][t] == model.component_dict[prefix + ('p_g',)][t]
-        model.add(prefix + ('conser_1',), pyo.Constraint(model.T, rule = rule))
-
-            # constraint (2)
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_2',)][t] == model.component_dict[prefix + ('p_el',)][t]
-        model.add(prefix + ('conser_2',), pyo.Constraint(model.T, rule = rule))
-        
-            # constraint (3)
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] == model.component_dict[prefix + ('p_th',)][t]
-        model.add(prefix + ('conser_3',), pyo.Constraint(model.T, rule = rule))
-        
-            # constraint (4)
-        def rule(m, t):
-            return (model.component_dict[prefix + ('p_el',)][t] - 0.5 * model.component_dict[prefix + ('capacity',)]) / bigM + 1 >= model.component_dict[prefix + ('z_50perc',)][t]
-        model.add(prefix + ('conser_4',), pyo.Constraint(model.T, rule = rule))
-        
-            # constraint (5)
-        def rule(m, t):
-            return (model.component_dict[prefix + ('p_el',)][t] - 0.5 * model.component_dict[prefix + ('capacity',)]) / bigM <= model.component_dict[prefix + ('z_50perc',)][t]
-        model.add(prefix + ('conser_5',), pyo.Constraint(model.T, rule = rule))
-        
-            # constraint (6)
-        def rule(m, t):
-            return model.component_dict[prefix + ('p_g',)][t] * self.efficiency_elec_under50 - bigM * model.component_dict[prefix + ('z_50perc',)][t] <= model.component_dict[prefix + ('p_el',)][t]
-        model.add(prefix + ('conser_6',), pyo.Constraint(model.T, rule = rule))
-        
-            # constraint (7)
-        def rule(m, t):
-            return model.component_dict[prefix + ('p_g',)][t] * self.efficiency_elec_under50 + bigM * model.component_dict[prefix + ('z_50perc',)][t] >= model.component_dict[prefix + ('p_el',)][t]
-        model.add(prefix + ('conser_7',), pyo.Constraint(model.T, rule = rule))
-        
-            # constraint (8)
-        def rule(m, t):
-            return model.component_dict[prefix + ('p_g',)][t] * self.efficiency_heat_under50 - bigM * model.component_dict[prefix + ('z_50perc',)][t] <= model.component_dict[prefix + ('p_th',)][t]
-        model.add(prefix + ('conser_8',), pyo.Constraint(model.T, rule = rule))
-        
-            # constraint (9)
-        def rule(m, t):
-            return model.component_dict[prefix + ('p_g',)][t] * self.efficiency_heat_under50 + bigM * model.component_dict[prefix + ('z_50perc',)][t] >= model.component_dict[prefix + ('p_th',)][t]
-        model.add(prefix + ('conser_9',), pyo.Constraint(model.T, rule = rule))
-        
-            # constraint (10)
-        def rule(m, t):
-            return model.component_dict[prefix + ('p_g',)][t] * self.efficiency_elec_over50 - bigM * (1 - model.component_dict[prefix + ('z_50perc',)][t]) <= model.component_dict[prefix + ('p_el',)][t]
-        model.add(prefix + ('conser_10',), pyo.Constraint(model.T, rule = rule))
-        
-            # constraint (11)
-        def rule(m, t):
-            return model.component_dict[prefix + ('p_g',)][t] * self.efficiency_elec_over50 + bigM * (1 - model.component_dict[prefix + ('z_50perc',)][t]) >= model.component_dict[prefix + ('p_el',)][t]
-        model.add(prefix + ('conser_11',), pyo.Constraint(model.T, rule = rule))
-        
-            # constraint (12)
-        def rule(m, t):
-            return model.component_dict[prefix + ('p_g',)][t] * self.efficiency_heat_over50 - bigM * (1 - model.component_dict[prefix + ('z_50perc',)][t]) <= model.component_dict[prefix + ('p_th',)][t]
-        model.add(prefix + ('conser_12',), pyo.Constraint(model.T, rule = rule))
-        
-            # constraint (13)
-        def rule(m, t):
-            return model.component_dict[prefix + ('p_g',)][t] * self.efficiency_heat_over50 + bigM * (1 - model.component_dict[prefix + ('z_50perc',)][t]) >= model.component_dict[prefix + ('p_th',)][t]
-        model.add(prefix + ('conser_13',), pyo.Constraint(model.T, rule = rule))
diff --git a/Component/model/gas_components/BHKW/__init__.py b/Component/model/gas_components/BHKW/__init__.py
deleted file mode 100644
index f7807f03e806e1b69dbfc4975975cf00d9b33ef8..0000000000000000000000000000000000000000
--- a/Component/model/gas_components/BHKW/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .BHKW import BHKW
diff --git a/Component/model/gas_components/__init__.py b/Component/model/gas_components/__init__.py
deleted file mode 100644
index 6d2809eed71d0a57a8f84395be73dd5bb28ebcea..0000000000000000000000000000000000000000
--- a/Component/model/gas_components/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .external_sources import *
diff --git a/Component/model/gas_components/chp/CHP.py b/Component/model/gas_components/chp/CHP.py
deleted file mode 100644
index 73def5794f0760106025773ac9cad8563e517517..0000000000000000000000000000000000000000
--- a/Component/model/gas_components/chp/CHP.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-import warnings
-import pyomo.environ as pyo
-import json
-import os
-
-class CHP(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='CHP',
-                         commodity_1=ComponentCommodity.GAS,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.HEAT,
-                         commodity_4=ComponentCommodity.ELECTRICITY,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
-
-        with open((os.path.join(model_directory, configuration['type'], configuration['model'] + '.json'))) as f:
-            model = json.load(f)
-        if 'efficiency_heat' in model:
-            self.efficiency_heat = model['efficiency_heat']
-        else:
-            warnings.warn(f"No column for efficiency_heat for component {self.name}!")
-        if 'efficiency_elec' in model:
-            self.efficiency_elec = model['efficiency_elec']
-        else:
-            warnings.warn(f"No column for efficiency_elec for component {self.name}!")
-    
-    def get_input_output_variable_names(self):
-        return [((self.name, 'input_1'), 'T'), ((self.name, 'output_1'), 'T'), ((self.name, 'output_2'), 'T')]
-
-    def add_input_output_variables(self, model, prefix):
-        model.add(prefix + ('input_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_2',), pyo.Var(model.T, bounds=(0, None)))
-
-    def _constraint_capacity(self, model, prefix):
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_2',)][t] <= model.component_dict[prefix + ('capacity',)]
-        model.add(prefix + ('capacity_cons',), pyo.Constraint(model.T, rule = rule))
-
-    def _constraint_conser(self, model, prefix, configuration):
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] == model.component_dict[prefix + ('input_1',)][t] * self.efficiency_heat
-        model.add(prefix + ('conser_1',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_2',)][t] == model.component_dict[prefix + ('input_1',)][t] * self.efficiency_elec
-        model.add(prefix + ('conser_2',), pyo.Constraint(model.T, rule = rule))
diff --git a/Component/model/gas_components/chp/__init__.py b/Component/model/gas_components/chp/__init__.py
deleted file mode 100644
index 32883d7bc5c66f212f3c9c2b4fc27fe920f16474..0000000000000000000000000000000000000000
--- a/Component/model/gas_components/chp/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .CHP import CHP
\ No newline at end of file
diff --git a/Component/model/gas_components/external_sources/GasGrid.py b/Component/model/gas_components/external_sources/GasGrid.py
deleted file mode 100644
index bdcd106d4c6e313e324e1293e04d877915701502..0000000000000000000000000000000000000000
--- a/Component/model/gas_components/external_sources/GasGrid.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseGrid import BaseGrid
-from Tooling.predictor.Predictor import Predictor
-from Tooling.dynamics.Dynamic import resample
-
-import pyomo.environ as pyo
-
-class GasGrid(BaseGrid):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='GasGrid',
-                         commodity=ComponentCommodity.GAS,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         profiles=profiles,
-                         dynamic=dynamic)
-        
-        if 'emission' in configuration:
-            if isinstance(configuration['emission'], float) or isinstance(configuration['injection_price'], int):
-                self.emission = configuration['emission']
-            elif isinstance(configuration['emission'], str):
-                self.emission = resample(profiles[configuration['emission']][0], profiles[configuration['emission']][1], dynamic)
-            elif isinstance(configuration['emission'], dict):
-                self.emission = Predictor(resample(profiles[configuration['emission']['profile']][0], profiles[configuration['emission']['profile']][1], dynamic), configuration['emission']['type'], configuration['emission']['method'], dynamic)
-        else:
-            self.emission = 0
-    
-    def add_co2_emissions(self, model, configuration):
-        prefix = (self.name,)
-
-        if isinstance(self.emission, float) or isinstance(self.emission, int):
-            def rule(m, t):
-                return model.component_dict[prefix + ('co2_emission',)][t] == model.component_dict[prefix + ('output_1',)][t] * self.emission * model.step_size(t)
-            model.add(prefix + ('co2_emission_cons',), pyo.Constraint(model.T, rule = rule))
-        else:
-            model.add(prefix + ('emission',), pyo.Param(model.T, mutable = True))
-
-            if isinstance(self.emission, Predictor):
-                if 'predict' in configuration:
-                    emission = self.emission.predict(list(model.T))
-                else:
-                    emission = resample(self.emission.profile, self.dynamic, model.dynamic)
-            else:
-                emission = resample(self.emission, self.dynamic, model.dynamic)
-
-            model.set_value(prefix + ('emission',), emission)
-            
-            model.add(prefix + ('co2_emission',), pyo.Var(model.T))
-
-            def rule(m, t):
-                return model.component_dict[prefix + ('co2_emission',)][t] == model.component_dict[prefix + ('output_1',)][t] * model.component_dict[prefix + ('emission',)][t] * model.step_size(t)
-            model.add(prefix + ('co2_emission_cons',), pyo.Constraint(model.T, rule = rule))
-
-        return prefix + ('co2_emission',)
diff --git a/Component/model/gas_components/external_sources/__init__.py b/Component/model/gas_components/external_sources/__init__.py
deleted file mode 100644
index 8c5a3dbddff32bfe320070868b37ebdc7879aff9..0000000000000000000000000000000000000000
--- a/Component/model/gas_components/external_sources/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .GasGrid import GasGrid
diff --git a/Component/model/heat_components/__init__.py b/Component/model/heat_components/__init__.py
deleted file mode 100644
index c05682867645c7cbffa2fc7ef2160d98e0cca544..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/__init__.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .boilers import *
-from .heat_exchangers import *
-from .heat_pumps import *
-from .hot_water_consumption import *
-from .solar_generators import *
-from .heat_consumption import *
-from .storages import *
diff --git a/Component/model/heat_components/boilers/ElectricBoiler.py b/Component/model/heat_components/boilers/ElectricBoiler.py
deleted file mode 100644
index ec226f09130dc93c9f009ee8722dc287fa87ac6c..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/boilers/ElectricBoiler.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-class ElectricBoiler(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="ElectricBoiler",
-                         commodity_1=ComponentCommodity.ELECTRICITY,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.HEAT,
-                         commodity_4=None,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
diff --git a/Component/model/heat_components/boilers/GasBoiler.py b/Component/model/heat_components/boilers/GasBoiler.py
deleted file mode 100644
index 2836719920579c94d2d2f3664feb3326dc98f690..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/boilers/GasBoiler.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-class GasBoiler(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="GasBoiler",
-                         commodity_1=ComponentCommodity.GAS,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.HEAT,
-                         commodity_4=None,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
diff --git a/Component/model/heat_components/boilers/__init__.py b/Component/model/heat_components/boilers/__init__.py
deleted file mode 100644
index 5befb01ac09d4a925e922173165c394e4b487baa..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/boilers/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .ElectricBoiler import ElectricBoiler
-from .GasBoiler import GasBoiler
diff --git a/Component/model/heat_components/external_sources/HeatGrid.py b/Component/model/heat_components/external_sources/HeatGrid.py
deleted file mode 100644
index 293375400e58981c0088f7e40185548ca00f29f4..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/external_sources/HeatGrid.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseGrid import BaseGrid
-
-class HeatGrid(BaseGrid):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='HeatGrid',
-                         commodity=ComponentCommodity.HEAT,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         profiles=profiles,
-                         dynamic=dynamic)
diff --git a/Component/model/heat_components/external_sources/__init__.py b/Component/model/heat_components/external_sources/__init__.py
deleted file mode 100644
index e1b3835f8ca1ab92f349c40e59b87382394e708f..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/external_sources/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .HeatGrid import HeatGrid
diff --git a/Component/model/heat_components/heat_consumption/ElectricRadiator.py b/Component/model/heat_components/heat_consumption/ElectricRadiator.py
deleted file mode 100644
index a9a59cd522faa3f9bd4a6bb2e6474cfbbdf18324..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/heat_consumption/ElectricRadiator.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-class ElectricRadiator(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="ElectricRadiator",
-                         commodity_1=ComponentCommodity.ELECTRICITY,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.HEAT,
-                         commodity_4=None,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
diff --git a/Component/model/heat_components/heat_consumption/Radiator.py b/Component/model/heat_components/heat_consumption/Radiator.py
deleted file mode 100644
index 96943fac29a54fe13b42f42cdd7d02595e4570d4..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/heat_consumption/Radiator.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseConsumption import BaseConsumption
-
-class Radiator(BaseConsumption):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="Radiator",
-                         commodity=ComponentCommodity.HEAT,
-                         configuration=configuration,
-                         profiles=profiles,
-                         dynamic=dynamic)
diff --git a/Component/model/heat_components/heat_consumption/UnderfloorHeat.py b/Component/model/heat_components/heat_consumption/UnderfloorHeat.py
deleted file mode 100644
index af3052febe6b24bd2dbb8119dc13c376c556be29..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/heat_consumption/UnderfloorHeat.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-class UnderfloorHeat(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="UnderfloorHeat",
-                         commodity_1=ComponentCommodity.HEAT,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.HEAT,
-                         commodity_4=None,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
diff --git a/Component/model/heat_components/heat_consumption/__init__.py b/Component/model/heat_components/heat_consumption/__init__.py
deleted file mode 100644
index c86d0aacc1fba2a22619022672872bcc1ca4e409..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/heat_consumption/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .Radiator import Radiator
-from .ElectricRadiator import ElectricRadiator
-from .HeatConsumption import HeatConsumption
-from .UnderfloorHeat import UnderfloorHeat
diff --git a/Component/model/heat_components/heat_exchangers/HeatExchanger.py b/Component/model/heat_components/heat_exchangers/HeatExchanger.py
deleted file mode 100644
index b8274f1251af76746ea3e8bda98c8d2103ef69a1..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/heat_exchangers/HeatExchanger.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-class HeatExchanger(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="HeatExchanger",
-                         commodity_1=ComponentCommodity.HEAT,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.HEAT,
-                         commodity_4=None,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
diff --git a/Component/model/heat_components/heat_exchangers/__init__.py b/Component/model/heat_components/heat_exchangers/__init__.py
deleted file mode 100644
index a0ae89418485042baf871ccbe17c382df33aeead..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/heat_exchangers/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .HeatExchanger import HeatExchanger
diff --git a/Component/model/heat_components/heat_pumps/GasHeatPump.py b/Component/model/heat_components/heat_pumps/GasHeatPump.py
deleted file mode 100644
index 65ddc66724175400499bb1290566f70c58d279b9..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/heat_pumps/GasHeatPump.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-class GasHeatPump(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="GasHeatPump",
-                         commodity_1=ComponentCommodity.GAS,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.HEAT,
-                         commodity_4=None,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
diff --git a/Component/model/heat_components/heat_pumps/HeatPump.py b/Component/model/heat_components/heat_pumps/HeatPump.py
deleted file mode 100644
index f495208b382eddc9968498167a7696a20a0cb32d..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/heat_pumps/HeatPump.py
+++ /dev/null
@@ -1,144 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-from Tooling.predictor.Predictor import Predictor
-from Tooling.dynamics.Dynamic import resample
-
-import pyomo.environ as pyo
-import pandas as pd
-
-class HeatPump(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="HeatPump",
-                         commodity_1=ComponentCommodity.ELECTRICITY,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.HEAT,
-                         commodity_4=None,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
-
-        if isinstance(configuration['temperature'], str):
-            self.temperature = resample(profiles[configuration['temperature']][0], profiles[configuration['temperature']][1], dynamic)
-        elif isinstance(configuration['temperature'], dict):
-            self.temperature = Predictor(resample(profiles[configuration['temperature']['profile']][0], profiles[configuration['temperature']['profile']][1], dynamic), configuration['temperature']['type'], configuration['temperature']['method'], dynamic)
-
-    def calc_cop(self, amb_t, set_t=40):
-        return (set_t + 273.15) / (set_t - amb_t) * self.efficiency
-
-    def _constraint_conser(self, model, prefix, configuration):
-        model.add(prefix + ('cop',), pyo.Param(model.T, mutable = True))
-
-        if isinstance(self.temperature, Predictor):
-            if 'predict' in configuration:
-                temperature = self.temperature.predict(list(model.T))
-            else:
-                temperature = resample(self.temperature.profile, self.dynamic, model.dynamic)
-        else:
-            temperature = resample(self.temperature, self.dynamic, model.dynamic)
-        cop = self.calc_cop(temperature)
-
-        model.set_value(prefix + ('cop',), cop)
-    
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] == model.component_dict[prefix + ('input_1',)][t] * model.component_dict[prefix + ('cop',)][t]
-        model.add(prefix + ('conser',), pyo.Constraint(model.T, rule = rule))
-
-    # Flexibility stuff
-
-    def check_limits(self, input_flows, output_flows, results, dynamic):
-
-        pos_flex = self.temp_flex['flex_pos'][dynamic.time_steps()]
-        pos_flex_inc = self.temp_flex['flex_pos_inc'][dynamic.time_steps()]  # increasing of comp output
-        pos_flex_dec = self.temp_flex['flex_pos_dec'][dynamic.time_steps()]  # curtailment of comp input
-
-        neg_flex = self.temp_flex['flex_neg'][dynamic.time_steps()]
-        neg_flex_inc = self.temp_flex['flex_neg_inc'][dynamic.time_steps()]  # increasing of comp input
-        neg_flex_dec = self.temp_flex['flex_neg_dec'][dynamic.time_steps()]  # curtailment of comp output
-
-        power_limit = 1 * results[(self.name, 'capacity')]  # excluding the excess power for safety
-
-        # upper limit for neg-inc df
-        upper_limit_neg_inc = power_limit - input_flows
-        upper_limit_neg_inc.loc[upper_limit_neg_inc < 0] = 0  # account for the case input_flows > power_limit (excess)
-
-        # max neg-inc df on input side of converter
-        max_neg_inc = neg_flex_inc.combine(upper_limit_neg_inc, min)
-
-        # negative flex doesn't need conversion as it already has been
-        max_neg_inc_out = max_neg_inc
-
-        self.temp_flex['flex_neg_inc'] = max_neg_inc_out
-        self.temp_flex.loc[self.temp_flex['flex_neg_inc'] < 0, 'flex_neg_inc'] = 0
-
-        self.temp_flex['flex_neg'] = self.temp_flex['flex_neg_inc'] + self.temp_flex['flex_neg_dec']
-
-    def adjust_flex_with_efficiency(self, results, dynamic):
-        # ToDo: Is this correct? Efficiency effects energy as well because in the end the community has to know how much
-        # energy is still available after each flex activation. e.g.: If the efficiency would not affect the energy,
-        # the community would always underestimate the effect of a positive flexibility activation on the energy amount
-        # left
-
-        self.flex_pos_inc_old = self.temp_flex['flex_pos_inc'].copy()
-        self.flex_neg_dec_old = self.temp_flex['flex_neg_dec'].copy()
-
-        power_limit = 1 * results[(self.name, 'capacity')]  # excluding the excess power for safety
-
-        total_in = pd.Series(data=0.0, index=dynamic.time_steps())
-        total_in += results[(self.name, 'input_1')]  # * self.input_efficiency
-        # total out stays zero
-        # ------------EFFICIENCY--------------
-        # the idea here is that the flexibility that comes out of the inverter is
-        # related to the efficiency of the inverter.
-        # So considering the power flow of the flexibility type,
-        # the net flexibility gets higher or lower compared to the flexibility at the origin component
-
-        # ------POSITIVE--------------
-        self.temp_flex['flex_pos_inc'] = 0  # increasing positive flex means lower flex at grid
-        # decreasing positive flex means higher flex at grid
-        self.temp_flex['flex_pos_dec'] = total_in
-        self.temp_flex['flex_pos'] = total_in
-
-        self.temp_flex['e_dch'] = self.temp_flex['e_dch'] / self.cop[dynamic.time_steps()]
-        # ------NEGATIVE--------------
-        # increasing negative flex means higher flex at grid
-        self.temp_flex['flex_neg_inc'] = power_limit - total_in
-        self.temp_flex['flex_neg_dec'] = 0  # decreasing neg flex means lower flex at grid
-        self.temp_flex['flex_neg'] = power_limit - total_in
-
-        self.temp_flex['e_cha'] = self.temp_flex['e_cha'] / self.cop[dynamic.time_steps()]
-
-    def calc_correction_factors(self, time_steps, c_f_df, results):
-        c_f_cha = pd.Series(data=1, index=time_steps)
-        #c_f_cha = pd.Series(data=cop, index=T)
-        c_static_cha = pd.Series(data=0, index=time_steps)
-
-        c_f_df['c_f_cha'] = c_f_cha * c_f_df['c_f_cha']
-        c_f_df['c_static_cha'] = c_static_cha + c_f_df['c_static_cha']
-
-        return c_f_df
diff --git a/Component/model/heat_components/heat_pumps/__init__.py b/Component/model/heat_components/heat_pumps/__init__.py
deleted file mode 100644
index 5a2e72d024aa2f202eee4535df365696625b9d80..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/heat_pumps/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .HeatPump import HeatPump
-from .GasHeatPump import GasHeatPump
diff --git a/Component/model/heat_components/hot_water_consumption/HotWaterConsumption.py b/Component/model/heat_components/hot_water_consumption/HotWaterConsumption.py
deleted file mode 100644
index 9ed5d7df468bda93e4cef77b989e0d61f87c00d4..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/hot_water_consumption/HotWaterConsumption.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseConsumption import BaseConsumption
-from Tooling.predictor.Predictor import Predictor
-from Tooling.dynamics.Dynamic import resample
-
-import pyomo.environ as pyo
-
-class HotWaterConsumption(BaseConsumption):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="HotWaterConsumption",
-                         commodity=ComponentCommodity.HEAT,
-                         configuration=configuration,
-                         profiles=profiles,
-                         dynamic=dynamic)
diff --git a/Component/model/heat_components/hot_water_consumption/__init__.py b/Component/model/heat_components/hot_water_consumption/__init__.py
deleted file mode 100644
index 27e29d2876c62b1edd2712e77b0f5cae63ad0bb8..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/hot_water_consumption/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .HotWaterConsumption import HotWaterConsumption
diff --git a/Component/model/heat_components/solar_generators/SolarThermalCollector.py b/Component/model/heat_components/solar_generators/SolarThermalCollector.py
deleted file mode 100644
index 84c733fabc495d0c49b45523c98e8dfca6839dfd..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/solar_generators/SolarThermalCollector.py
+++ /dev/null
@@ -1,80 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-from Tooling.predictor.Predictor import Predictor
-from Tooling.dynamics.Dynamic import resample
-
-import pyomo.environ as pyo
-
-class SolarThermalCollector(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="SolarThermalCollector",
-                         commodity_1=None,
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.HEAT,
-                         commodity_4=None,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
-
-        if isinstance(configuration['irradiance'], str):
-            self.irradiance = resample(profiles[configuration['irradiance']][0], profiles[configuration['irradiance']][1], dynamic)
-        elif isinstance(configuration['irradiance'], dict):
-            self.irradiance = Predictor(resample(profiles[configuration['irradiance']['profile']][0], profiles[configuration['irradiance']['profile']][1], dynamic), configuration['irradiance']['type'], configuration['irradiance']['method'], dynamic)
-
-    def calculate_power_factors(self, irradiance):
-        return irradiance * 0.001 # <--- Yi: your code here
-    
-    def get_input_output_variable_names(self):
-        return [((self.name, 'output_1'), 'T')]
-
-    def add_input_output_variables(self, model, prefix):
-        model.add(prefix + ('output_1',), pyo.Var(model.T, bounds=(0, None)))
-
-    def _constraint_capacity(self, model, prefix):
-        pass
-
-    def _constraint_conser(self, model, prefix, configuration):
-        model.add(prefix + ('power_factor',), pyo.Param(model.T, mutable = True))
-
-        if isinstance(self.irradiance, Predictor):
-            if 'predict' in configuration:
-                irradiance = self.irradiance.predict(list(model.T))
-            else:
-                irradiance = resample(self.irradiance.profile, self.dynamic, model.dynamic)
-        else:
-            irradiance = resample(self.irradiance, self.dynamic, model.dynamic)
-        power_factors = self.calc_correction_factors(irradiance)
-        power_factors.loc[lambda power_factor: power_factor > 1] = 1
-        power_factors.loc[lambda power_factor: power_factor < 0] = 0
-
-        model.set_value(prefix + ('power_factor',), power_factors)
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] == model.component_dict[prefix + ('capacity',)] * model.component_dict[prefix + ('power_factor',)][t]
-        model.add(prefix + ('conser',), pyo.Constraint(model.T, rule = rule))
diff --git a/Component/model/heat_components/solar_generators/__init__.py b/Component/model/heat_components/solar_generators/__init__.py
deleted file mode 100644
index baad639461f367d6f72979821525b16bb21cbfaf..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/solar_generators/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .SolarThermalCollector import SolarThermalCollector
diff --git a/Component/model/heat_components/storages/HotWaterStorage.py b/Component/model/heat_components/storages/HotWaterStorage.py
deleted file mode 100644
index ddf2c32b7349d31d7ebebf65a84ac66a8a685c44..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/storages/HotWaterStorage.py
+++ /dev/null
@@ -1,165 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseStorage import BaseStorage
-
-import pyomo.environ as pyo
-import pandas as pd
-
-class HotWaterStorage(BaseStorage):
-    
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="HotWaterStorage",
-                         commodity=ComponentCommodity.HEAT,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
-
-    def _constraint_conser(self, model, prefix, configuration):
-        if 'storage_connect' in configuration and self.match(commodity=configuration['storage_connect']['commodity']):
-            last_energy = configuration['storage_connect']['values'][self.name]
-            model.add(prefix + ('fix_first_energy',), pyo.Constraint(expr = model.component_dict[prefix + ('energy',)][model.T_prime.first()] == last_energy))
-        else:
-            model.add(prefix + ('fix_first_energy',), pyo.Constraint(expr = model.component_dict[prefix + ('energy',)][model.T_prime.first()] == self.init_soe * model.component_dict[prefix + ('capacity',)]))
-        
-        def rule(m, t):
-            return model.component_dict[prefix + ('energy',)][t] == model.component_dict[prefix + ('energy',)][model.T_prime[model.T_prime.ord(t) - 1]] * 0.995 + model.component_dict[prefix + ('input_1',)][t] * self.input_efficiency * model.step_size(t) - model.component_dict[prefix + ('output_1',)][t] / self.output_efficiency * model.step_size(t)
-        model.add(prefix + ('conser',), pyo.Constraint(model.T, rule = rule))
-
-    # Flexibility stuff
-
-    def calc_flex_comp(self, results, dynamic, init_results):
-        """
-        This method calculates the maximal possible flexibility from the battery.
-        This is done by comparing two values:
-            1) The maximal possible CHA/DCH power
-            2) The maximal allowed CHA/DCH power regarding the available power BEFORE the PLANNED CHA/DCH
-        The minimal value is chosen to be the maximal possible CHA/DCH power. To claculate the available flexibility from this,
-        the PLANNED CHA/DCH rates are added/substracted depending on the type of flexibility.
-        E.g. positive flex: the current DCH power is substracted and the current CHA power is added
-        Parameters
-        ----------
-        flows
-        results
-        T
-
-        Returns
-        -------
-
-        """
-
-        cap_storage = results[(self.name, 'capacity')]  # the total theoretical capacity of the comm_batt
-        e_storage = results[(self.name, 'energy')]
-        e_storage_shift = e_storage.shift(periods=1)
-        e_storage_shift[dynamic.index_of(0)] = cap_storage[dynamic.index_of(0)] * self.init_soe
-
-        # maximal in/out powers that the comm_batt is capable of
-        max_power_dch = 0
-        max_power_cha = 0
-
-        # get the total output/input powers
-        init_dch = pd.Series(data=0.0, index=dynamic.time_steps())
-        init_dch += results[(self.name, 'output_1')] #  self.output_efficiency #here not necessary
-
-        init_cha = pd.Series(data=0.0, index=dynamic.time_steps())
-        init_cha += results[(self.name, 'input_1')] # self.input_efficiency
-
-        # for now inputs and outputs are not relevant in this component because we just look at electric flows.
-        # The power constraints will be added by the heatpump which has the boundaries for the electrical power.
-        total_dch = pd.Series(data=0.0, index=dynamic.time_steps())
-        total_cha = pd.Series(data=0.0, index=dynamic.time_steps())
-
-        # now the energy restriction is calculated
-        # ToDO @jbr: in the future here should be an extra restriction if some SOE values
-        #  have to be reached in future timesteps
-
-        # special energy restriction: the available energy for discharge in time step t depends on the available
-        # energy in t+x. x is the number of time steps until an ongoing discharging process is finished.
-        # That means that E(t+x) >= E(t+x-1)
-
-        e_dch = pd.Series(data=0.0, index=dynamic.time_steps())
-        for t in dynamic.time_steps():
-            if t == init_results.index[0]:
-                e_old = self.init_soe * cap_storage[0]
-            else:
-                e_old = init_results[(self.name, 'energy')].loc[t]
-
-            if e_old <= self.min_soe * cap_storage[dynamic.index_of(0)]:
-                e_dch[t] = 0
-                continue
-
-            for e_act in init_results[(self.name, 'energy')].loc[t+1:]:
-                if e_act >= e_old:
-                    e_dch[t] = e_old - self.min_soe * cap_storage[dynamic.index_of(0)]
-                    break
-                elif e_act <= self.min_soe * cap_storage[dynamic.index_of(0)]:
-                    e_dch[t] = 0
-                    break
-                else:
-                    e_old = e_act
-        e_cha = (1 * cap_storage[dynamic.index_of(0)] - e_storage)  # / self.input_efficiency
-        e_dch = e_dch.combine((e_storage - 0.2 * cap_storage),min)  # * self.output_efficiency
-
-        e_cha.loc[e_cha < 0] = 0
-        e_dch.loc[e_dch < 0] = 0
-
-        for t in dynamic.time_steps():
-            if e_dch[t] < -1 * e_cha[t]:
-                print('Fehler zum Zeitpunkt ' + str(t))
-
-        # pos flex:
-        self.temp_flex['flex_pos_inc'] = max_power_dch - total_dch
-        self.temp_flex.loc[self.temp_flex['flex_pos_inc'] < 0, 'flex_pos_inc'] = 0
-
-        self.temp_flex['flex_pos_dec'] = total_cha
-        self.temp_flex['flex_pos'] = max_power_dch - total_dch + total_cha
-        self.temp_flex.loc[self.temp_flex['flex_pos'] < 0, 'flex_pos'] = 0
-
-        self.temp_flex['e_dch'] = e_dch
-
-        # neg flex: negative signs due to negative flexibility
-        self.temp_flex['flex_neg_inc'] = max_power_cha - total_cha
-        self.temp_flex.loc[self.temp_flex['flex_neg_inc'] < 0, 'flex_neg_inc'] = 0
-        self.temp_flex['flex_neg_dec'] = total_dch
-        self.temp_flex['flex_neg'] = max_power_cha - total_cha + total_dch
-        self.temp_flex.loc[self.temp_flex['flex_neg'] < 0, 'flex_neg'] = 0
-
-        self.temp_flex['e_cha'] = e_cha
-
-    def calc_correction_factors(self, time_steps, c_f_df, results):
-
-        c_f_dch = pd.Series(data=1/self.output_efficiency, index=time_steps)
-        c_static_dch =pd.Series(data=0, index=time_steps)
-        c_f_cha = pd.Series(data=self.input_efficiency, index=time_steps)
-        c_static_cha = pd.Series(data=0, index=time_steps)
-
-        c_f_df['c_f_dch'] = c_f_dch * c_f_df['c_f_dch']
-        c_f_df['c_static_dch'] = c_static_dch + c_f_df['c_static_dch']
-
-        c_f_df['c_f_cha'] = c_f_cha * c_f_df['c_f_cha']
-        c_f_df['c_static_cha'] = c_static_cha + c_f_df['c_static_cha']
-
-        return c_f_df
diff --git a/Component/model/heat_components/storages/__init__.py b/Component/model/heat_components/storages/__init__.py
deleted file mode 100644
index 9de4688c905a750be77fc2e0ce144bc017fd658a..0000000000000000000000000000000000000000
--- a/Component/model/heat_components/storages/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .HotWaterStorage import HotWaterStorage
diff --git a/Component/model/hydrogen_components/__init__.py b/Component/model/hydrogen_components/__init__.py
deleted file mode 100644
index f7e9a715fe0af82d14985e31cc0469484945112e..0000000000000000000000000000000000000000
--- a/Component/model/hydrogen_components/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by mce on 06.01.2021.
-
-from .electrolyzers import *
-from .fuel_cells import *
-from .storages import *
diff --git a/Component/model/hydrogen_components/electrolyzers/PEMElectrolyzer.py b/Component/model/hydrogen_components/electrolyzers/PEMElectrolyzer.py
deleted file mode 100644
index e288d1181e54b6637d78e64470662f58075e19e9..0000000000000000000000000000000000000000
--- a/Component/model/hydrogen_components/electrolyzers/PEMElectrolyzer.py
+++ /dev/null
@@ -1,140 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import  BaseComponent
-
-import pandas as pd
-
-class PEMElectrolyzer(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='PEMElectrolyzer',
-                         commodity_1=ComponentCommodity.ELECTRICITY,
-                         commodity_2=None,
-                         commodity_3='hydrogen',
-                         commodity_4=None,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
-
-    # Flexibility stuff
-
-    def check_limits(self, input_flows, output_flows, results, dynamic):
-        """
-        calculate the consequences of maximal positive and negative flexibility. Positive flexibility can
-        increase  (pos_flex_inc) and decrease (pos_flex_dec) the inputs of an Inverter.
-        The negative flexibility increases/decreases the outputs of the Inverter.
-        Therefore, they have to be transformed to input values.
-        A
-        Parameters
-        ----------
-        path_comps
-        comp
-
-        Returns
-        -------
-        diff --> the differences between maximal input flows from flexibility usage and power limit
-        """
-
-        # flexibility that was already translated to the input side by def adjust_with_efficiency
-        pos_flex_dec = self.temp_flex['flex_pos_dec'][dynamic.time_steps()]
-        neg_flex_inc = self.temp_flex['flex_neg_inc'][dynamic.time_steps()]
-
-        power_limit = 1 * results[(self.name, 'capacity')]  # excluding the excess power for safety
-
-        # ----------------CHECK POWER--------------------
-
-        # additional input power from positive flexibility
-        # the increasing part has to be retransformed to the 'other' side of the comp becaus ethat ois  the value that is really increased
-        # the decreasing part stays the same because the input flow of this flex type comes from the grid
-        max_input_1 = input_flows + self.flex_pos_inc_old - pos_flex_dec
-        # the decreasing part has to be retransformed to the 'other' side of the comp becaus ethat ois  the value that is really increased
-        # the increasing part stays the same because the input flow of this flex type comes from the grid
-        # max_input_2 = neg_flex_inc.combine(input_flows - self.flex_neg_dec_old,  max)  # additional input power from negative flexibility
-        max_input_2 = input_flows + neg_flex_inc - self.flex_neg_dec_old  # additional input power from negative flexibility
-
-
-        excess_input_1 = max_input_1 - power_limit
-        excess_input_2 = max_input_2 - power_limit
-        excess_input_1.loc[excess_input_1 < 0] = 0
-        excess_input_2.loc[excess_input_2 < 0] = 0
-
-        # adjust the flexibility values
-        # the energy values are not affected because they are still availabele even if inter_comps can not handle the amount
-        trans_excess_1 = excess_input_1 * self.efficiency
-        trans_excess_1.loc[trans_excess_1 < 0] = 0
-
-        self.temp_flex['flex_pos_inc'] -= trans_excess_1  # excess has to be transformed to inv output value
-        self.temp_flex.loc[self.temp_flex['flex_pos_inc'] < 0, 'flex_pos_inc'] = 0
-
-        self.temp_flex['flex_neg_inc'] -= excess_input_2
-        self.temp_flex.loc[self.temp_flex['flex_neg_inc'] < 0, 'flex_neg_inc'] = 0
-
-        self.temp_flex['flex_pos'] = self.temp_flex['flex_pos_inc'] + self.temp_flex['flex_pos_dec']
-        self.temp_flex['flex_neg'] = self.temp_flex['flex_neg_inc'] + self.temp_flex['flex_neg_dec']
-
-    def adjust_flex_with_efficiency(self, results, dynamic):
-
-        # ToDo: Is this correct? Efficiency effects energy as well because in the end the community has to know how much
-        # energy is still available after each flex activation. e.g.: If the efficiency would not affect the energy,
-        # the community would always underestimate the effect of a positive flexibility activation on the energy amount
-        # left
-
-        # an electroyzer does just offer negative increasing flex or positive decreasing
-        self.flex_pos_inc_old = self.temp_flex['flex_pos_inc'].copy() * 0
-        self.flex_neg_dec_old = self.temp_flex['flex_neg_dec'].copy() * 0
-
-        # ------------EFFICIENCY--------------
-        # the idea here is that the flexibility that comes out of the inverter is
-        # related to the efficiency of the inverter.
-        # So considering the power flow of the flexibility type,
-        # the net flexibility gets higher or lower compared to the flexibility at the origin component
-
-        # ------POSITIVE--------------
-        # increasing positive flex means lower flex at grid
-        self.temp_flex['flex_pos_inc'] = self.temp_flex['flex_pos_inc'] * 0
-        # decreasing positive flex means higher flex at grid
-        self.temp_flex['flex_pos_dec'] = self.temp_flex['flex_pos_dec'] / self.efficiency
-
-        self.temp_flex['flex_pos'] = self.temp_flex['flex_pos_inc'] + self.temp_flex['flex_pos_dec']
-
-        # ------NEGATIVE--------------
-        # increasing negative flex means higher flex at grid
-        self.temp_flex['flex_neg_inc'] = self.temp_flex['flex_neg_inc'] / self.efficiency
-        # decreasing neg flex means lower flex at grid
-        self.temp_flex['flex_neg_dec'] = self.temp_flex['flex_neg_dec'] * 0
-
-        self.temp_flex['flex_neg'] = (self.temp_flex['flex_neg_inc'] + self.temp_flex['flex_neg_dec'])
-
-    def calc_correction_factors(self, time_steps, c_f_df, results):
-
-        c_f_cha = pd.Series(data=self.efficiency, index=time_steps)
-        c_static_cha = pd.Series(data=0, index=time_steps)
-
-        c_f_df['c_f_cha'] = c_f_cha * c_f_df['c_f_cha']
-        c_f_df['c_static_cha'] = c_static_cha + c_f_df['c_static_cha']
-
-        return c_f_df
diff --git a/Component/model/hydrogen_components/electrolyzers/__init__.py b/Component/model/hydrogen_components/electrolyzers/__init__.py
deleted file mode 100644
index 6e130b539a109b769fbbe168bab20bf466b0b4a0..0000000000000000000000000000000000000000
--- a/Component/model/hydrogen_components/electrolyzers/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by mce on 06.01.2020.
-
-from .PEMElectrolyzer import PEMElectrolyzer
diff --git a/Component/model/hydrogen_components/fuel_cells/PEMFuelCell.py b/Component/model/hydrogen_components/fuel_cells/PEMFuelCell.py
deleted file mode 100644
index e68007e8331a846938ca5c7ec69ea68f1e56ca6f..0000000000000000000000000000000000000000
--- a/Component/model/hydrogen_components/fuel_cells/PEMFuelCell.py
+++ /dev/null
@@ -1,171 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseComponent import BaseComponent
-
-import warnings
-import pyomo.environ as pyo
-import json
-import os
-import pandas as pd
-
-class PEMFuelCell(BaseComponent):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='PEMFuelCell',
-                         commodity_1='hydrogen',
-                         commodity_2=None,
-                         commodity_3=ComponentCommodity.ELECTRICITY,
-                         commodity_4=ComponentCommodity.HEAT,
-                         configuration=configuration,
-                         model_directory=model_directory,
-                         dynamic=dynamic)
-
-        with open((os.path.join(model_directory, configuration['type'], configuration['model'] + '.json'))) as f:
-            model = json.load(f)
-        if 'elec_eff' in model:
-            self.elec_eff = self.model['elec_eff']
-        else:
-            warnings.warn(f"No column for elec_eff for component {self.name}!")
-        if 'heat_eff' in model:
-            self.heat_eff = self.model['heat_eff']
-        else:
-            warnings.warn(f"No column for heat_eff for component {self.name}!")
-    
-    def get_input_output_variable_names(self):
-        return [((self.name, 'input_1'), 'T'), ((self.name, 'output_1'), 'T'), ((self.name, 'output_2'), 'T')]
-
-    def add_input_output_variables(self, model, prefix):
-        model.add(prefix + ('input_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_1',), pyo.Var(model.T, bounds=(0, None)))
-
-        model.add(prefix + ('output_2',), pyo.Var(model.T, bounds=(0, None)))
-
-    def _constraint_conser(self, model, prefix, configuration):
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_1',)][t] == model.component_dict[prefix + ('input_1',)][t] * self.elec_eff
-        model.add(prefix + ('conser_elec',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[prefix + ('output_2',)][t] == model.component_dict[prefix + ('input_1',)][t] * self.heat_eff
-        model.add(prefix + ('conser_heat',), pyo.Constraint(model.T, rule = rule))
-
-    # Flexibility stuff
-
-    def check_limits(self, input_flows, output_flows, results, dynamic):
-        """
-        calculate the consequences of maximal positive and negative flexibility. Positive flexibility can
-        increase  (pos_flex_inc) and decrease (pos_flex_dec) the inputs of an Inverter.
-        The negative flexibility increases/decreases the outputs of the Inverter.
-        Therefore, they have to be transformed to input values.
-        A
-        Parameters
-        ----------
-        path_comps
-        comp
-
-        Returns
-        -------
-        diff --> the differences between maximal input flows from flexibility usage and power limit
-        """
-        # flexibility that was already translated to the input side by def adjust_with_efficiency
-        pos_flex_dec = self.temp_flex['flex_pos_dec'][dynamic.time_steps()]
-        neg_flex_inc = self.temp_flex['flex_neg_inc'][dynamic.time_steps()]
-
-        power_limit = 1 * results[(self.name, 'capacity')]  # excluding the excess power for safety
-
-        # ----------------CHECK POWER--------------------
-
-        # additional input power from positive flexibility
-        # the increasing part has to be retransformed to the 'other' side of the comp becaus ethat ois  the value that is really increased
-        # the decreasing part stays the same because the input flow of this flex type comes from the grid
-        max_input_1 = input_flows + self.flex_pos_inc_old - pos_flex_dec
-        # the decreasing part has to be retransformed to the 'other' side of the comp becaus ethat ois  the value that is really increased
-        # the increasing part stays the same because the input flow of this flex type comes from the grid
-        # max_input_2 = neg_flex_inc.combine(input_flows - self.flex_neg_dec_old,  max)  # additional input power from negative flexibility
-        max_input_2 = input_flows + neg_flex_inc - self.flex_neg_dec_old  # additional input power from negative flexibility
-
-
-        excess_input_1 = max_input_1 - power_limit
-        excess_input_2 = max_input_2 - power_limit
-        excess_input_1.loc[excess_input_1 < 0] = 0
-        excess_input_2.loc[excess_input_2 < 0] = 0
-
-        # adjust the flexibility values
-        # the energy values are not affected because they are still availabele even if inter_comps can not handle the amount
-        trans_excess_1 = excess_input_1 * self.elec_eff
-        trans_excess_1.loc[trans_excess_1 < 0] = 0
-
-        self.temp_flex['flex_pos_inc'] -= trans_excess_1  # excess has to be transformed to inv output value
-        self.temp_flex.loc[self.temp_flex['flex_pos_inc'] < 0, 'flex_pos_inc'] = 0
-
-        self.temp_flex['flex_neg_inc'] -= excess_input_2
-        self.temp_flex.loc[self.temp_flex['flex_neg_inc'] < 0, 'flex_neg_inc'] = 0
-
-        self.temp_flex['flex_pos'] = self.temp_flex['flex_pos_inc'] + self.temp_flex['flex_pos_dec']
-        self.temp_flex['flex_neg'] = self.temp_flex['flex_neg_inc'] + self.temp_flex['flex_neg_dec']
-
-    def adjust_flex_with_efficiency(self, results, dynamic):
-        # ToDo: Is this correct? Efficiency effects energy as well because in the end the community has to know how much
-        # energy is still available after each flex activation. e.g.: If the efficiency would not affect the energy,
-        # the community would always underestimate the effect of a positive flexibility activation on the energy amount
-        # left
-
-        self.flex_pos_inc_old = self.temp_flex['flex_pos_inc'].copy()
-        self.flex_neg_dec_old = self.temp_flex['flex_neg_dec'].copy()
-
-        # ------------EFFICIENCY--------------
-        # the idea here is that the flexibility that comes out of the inverter is
-        # related to the efficiency of the inverter.
-        # So considering the power flow of the flexibility type,
-        # the net flexibility gets higher or lower compared to the flexibility at the origin component
-
-        # ------POSITIVE--------------
-        # increasing positive flex means lower flex at grid
-        self.temp_flex['flex_pos_inc'] = self.temp_flex['flex_pos_inc'] * self.elec_eff
-        # decreasing positive flex means higher flex at grid
-        self.temp_flex['flex_pos_dec'] = self.temp_flex['flex_pos_dec'] / self.elec_eff
-
-        self.temp_flex['flex_pos'] = self.temp_flex['flex_pos_inc'] + self.temp_flex['flex_pos_dec']
-
-        # ------NEGATIVE--------------
-        # increasing negative flex means higher flex at grid
-        self.temp_flex['flex_neg_inc'] = self.temp_flex['flex_neg_inc'] / self.elec_eff
-        # decreasing neg flex means lower flex at grid
-        self.temp_flex['flex_neg_dec'] = self.temp_flex['flex_neg_dec'] * self.elec_eff
-
-        self.temp_flex['flex_neg'] = (self.temp_flex['flex_neg_inc'] + self.temp_flex['flex_neg_dec'])
-
-    def calc_correction_factors(self, time_steps, c_f_df, results):
-
-        c_f_dch = pd.Series(data=1/self.elec_eff, index=time_steps)
-        c_static_dch = pd.Series(data=0, index=time_steps)
-
-        c_f_df['c_f_dch'] = c_f_dch * c_f_df['c_f_dch']
-        c_f_df['c_static_dch'] = c_static_dch + c_f_df['c_static_dch']
-
-        return c_f_df
diff --git a/Component/model/hydrogen_components/fuel_cells/__init__.py b/Component/model/hydrogen_components/fuel_cells/__init__.py
deleted file mode 100644
index da4858511487455d3e1c67d8c895786d5501f77b..0000000000000000000000000000000000000000
--- a/Component/model/hydrogen_components/fuel_cells/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by mce on 06.01.2020.
-
-from .PEMFuelCell import PEMFuelCell
diff --git a/Component/model/hydrogen_components/storages/PressureStorage.py b/Component/model/hydrogen_components/storages/PressureStorage.py
deleted file mode 100644
index 9197a7ef4564f335f10791213bdebcfc3a0ce01c..0000000000000000000000000000000000000000
--- a/Component/model/hydrogen_components/storages/PressureStorage.py
+++ /dev/null
@@ -1,128 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-from Model_Library.Component.model.BaseStorage import BaseStorage
-
-import pandas as pd
-
-class PressureStorage(BaseStorage):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type='PressureStorage',
-                         commodity='hydrogen',
-                         configuration=configuration,
-                         model_directory=model_directory)
-
-    # Flexibility stuff
-
-    def calc_flex_comp(self, results, dynamic, init_results):
-        """
-        #TODO in what way is this method different from calc_flex_comp in BaseStoreage
-        This method calculates the maximal possible flexibility from the battery.
-        This is done by comparing two values:
-            1) The maximal possible CHA/DCH power
-            2) The maximal allowed CHA/DCH power regarding the available power BEFORE the PLANNED CHA/DCH
-        The minimal value is chosen to be the maximal possible CHA/DCH power. To claculate the available flexibility from this,
-        the PLANNED CHA/DCH rates are added/substracted depending on the type of flexibility.
-        E.g. positive flex: the current DCH power is substracted and the current CHA power is added
-        Parameters
-        ----------
-        flows
-        results
-        T
-
-        Returns
-        -------
-
-        """
-
-        cap_storage = results[(self.name, 'capacity')]  # the total theoretical capacity of the comm_batt
-        e_storage = results[(self.name, 'energy')]
-
-
-        # maximal in/out powers that the comm_batt is capable of
-        max_power_out = cap_storage * self.e2p_out
-        max_power_in = cap_storage * self.e2p_in
-
-        # get the total output/input powers
-        total_dch = pd.Series(data=0.0, index=dynamic.time_steps())
-        total_dch += results[(self.name, 'output_1')] #  self.output_efficiency #here not necessary
-
-        total_cha = pd.Series(data=0.0, index=dynamic.time_steps())
-        total_cha += results[(self.name, 'input_1')] # self.input_efficiency
-
-        # these are the power values that are theoretically available
-        # for flexibility through CHA and DCH (missing curtailment)
-        max_power_dch = max_power_out
-        max_power_cha = max_power_in
-
-        # now the energy restriction is calculated
-        # ToDO @jbr: in the future here should be an extra restriction if some SOE values
-        #  have to be reached in future timesteps
-        e_cha = pd.Series(index=dynamic.time_steps())
-        e_dch = pd.Series(index=dynamic.time_steps())
-
-        for t in dynamic.time_steps():
-            if total_dch[t] > 0:
-                e_dch[t] = (e_storage[t] - 0.2 * cap_storage[t])
-            else:
-                e_dch[t] = 0
-
-            if total_cha[t] > 0:
-                e_cha[t] = (0.8 * cap_storage[t] - e_storage[t] )
-            else:
-                e_cha[t] = 0
-
-
-        for t in dynamic.time_steps():
-            if e_dch[t] < -1 * e_cha[t]:
-                print('Fehler zum Zeitpunkt ' + str(t))
-
-        # problem with these ones is: a negative energy means that the energy level is in "restricted" levels.
-        # This may lead to infeasailities as this constraint requires the Prosumer to provide charge/discharge
-        # even if this is not possible. But this should be correct
-
-        #e_cha.loc[e_cha < 0] = 0
-        #e_dch.loc[e_dch < 0] = 0
-
-        # pos flex:
-        self.temp_flex['flex_pos_inc'] = max_power_dch - total_dch
-        #self.temp_flex['flex_pos_inc'].loc[self.temp_flex['flex_pos_inc'] < 0] = 0
-        self.temp_flex.loc[self.temp_flex['flex_pos_inc'] < 0, 'flex_pos_inc'] = 0
-        self.temp_flex['flex_pos_dec'] = total_cha
-        self.temp_flex['flex_pos'] = max_power_dch - total_dch + total_cha
-        self.temp_flex.loc[self.temp_flex['flex_pos'] < 0, 'flex_pos'] = 0
-        #self.temp_flex['flex_pos'].loc[self.temp_flex['flex_pos'] < 0] = 0
-        self.temp_flex['e_dch'] = e_dch
-
-        # neg flex: negative signs due to negative flexibility
-        self.temp_flex['flex_neg_inc'] = max_power_cha - total_cha
-        self.temp_flex.loc[self.temp_flex['flex_neg_inc'] < 0, 'flex_neg_inc'] = 0
-        #self.temp_flex['flex_neg_inc'].loc[self.temp_flex['flex_neg_inc'] < 0] = 0
-        self.temp_flex['flex_neg_dec'] = total_dch
-        self.temp_flex['flex_neg'] = max_power_cha - total_cha + total_dch
-        self.temp_flex.loc[self.temp_flex['flex_neg'] < 0, 'flex_neg'] = 0
-        #self.temp_flex['flex_neg'].loc[self.temp_flex['flex_neg'] < 0] = 0
-        self.temp_flex['e_cha'] = e_cha
diff --git a/Component/model/hydrogen_components/storages/__init__.py b/Component/model/hydrogen_components/storages/__init__.py
deleted file mode 100644
index da537c91dff996ee509b00c0b4d26e22f3653b55..0000000000000000000000000000000000000000
--- a/Component/model/hydrogen_components/storages/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by kss on 13.01.2021.
-
-from .PressureStorage import PressureStorage
diff --git a/District/__init__.py b/District/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/District/main.py b/District/main.py
deleted file mode 100644
index 73cd0fd63fcc1e5829502185ac0ce0fbb86471d0..0000000000000000000000000000000000000000
--- a/District/main.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import Model_Library.District.model.Community as Community
-
-class DistrictMain:
-    def __init__(self, configuration, prosumers, district_assets, input_profiles, dynamic):
-        # Create district object
-        self.district = Community('community', configuration['community'], prosumers, district_assets, input_profiles, dynamic)
-
-    def optimize_sizing(self, key, district_sizing_strategy):
-        self.district.optimize_sizing(key, district_sizing_strategy)
-
-    def optimize_operation(self, key, district_operation_strategy):
-        self.district.optimize_operation(key, district_operation_strategy)
-
-    def save_results(self):
-        self.district.analyze_results()
-        self.district.save_results()
diff --git a/District/model/Community.py b/District/model/Community.py
deleted file mode 100644
index 9389ba8992ce22d01fe8489e5424901e85966e11..0000000000000000000000000000000000000000
--- a/District/model/Community.py
+++ /dev/null
@@ -1,357 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import pandas as pd
-import os
-from datetime import timedelta
-from Model_Library.Component.model.EMS_components.Coordinator import implement_sizing_strategy
-from Model_Library.OptimizationModel import OptimizationModel, EntityResult
-from Tooling.dynamics.Dynamic import resample
-from Model_Library.flexibility import flexibility_activation
-
-class Community:
-
-    def __init__(self, name, configuration, prosumers, community_assets, profiles, dynamic):
-
-        self.name = name
-        self.configuration = configuration
-        self.prosumers = prosumers
-        self.community_assets = community_assets
-
-        wholesale_price = resample(profiles[self.configuration['wholesale_price']][0], profiles[self.configuration['wholesale_price']][1], dynamic)
-        self.configuration['elec_price_int'] = (wholesale_price + self.configuration['network_usage_energy_fee'] + self.configuration['levies_int'] + self.configuration['concession'] + self.configuration['electricity_tax_int']) * (1 + self.configuration['VAT'])
-        self.configuration['elec_price_ext'] = (wholesale_price + self.configuration['network_usage_energy_fee'] + self.configuration['levies_ext'] + self.configuration['concession'] + self.configuration['electricity_tax_ext']) * (1 + self.configuration['VAT'])
-        self.configuration['injection_price'] = resample(profiles[self.configuration['injection_price']][0], profiles[self.configuration['injection_price']][1], dynamic)
-        
-        self.dynamic = dynamic
-
-        self.result = {'initial': EntityResult(dynamic, [('agg_balance', 'T'), ('agg_export', 'T'), ('agg_import', 'T'), ('internal_exchange', 'T')])}
-
-        agg_export = pd.Series(0.0, index=dynamic.time_steps())
-        agg_import = pd.Series(0.0, index=dynamic.time_steps())
-        for ps in self.prosumers.values():
-            ps_data = ps.get_export_import(ps._result[ps._last_result_key], self.dynamic)
-            for ps_export, ps_import in ps_data.values():
-                agg_export += ps_export
-                agg_import += ps_import
-        self.result['initial']['agg_balance'] = agg_export - agg_import
-        self.result['initial']['agg_export'] = agg_export
-        self.result['initial']['agg_import'] = agg_import
-        self.result['initial']['internal_exchange'] = pd.concat([agg_export, agg_import], axis=1).min(axis=1)
-        self.last_result_key = 'initial'
-
-    def optimize_sizing(self, key, strategy_name):
-        if not self.community_assets:
-            return
-        """
-        Sizing the community assets.
-        1) All Prosumer objects containing CAs are iteratively called and
-        an aggregated mathematical model is build. This aggregated model is necessary because the CAs do not act
-        independently as a prosumer but collectively.
-        2) pass the CA strategy to the coordinator so it can implment the objective function
-        3) start optimization
-        4) add information to community data
-        5) update data for coordinator
-
-        Parameters
-        ----------
-        time_steps: time steps of the whole time horizon
-        """
-
-        self.build_sizing_model(strategy_name)
-
-        options = dict()
-        options['MIPGap'] = 0.02
-        options['Presolve'] = 2
-        options['TimeLimit'] = 200
-
-        self._model_sizing.solve(options, False)
-
-        if self._model_sizing.is_ok():
-            # add the result to the community assets
-            for ca_name, ca in self.community_assets.items():
-                # no resampling necessary, because the model has the same dynamic as the prosumer
-                ca._result[key] = ca.get_empty_entity_result()
-                ca._result[key].extract_results_from_model(self._model_sizing.blocks[(ca_name,)])
-                ca._last_result_key = key
-        else:
-            print('ERROR: The model is infeasible or unbounded: no optimal solution found')
-
-        self.result[key] = EntityResult(self.dynamic, [('agg_balance', 'T'), ('agg_export', 'T'), ('agg_import', 'T'), ('internal_exchange', 'T')])
-
-        agg_export = self.result[self.last_result_key]['agg_export'].copy()
-        agg_import = self.result[self.last_result_key]['agg_import'].copy()
-        for ca in self.community_assets.values():
-            ca_data = ca.get_export_import(ca._result[key], self.dynamic)
-            for ca_export, ca_import in ca_data.values():
-                agg_export += ca_export
-                agg_import += ca_import
-        self.result[key]['agg_balance'] = agg_export - agg_import
-        self.result[key]['agg_export'] = agg_export
-        self.result[key]['agg_import'] = agg_import
-        self.result[key]['internal_exchange'] = pd.concat([agg_export, agg_import], axis=1).min(axis=1)
-        self.last_result_key = key
-
-    def build_sizing_model(self, strategy_name):
-        self._model_sizing = OptimizationModel(self.name, self.dynamic)
-
-        for ca in self.community_assets.values():
-
-            ca.build_sizing_model(self._model_sizing)
-
-        implement_sizing_strategy(self, self._model_sizing, strategy_name)
-
-    def optimize_operation(self, key, community_operation_strategy):
-        flexibility_activation(self, key, community_operation_strategy)
-        return
-
-    def analyze_results(self):
-        """
-        Analyze community results with economic and technical indices.
-        """
-
-        # The factor that converts the simulation to ONE year
-        annual_factor = timedelta(days=365) / timedelta(hours=sum(self.dynamic.step_size_p(position) for position in range(self.dynamic.number_of_steps())))
-
-        self.analysis = dict()
-        self.analysis['economical'] = dict()
-        self.analysis['technical'] = dict()
-        self.annuity_dict = dict()
-
-        for key in self.result:
-            print('---------------------WELFARE ' + key + '---------------------')
-
-            community_import = self.result[key]['agg_import'] - self.result[key]['internal_exchange']
-            community_export = self.result[key]['agg_export'] - self.result[key]['internal_exchange']
-            total_community_import = sum(community_import)
-            peak_community_import = community_import.max()
-            peak_community_import_costs = peak_community_import * self.configuration['network_usage_capacity_fee']
-
-            try:
-                t_fullload = total_community_import / peak_community_import
-                print('Fullload hours ' + key + ': ' + str(t_fullload))
-            except ZeroDivisionError:
-                t_fullload = float('inf') # to get the higher price
-
-            internal_costs = self.result[key]['internal_exchange'] * self.dynamic.step_sizes() * self.configuration['elec_price_int'].values
-            external_costs = community_import * self.dynamic.step_sizes() * self.configuration['elec_price_ext'].values
-
-            internal_revenue = self.result[key]['internal_exchange'] * self.dynamic.step_sizes() * self.configuration['injection_price'].values
-            external_revenue = community_export * self.dynamic.step_sizes() * self.configuration['injection_price'].values
-
-            inv_costs = 0
-            if len(self.community_assets) > 0 and key != 'initial':
-                for var in self._model_sizing.component_dict:
-                    if len(var) == 3 and var[0] in self.community_assets.keys() and var[2] == 'capital_cost':
-                        inv_costs += self._model_sizing.component_dict[var].value
-
-            annuity = -inv_costs + (internal_revenue.sum() + external_revenue.sum() - internal_costs.sum() - external_costs.sum()) * annual_factor - peak_community_import_costs
-
-            self.analysis['economical'][key] = {'inv costs': inv_costs,
-                                                'peak power costs': peak_community_import_costs,
-                                                'internal costs': internal_costs.sum(),
-                                                'external costs': external_costs.sum(),
-                                                'internal revenue': internal_revenue.sum(),
-                                                'external revenue': external_revenue.sum(),
-                                                'annuity': annuity}
-
-            print('--------------------TECHNICAL ' + key + '------------------------------')
-
-            total_community_export = community_export.sum()
-            peak_community_export = community_export.max()
-            try:
-                energy_balance_index = 1 - (total_community_import + total_community_export) / \
-                                       (self.result[key]['agg_import'].sum() + self.result[key]['agg_export'].sum())
-
-                self_sufficiency_index = 1 - (total_community_import / self.result[key]['agg_import'].sum())  # same as dividing internal exchange with aggregated demand
-            except ZeroDivisionError:
-                energy_balance_index = float('inf')
-                self_sufficiency_index = float('inf')
-            try:
-                self_cons_index = (self.result[key]['internal_exchange'].sum() / self.result[key]['agg_export'].sum())
-            except ZeroDivisionError:
-                self_cons_index = float('inf')
-            try:
-                Peak2AVG_ext_demand = peak_community_import / community_import.mean()
-            except ZeroDivisionError:
-                Peak2AVG_ext_demand = 0
-            try:
-                Peak2AVG_ext_supply = peak_community_export / community_export.mean()
-            except ZeroDivisionError:
-                Peak2AVG_ext_supply = 0
-
-            self.analysis['technical'][key] = {'full load hours': t_fullload,
-                                               'external_demand': community_import.sum(),
-                                               'external_supply': community_export.sum(),
-                                               'internal_exchange': sum(self.result[key]['internal_exchange']),
-                                               #'CO2 emissions': external_demand.sum() * self.configuration['elec_emission'],
-                                                'peak_ext_demand': peak_community_import,
-                                                'peak_ext_supply': peak_community_export,
-                                                'energy_balance_index': energy_balance_index,
-                                                'self_suff_index': self_sufficiency_index,
-                                                'self_cons_index': self_cons_index,
-                                               'Peak2AVG_ext_demand': Peak2AVG_ext_demand,
-                                               'Peak2AVG_ext_supply': Peak2AVG_ext_supply}
-
-        """--------------------ANALYZE PROSUMER ANNUITIES--------------------------------------"""
-        for key in self.result.keys():
-            if key == 'validated':
-                df = True
-            else:
-                df = False
-
-            cint_buy, cint_sell = self.calc_internal_prices(
-                self.result[key]['agg_export'],
-                self.result[key]['agg_import'],
-                self.configuration['injection_price'].values,
-                self.configuration['elec_price_int'].values,
-                self.configuration['elec_price_ext'].values)
-
-            injection_price = self.configuration['injection_price'].values
-            ca_profit_df = 0
-            ca_profit_agg = 0
-            # CA
-            for ps_name, ps in self.community_assets.items():
-                if ps_name not in self.annuity_dict.keys():
-                    self.annuity_dict[ps_name] = dict()
-
-                a1,a2 = ps.calc_annuity(cint_buy, cint_sell,df)
-
-                if df:
-                    ca_profit_df = a2
-                    self.annuity_dict[ps_name]['validated'] = a2
-                else:
-                    ca_profit_agg = a2
-                    self.annuity_dict[ps_name]['initial'] = 0
-                    self.annuity_dict[ps_name]['CA'] = a2
-
-            for ps_name, ps in self.prosumers.items():
-                if ps_name not in self.annuity_dict.keys():
-                    self.annuity_dict[ps_name] = dict()
-                a1,a2 = ps.calc_annuity(cint_buy, cint_sell,df)
-                if df:
-                    self.annuity_dict[ps_name]['validated'] = a2 - (inv_costs - ca_profit_df)/len(self.prosumers)
-                else:
-                    self.annuity_dict[ps_name]['initial'] = a1
-                    self.annuity_dict[ps_name]['CA'] = a2 - (inv_costs - ca_profit_agg)/len(self.prosumers)
-
-        return
-
-    def calc_internal_prices(self, supply, demand, sell, p_int, p_ext, price_mech ='split'):
-        """
-        Calculating the internal prices for covering the communities internal demand with overproduction.
-        SDR price:  from "Evaluation of peer-to-peer energy sharing mechanisms based on a
-                   multiagent simulation framework" Zhou 2018
-                   --> depends on the ratio between demand and supply
-        Split:  Bill sharing from "Evaluation of peer-to-peer energy sharing mechanisms based on a
-                   multiagent simulation framework" Zhou 2018
-        Parameters
-        ----------
-        supply: amount of energy supplied to the grid in every time step
-        demand: amount of energy demanded by the generation in every time step
-        sell: remuneration for selling electricity
-        p_int: price for buying electricity internally
-        p_ext: price for buying electricity externally
-        price_mech: adapted price_mech
-
-        Returns:
-        ----------
-        price_internal_buy: new price for buying electricity
-        price_internal_sell: new remuneration for selling electricity
-        """
-
-        if price_mech == 'SDR':
-
-            price_internal_sell = pd.Series(data=0.0, index=demand.index)
-            price_internal_buy = pd.Series(data=0.0, index=demand.index)
-
-            sd_ratio = pd.Series(data=2.0, index=demand.index)
-            sd_ratio.loc[demand > 0] = supply.loc[demand > 0] / demand.loc[demand > 0]
-
-
-            for t in demand.index:
-
-                if sd_ratio[t] < 1:
-                    price_internal_sell[t] = (self.configuration['elec_price_ext_low'][t] * self.configuration['injection_price'].values[t]) \
-                                             / ((self.configuration['elec_price_ext_low'][t] - self.configuration['injection_price'].values[t]) * sd_ratio[t] + self.configuration['injection_price'].values[t])
-
-                    price_internal_buy[t] = price_internal_sell[t] * sd_ratio[t] \
-                                            + self.configuration['elec_price_ext_low'][t] * (1 - sd_ratio[t])
-                else:
-                    price_internal_sell[t] = self.configuration['injection_price'].values[t]
-                    price_internal_buy[t] = self.configuration['injection_price'.values][t]
-        elif price_mech == 'split':
-            price_internal_sell = pd.Series(data=0.0, index=demand.index)
-            price_internal_buy = pd.Series(data=0.0, index=demand.index)
-            for t in demand.index:
-                p_avg = (p_ext[t] - p_int[t])/2
-
-                if demand[t] <= supply[t] and supply[t] > 0:
-                    price_internal_sell[t] = (demand[t] * (sell[t] + p_avg) + (supply[t] - demand[t]) * sell[t]) / supply[t]
-                    price_internal_buy[t] = p_int[t] + p_avg
-
-                elif demand[t] >= supply[t] and demand[t] > 0:
-                    price_internal_buy[t] = (supply[t] * (p_int[t] + p_avg) + (demand[t] - supply[t]) * p_ext[t]) / demand[t]
-                    price_internal_sell[t] = sell[t] + p_avg
-
-                else:
-                    price_internal_sell[t] = 5000
-                    price_internal_buy[t] = 5000
-
-        return price_internal_buy, price_internal_sell
-
-    def save_results(self):
-        if not os.path.exists('output_files/'):
-            os.makedirs('output_files/')
-        if not os.path.exists('output_files/' + self.name + '/'):
-            os.makedirs('output_files/' + self.name + '/')
-        
-        for ps in self.prosumers.values():
-            with pd.ExcelWriter('output_files/' + self.name + '/results_' + str(ps._name) + '.xlsx') as writer:
-                for key, result in ps._result.items():
-                    result.to_excel(writer, sheet_name=key)
-        
-        for ca in self.community_assets.values():
-            with pd.ExcelWriter('output_files/' + self.name + '/results_' + str(ca._name) + '.xlsx') as writer:
-                for key, result in ca._result.items():
-                    result.to_excel(writer, sheet_name=key)
-
-        p_int = self.configuration['elec_price_int'].values
-        p_inj = self.configuration['injection_price'].values
-        p_ext = self.configuration['elec_price_ext'].values
-
-        prices = pd.DataFrame({'internal buy': p_int,  'sell': p_inj, 'external buy': p_ext})
-
-        annuities_df = pd.DataFrame(self.annuity_dict)
-        analysis_eco = pd.DataFrame(self.analysis['economical'])
-        analysis_tech = pd.DataFrame(self.analysis['technical'])
-
-        with pd.ExcelWriter('output_files/' + self.name + '/results_' + self.name + '.xlsx') as writer:
-            prices.to_excel(writer, sheet_name='Prices')
-            annuities_df.to_excel(writer, sheet_name='PS-Annuities')
-            for key, value in self.result.items():
-                value.to_excel(writer, sheet_name=str(key))
-
-            analysis_eco.to_excel(writer, sheet_name='Analysis economical')
-            analysis_tech.to_excel(writer, sheet_name='Analysis technical')
diff --git a/District/model/__init__.py b/District/model/__init__.py
deleted file mode 100644
index 44df72658416ed7ebeb9dcfd53aa29e4086fd0c0..0000000000000000000000000000000000000000
--- a/District/model/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .Community import Community
diff --git a/OptimizationModel.py b/OptimizationModel.py
deleted file mode 100644
index da4c154268aae9df0daec2bcafee731032be7e90..0000000000000000000000000000000000000000
--- a/OptimizationModel.py
+++ /dev/null
@@ -1,185 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import pyomo.environ as pyo
-from pyomo.opt import SolverStatus, TerminationCondition
-import numpy as np
-import pandas as pd
-from Tooling.dynamics.Dynamic import resample_variable
-    
-class OptimizationBlock:
-    def __init__(self, name, dynamic, type = pyo.Block):
-        self.name = name
-        self.dynamic = dynamic
-        self.time_steps = list(dynamic.time_steps())
-
-        if type == pyo.Block:
-            self.block = type()
-        else:
-            self.block = type(name)
-
-        self.block.T = pyo.Set(initialize = self.time_steps, ordered = True)
-        self.T = self.block.T
-
-        self.block.T_prime = pyo.Set(initialize = [-1] + self.time_steps, ordered = True)
-        self.T_prime = self.block.T_prime
-
-        self.component_dict = dict()
-        self.u_var = []
-        self.i_var = []
-        self.i_var_prime = []
-        self.objective_terms = []
-        self.blocks = dict()
-
-    def step_size(self, index):
-        return self.dynamic.step_size(index)
-
-    def add(self, name, component):
-        if not isinstance(component, OptimizationBlock):
-            self.block.add_component('_'.join(name), component)
-        else:
-            self.blocks[name] = component
-            self.block.add_component('_'.join(name), component.block)
-        if isinstance(component, pyo.Var):
-            self.component_dict[name] = component
-            if component.is_indexed() and component.index_set().local_name == 'T':
-                self.i_var.append(name)
-            elif not component.is_indexed():
-                self.u_var.append(name)
-            elif component.is_indexed() and component.index_set().local_name == 'T_prime':
-                self.i_var_prime.append(name)
-            else:
-                raise ValueError('A variable to be added can only be indexed over T, T_prime or be unindexed!')
-        if isinstance(component, pyo.Expression):
-            self.component_dict[name] = component
-        elif isinstance(component, pyo.Param):
-            self.component_dict[name] = component
-
-    def add_objective_term(self, term):
-        self.block.add_component('objective_' + str(len(self.objective_terms)), term)
-        self.objective_terms.append(term)
-
-    def lift_expression(self, name, other, other_expression):
-        self.add(name, resample_variable(other_expression, other.dynamic, self.dynamic, self.T))
-
-    # assumes that the given values have the same dynamic as the block
-    def set_value(self, name, values):
-        component = self.component_dict[name]
-        if isinstance(component, pyo.Param):
-            for t in component.index_set():
-                self.component_dict[name][t] = values[t]
-
-    def __getitem__(self, var_name):
-        if var_name in self.u_var:
-            return self.component_dict[var_name].value
-        elif var_name in self.i_var:
-            values = self.component_dict[var_name].get_values()
-            return pd.Series(data=(values[t] for t in self.T), index=self.T.ordered_data())
-        elif var_name in self.i_var_prime:
-            values = self.component_dict[var_name].get_values()
-            return pd.Series(data=(values[t] for t in self.T), index=self.T.ordered_data())
-        else:
-            None
-
-class OptimizationModel(OptimizationBlock):
-    def __init__(self, name, dynamic):
-        super().__init__(name, dynamic, pyo.ConcreteModel)
-
-    def solve(self, options, tee):
-        # glpk(bad for milp), cbc(good for milp), gurobi: linear, ipopt: nonlinear
-        # in order to install a new solver paste the .exe file in env. path 'C:\Users\User\anaconda3\envs\envINEED'
-        solver = pyo.SolverFactory('gurobi')
-        solver.options.update(options)
-        self.solver_result = solver.solve(self.block, tee = tee)
-
-    def is_ok(self):
-        return self.solver_result.solver.status == SolverStatus.ok and self.solver_result.solver.termination_condition == TerminationCondition.optimal
-
-class EntityResult:
-    def __init__(self, dynamic, variables):
-        self.dynamic = dynamic
-
-        u_var = []
-        i_var =  []
-        i_var_prime = []
-        self.var_to_res = dict()
-        for var, set in variables:
-            if set == None:
-                u_var.append(var)
-                self.var_to_res[var] = 'u_result'
-            elif set == 'T':
-                i_var.append(var)
-                self.var_to_res[var] = 'i_result'
-            elif set == 'T_prime':
-                i_var_prime.append(var)
-                self.var_to_res[var] = 'i_result_prime'
-                
-        self.u_result = pd.Series(index=u_var)
-
-        self.i_result = pd.DataFrame(index=dynamic.time_steps(), columns=i_var)
-
-        self.i_result_prime = pd.DataFrame(index=[-1] + list(dynamic.time_steps()), columns=i_var_prime)
-
-    def __getitem__(self, var_name):
-        if var_name in self.var_to_res:
-            res_name = self.var_to_res[var_name]
-            return getattr(self, res_name).get(var_name)
-        else:
-            return None
-
-    def __setitem__(self, var_name, value):
-        if var_name in self.var_to_res:
-            res_name = self.var_to_res[var_name]
-            getattr(self, res_name)[var_name] = value
-
-    def extract_results_from_model(self, model):
-        if self.dynamic != model.dynamic:
-            raise ValueError('Cannot extract results from a model into a EntitResult with a different dynamic!')
-        for var_name in self.u_result.index:
-            self.u_result[var_name] = pyo.value(model.component_dict[var_name])
-        for var_name in self.i_result.columns:
-            values = model.component_dict[var_name].get_values()
-            self.i_result[var_name] = np.array(list(values[t] for t in model.T))
-        for var_name in self.i_result_prime.columns:
-            values = model.component_dict[var_name].get_values()
-            self.i_result_prime[var_name] = np.array(list(values[t] for t in model.T_prime))
-
-    def extract_partial_results_from_model(self, model, i_start, i_end):
-        if self.dynamic.partial_dynamic(i_start, i_end) != model.dynamic:
-            raise ValueError('Cannot extract partial results from a model into a EntitResult with a different dynamic!')
-        for var_name in self.i_result.columns:
-            values = model.component_dict[var_name].get_values()
-            self.i_result[var_name][i_start:i_end] = np.array(list(values[t] for t in model.T))
-        for var_name in self.i_result_prime.columns:
-            values = model.component_dict[var_name].get_values()
-            self.i_result_prime[var_name][i_start+1:i_end+1] = np.array(list(values[t] for t in model.T))
-
-    def to_excel(self, excel_writer, **kwargs):
-        sheet_name = kwargs.get('sheet_name', '')
-        kwargs['sheet_name'] = 'unindexed' if sheet_name == '' else sheet_name + '_unindexed'
-        self.u_result.to_excel(excel_writer, **kwargs)
-        kwargs['sheet_name'] = 'T' if sheet_name == '' else sheet_name + '_T'
-        self.i_result.to_excel(excel_writer, **kwargs)
-        kwargs['sheet_name'] = 'T_prime' if sheet_name == '' else sheet_name + '_T_prime'
-        self.i_result_prime.to_excel(excel_writer, **kwargs)
diff --git a/Prosumer/__init__.py b/Prosumer/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/Prosumer/main.py b/Prosumer/main.py
deleted file mode 100644
index 38dc4b6b594ab90b1a8fc4c640325059dc094a12..0000000000000000000000000000000000000000
--- a/Prosumer/main.py
+++ /dev/null
@@ -1,94 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-# main.py is the central script to execute the flexible modelling of prosumers within this simulation tool.
-# For further Information please have a closer look at the documentation in "README.txt"
-# This work is published under the public license: XXX
-
-import Model_Library.Prosumer.model.Prosumer as Prosumer
-import Model_Library.Prosumer.model.DistrictAsset as DistrictAsset
-import Model_Library.Prosumer.model.BusChargingStation as BusChargingStation
-import Model_Library.Prosumer.model.BusChargingStationWithHPC as BusChargingStationWithHPC
-import Model_Library.Prosumer.model.OverheadLineForIMC as OverheadLineForIMC
-
-
-class ProsumerMain:
-    def __init__(self, configurations, input_profiles, dynamic):
-        self.prosumers = {}
-
-        for name, configuration in configurations.items():
-            # Create prosumer object
-            if configuration['type'].lower() == 'buschargingstation':
-                self.prosumers[name] = BusChargingStation(name, configuration, input_profiles, dynamic)
-            elif configuration['type'].lower() == 'buschargingstationwithhpc':
-                self.prosumers[name] = BusChargingStationWithHPC(name, configuration, input_profiles, dynamic)
-            elif configuration['type'].lower() == 'overheadlineforimc':
-                self.prosumers[name] = OverheadLineForIMC(name, configuration, input_profiles, dynamic)
-            else:
-                self.prosumers[name] = Prosumer(name, configuration, input_profiles, dynamic)
-
-    def optimize_sizing(self, key, prosumer_sizing_strategy):
-        # ToDo: callback, abbruchbedingung, maybe check the feasebility of the model before building,
-        #  häufige fehler bei der eingabe prüfen usw
-        for ps in self.prosumers.values():
-            ps.optimize_sizing(key, prosumer_sizing_strategy)
-
-    def pareto_analysis(self, prosumer_sizing_strategy_1, prosumer_sizing_strategy_2):
-        for ps in self.prosumers.values():
-            ps.pareto_analysis(prosumer_sizing_strategy_1, prosumer_sizing_strategy_2)
-
-    def save_results(self):
-        for ps in self.prosumers.values():
-            ps.save_results()
-
-
-class DistrictAssetMain:
-    def __init__(self, configurations, input_profiles, dynamic):
-        self.district_assets = {}
-
-        for name, configuration in configurations.items():
-            # Create district_asset object
-            self.district_assets[name] = DistrictAsset(name, configuration, input_profiles, dynamic)
-
-
-# class BusChargingStationMain:
-#     def __init__(self, configurations, input_profiles, t_horizon, t_step):
-#         self.bus_charging_stations = {}
-#
-#         for name, configuration in configurations.items():
-#             # Create prosumer object
-#             self.bus_charging_stations[name] = BusChargingStation(name, configuration, input_profiles, t_horizon, t_step)
-#
-#     def optimize_sizing(self, bus_charging_station_sizing_strategy):
-#         for bcs in self.bus_charging_stations.values():
-#             bcs.optimize_sizing(bus_charging_station_sizing_strategy)
-#
-#     def pareto_analysis(self, bus_charging_station_sizing_strategy_1, bus_charging_station_sizing_strategy_2):
-#         for bcs in self.bus_charging_stations.values():
-#             bcs.pareto_analysis(bus_charging_station_sizing_strategy_1, bus_charging_station_sizing_strategy_2)
-#
-#     def save_results(self):
-#         for bcs in self.bus_charging_stations.values():
-#             bcs.save_results()
-
diff --git a/Prosumer/model/BusChargingStation.py b/Prosumer/model/BusChargingStation.py
deleted file mode 100644
index a8eee5207c69e2b92ee193fa2630b1573f2b2255..0000000000000000000000000000000000000000
--- a/Prosumer/model/BusChargingStation.py
+++ /dev/null
@@ -1,296 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import Model_Library.Prosumer.model.Prosumer as Prosumer
-import pyomo.environ as pyo
-
-
-class BusChargingStation(Prosumer):
-    def __init__(self, name, configuration, profiles, dynamic):
-        super().__init__(name=name,
-                         configuration=configuration,
-                         profiles=profiles,
-                         dynamic=dynamic)
-
-        self.buses = {component_name: component for component_name, component in configuration['components'].items() if component['type'] == 'ElecBus'}
-        self.driving_consumptions = {component_name: component for component_name, component in configuration['components'].items() if component['type'] == 'DrivingConsumption'}
-
-        for bus, driving_cons in zip(self.buses.keys(), self.driving_consumptions.keys()):
-            profile_dict = configuration['components'][driving_cons]
-            self.buses[bus]['z_driving'] = profiles[profile_dict['consumption']][0].loc[:, 'Driving']
-            self.buses[bus]['z_connected_to_depot'] = profiles[profile_dict['consumption']][0].loc[:, 'Connected to depot']
-
-
-    def add_prosumer_variables(self, model, prefix = ()):
-        super().add_prosumer_variables(model, prefix)
-        #self.add_binary_flow_variables(model, prefix) # activate if m x n architecture (each bus can connect to every charging point)
-
-        for bus in self.get_buses():
-            model.add((bus.name, 'current_charge_power'), pyo.Var(model.T, bounds=(0,None)))
-
-            #model.add((bus.name, 'z_driving'), pyo.Var(model.T, within=pyo.Binary))
-            model.add((bus.name, 'z_driving'), pyo.Param(model.T, mutable=True))
-            model.set_value((bus.name, 'z_driving'), self.buses[bus.name]['z_driving'])
-
-            model.add((bus.name, 'z_connected_to_depot_check'), pyo.Var(model.T, within=pyo.Binary))
-            model.add((bus.name, 'z_connected_to_depot'), pyo.Param(model.T, mutable=True))
-            model.set_value((bus.name, 'z_connected_to_depot'), self.buses[bus.name]['z_connected_to_depot'])
-
-    # activate if m x n architecture (each bus can connect to every charging point)
-    def add_binary_flow_variables(self, model, prefix = ()):
-        for flow in self.get_depot_to_bus_flows():
-            model.add(prefix + ('z_' + flow,), pyo.Var(model.T, within=pyo.Binary))
-
-        for flow in self.get_bus_to_depot_flows():
-            model.add(prefix + ('z_' + flow,), pyo.Var(model.T, within=pyo.Binary))
-
-    def add_prosumer_constraints(self, model, prefix = ()):
-        super().add_prosumer_constraints(model, prefix)
-        self.add_equal_battery_sizing_constraints(model, prefix)
-        self.add_equal_depot_inverter_sizing_constraints(model, prefix)
-        self.add_depot_connection_constraints(model, prefix)
-        #self.add_charge_power_constraints(model, prefix) # activate if buses can switch charging points
-        #self.add_simultaneous_connect_constraints(model, prefix) # activate if buses can switch charging points
-
-    # all bus batteries should have the same capacity
-    def add_equal_battery_sizing_constraints(self, model, prefix):
-        buses = self.get_buses()
-        for i in range(len(buses) - 1):
-            def rule(m, t):
-                return model.component_dict[(buses[i].name, 'capacity')] == model.component_dict[(buses[i + 1].name, 'capacity')]
-            model.add(('equal_bat_capacities_constraint_' + str(i + 1),), pyo.Constraint(model.T, rule=rule))
-
-    # all depot chargers (AC-DC inverters) should have the same capacity
-    def add_equal_depot_inverter_sizing_constraints(self, model, prefix):
-        inverters = self.get_depot_chargers()
-        for i in range(len(inverters) - 1):
-            def rule(m, t):
-                return model.component_dict[(inverters[i].name, 'capacity')] == model.component_dict[(inverters[i + 1].name, 'capacity')]
-            model.add(('equal_inverter_capacities_constraint_' + str(i + 1),), pyo.Constraint(model.T, rule=rule))
-
-    # switch off power flow to bus if not connected to depot charger
-    def add_depot_connection_constraints(self, model, prefix):
-        for bus, depot_charger in zip(self.get_buses(), self.get_depot_chargers()):
-
-            # just to make status visible in results file to verify results
-            def rule(m,t):
-                return model.component_dict[(bus.name, 'z_connected_to_depot_check')][t] == model.component_dict[(bus.name, 'z_connected_to_depot')][t]
-            model.add((bus.name + '_z_connected_to_depot_constraint',), pyo.Constraint(model.T, rule=rule))
-
-            t_not_connected = set()
-            for t in model.T:
-                if model.component_dict[(bus.name, 'z_connected_to_depot')][t] == 0:
-                    t_not_connected.add(t)
-
-            # no flows for 1 to 1 architecture, must work with component outputs
-            def rule(m, t):
-                return model.component_dict[(depot_charger.name, 'output_1')][t] == 0
-            model.add((depot_charger.name + 'disable_output_to_' + bus.name + '_constraint',), pyo.Constraint(t_not_connected, rule=rule))
-
-            # for m x n architecture
-            # for depot_to_bus in self.get_depot_to_bus_flows():
-            #     if bus.name in depot_to_bus:
-            #         def rule(m,t):
-            #             return model.component_dict[prefix + (depot_to_bus,)][t] == 0
-            #         model.add((depot_to_bus + 'disable_power_flow_to_' + bus.name + '_constraint',), pyo.Constraint(t_not_connected, rule=rule))
-            # for bus_to_depot in self.get_bus_to_depot_flows():
-            #     if bus.name in bus_to_depot:
-            #         def rule(m,t):
-            #             return model.component_dict[prefix + (bus_to_depot,)][t] == 0
-            #         model.add((bus_to_depot + 'disable_power_flow_from_' + bus.name + '_constraint',), pyo.Constraint(t_not_connected, rule=rule))
-
-    # in case of no fixed connection between bus and charging station, bus can switch charging station
-    # if bus is not driving and SOC is below 30%, it should be charged with max power of the respective inverter it is connected to
-    def add_charge_power_constraints(self, model, prefix):
-        for bus, driving_consumption in zip(self.get_buses(),self.get_driving_consumptions()):
-            if hasattr(driving_consumption, 'z_driving'):
-                def rule(m,t):
-                    return model.component_dict[(bus.name,'z_driving')][t] == model.component_dict[(driving_consumption.name, 'z_driving')][t]
-                model.add((bus.name + 'z_driving_constraint',), pyo.Constraint(model.T, rule=rule))
-
-                t_not_driving = set()
-                for t in model.T:
-                    if driving_consumption.z_driving[t] == 0:
-                        t_not_driving.add(t)
-
-                # auxiliary variable and constraint
-                # sets current charge power for bus to max power of the inverter it is connected to at that particular moment
-                inv_to_bus_flows = []
-                for inv_to_bus_flow in self.get_depot_to_bus_flows():
-                    if bus.name in inv_to_bus_flow:
-                        for inverter in self.get_inverters():
-                            if inverter.name in inv_to_bus_flow:
-                                inv_to_bus_flows.append(('z_' + inv_to_bus_flow, inverter))
-
-                def rule(m,t):
-                    return model.component_dict[(bus.name,'current_charge_power')][t] == pyo.quicksum(model.component_dict[(flow,)][t] * model.component_dict[(inverter.name,'capacity')] for (flow,inverter) in inv_to_bus_flows)
-                model.add((bus.name + '_current_charge_power_constraint',), pyo.Constraint(t_not_driving, rule=rule))
-
-                # if bus is not driving and SOC is below 30%, it should be charged with max power of the respective inverter it is connected to
-                def rule(m, t):
-                    return model.component_dict[(bus.name, 'input_1')][t] * bus.input_efficiency >= 0.99 * model.component_dict[(bus.name,'current_charge_power',)][t] * (model.component_dict[(bus.name, 'z_SOC_below_30')][t]) / bus.e2p_in
-                model.add(prefix + (bus.name + '_not_driving_input_cons',), pyo.Constraint(t_not_driving, rule=rule))
-
-                # in case of multiple inverters with different capacities
-                # bus should at least be charged with the max power of the inverter with the lowest capacity
-                # does not work if inverter size is to be optimized
-                # def rule(m,t):
-                #     return model.component_dict[(bus.name, 'input_1')][t] * bus.input_efficiency >= 0.99 * min_charging_power * (model.component_dict[(bus.name, 'z_SOC_below_30')][t]) / bus.e2p_in
-                # model.add(prefix + (bus.name + 'not_driving_min_input_cons',), pyo.Constraint(t_not_driving, rule=rule))
-
-    # constraints for multiple buses and multiple charging stations
-    # no fixed connection between bus and charging station, bus can switch charging station
-    # only allows power flows between one bus and one inverter
-    def add_simultaneous_connect_constraints(self, model, prefix):
-        inverters = self.get_inverter_names()
-        buses = self.get_bus_names()
-
-        for connector in self._connectors:
-
-            # an inverter can only charge one bus at a time
-            if connector.name in inverters and connector.type.value == 'output_1':
-                def rule(m, t):
-                    return pyo.quicksum(model.component_dict[prefix + ('z_' + flow[0],)][t] for [flow, other_side] in zip(connector.flows, connector.other_sides) if other_side in buses) <= 1
-                model.add(prefix + (connector.name + '_to_buses_simul_flow_constraint',), pyo.Constraint(model.T, rule=rule))
-
-                flows = [flow for [flow, other_side] in zip(connector.flows, connector.other_sides) if other_side in buses]
-                for flow in flows:
-                    def rule(m, t):
-                        return model.component_dict[prefix + (flow[0],)][t] <= model.component_dict[prefix + ('z_' + flow[0],)][t] * 100000
-                    model.add(prefix + (connector.name + '_output_1: ' + flow[0] + '_flow_enable_constraint',), pyo.Constraint(model.T, rule=rule))
-
-            # an inverter can only receive power from one bus at a time
-            elif connector.name in inverters and connector.type.value == 'input_1':
-                def rule(m, t):
-                    return pyo.quicksum(model.component_dict[prefix + ('z_' + flow[0],)][t] for [flow, other_side] in zip(connector.flows, connector.other_sides) if other_side in buses) <= 1
-                model.add(prefix + ('from_buses_to_' + connector.name + '_simul_flow_constraint',),pyo.Constraint(model.T, rule=rule))
-
-                flows = [flow for [flow, other_side] in zip(connector.flows, connector.other_sides) if other_side in buses]
-                for flow in flows:
-                    def rule(m, t):
-                        return model.component_dict[prefix + (flow[0],)][t] <= model.component_dict[prefix + ('z_' + flow[0],)][t] * 100000
-                    model.add(prefix + (connector.name + '_input_1: ' + flow[0] + '_flow_enable_constraint',), pyo.Constraint(model.T, rule=rule))
-
-            # a bus can only be charged by one inverter at a time
-            elif connector.name in buses and connector.type.value == 'input_1':
-                def rule(m, t):
-                    return pyo.quicksum(model.component_dict[prefix + ('z_' + flow[0],)][t] for [flow, other_side] in zip(connector.flows, connector.other_sides) if other_side in inverters) <= 1
-                model.add(prefix + ('from_inverters_to_' + connector.name + '_simul_flow_constraint',),pyo.Constraint(model.T, rule=rule))
-
-                flows = [flow for [flow, other_side] in zip(connector.flows, connector.other_sides) if other_side in inverters]
-                for flow in flows:
-                    def rule(m, t):
-                        return model.component_dict[prefix + (flow[0],)][t] <= model.component_dict[prefix + ('z_' + flow[0],)][t] * 100000
-                    model.add(prefix + (connector.name + '_input_1: ' + flow[0] + '_flow_enable_constraint',), pyo.Constraint(model.T, rule=rule))
-
-            # a bus can only give back power to one inverter at a time
-            elif connector.name in buses and connector.type.value == 'output_1':
-                def rule(m, t):
-                    return pyo.quicksum(model.component_dict[prefix + ('z_' + flow[0],)][t] for [flow, other_side] in zip(connector.flows, connector.other_sides) if other_side in inverters) <= 1
-                model.add(prefix + (connector.name + '_to_inverters_simul_flow_constraint',),pyo.Constraint(model.T, rule=rule))
-
-                flows = [flow for [flow, other_side] in zip(connector.flows, connector.other_sides) if other_side in inverters]
-                for flow in flows:
-                    def rule(m, t):
-                        return model.component_dict[prefix + (flow[0],)][t] <= model.component_dict[prefix + ('z_' + flow[0],)][t] * 100000
-                    model.add(prefix + (connector.name + '_output_1: ' + flow[0] + '_flow_enable_constraint',), pyo.Constraint(model.T, rule=rule))
-
-
-    def get_inverter_names(self):
-        inverters = []
-        for name, component in self._components.items():
-            if component.type == 'StaticBiInverter':
-                inverters.append(name)
-        return inverters
-
-    def get_bus_names(self):
-        buses = []
-        for name, component in self._components.items():
-            if component.type == 'ElecBus':
-                buses.append(name)
-        return buses
-
-    def get_grid(self):
-        for component in self._components.values():
-            if component.type == 'ElectricalGrid':
-                return component
-
-    def get_buses(self):
-        buses = []
-        for component in self._components.values():
-            if component.type == 'ElecBus':
-                buses.append(component)
-        return buses
-
-    def get_inverters(self):
-        inverter = []
-        for component in self._components.values():
-            if component.type == 'StaticBiInverter':
-                inverter.append(component)
-        return inverter
-
-    def get_depot_chargers(self):
-        depot_chargers = []
-        for inverter in self.get_inverters():
-            for connector in self._connectors:
-                if connector.name == inverter.name and connector.type.value == 'output_2':
-                    if self.get_grid().name not in connector.other_sides:
-                        depot_chargers.append(inverter)
-                        break
-        return depot_chargers
-
-    def get_driving_consumptions(self):
-        consumptions = []
-        for component in self._components.values():
-            if component.type == 'DrivingConsumption':
-                consumptions.append(component)
-        return consumptions
-
-    def get_depot_to_bus_flows(self):
-        flows = []
-        for flow in self._flows:
-            flow_found = False
-            for inverter in self.get_depot_chargers():
-                if flow_found:
-                    break
-                for bus in self.get_buses():
-                    if flow.startswith(inverter.name) and bus.name in flow:
-                        flows.append(flow)
-                        flow_found = True
-                        break
-        return flows
-
-    def get_bus_to_depot_flows(self):
-        flows = []
-        for flow in self._flows:
-            flow_found = False
-            for bus in self.get_buses():
-                if flow_found:
-                    break
-                for inverter in self.get_depot_chargers():
-                    if flow.startswith(bus.name) and inverter.name in flow:
-                        flows.append(flow)
-                        flow_found = True
-                        break
-        return flows
diff --git a/Prosumer/model/BusChargingStationWithHPC.py b/Prosumer/model/BusChargingStationWithHPC.py
deleted file mode 100644
index 81d16a9fb2836b9d86d3007a606fd48b4d883b03..0000000000000000000000000000000000000000
--- a/Prosumer/model/BusChargingStationWithHPC.py
+++ /dev/null
@@ -1,146 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import Model_Library.Prosumer.model.BusChargingStation as BusChargingStation
-import pyomo.environ as pyo
-
-
-class BusChargingStationWithHPC(BusChargingStation):
-    def __init__(self, name, configuration, profiles, dynamic):
-        super().__init__(name=name,
-                         configuration=configuration,
-                         profiles=profiles,
-                         dynamic=dynamic)
-
-        for bus, driving_cons in zip(self.buses.keys(), self.driving_consumptions.keys()):
-            profile_dict = configuration['components'][driving_cons]
-            self.buses[bus]['z_connected_to_HPC_1'] = profiles[profile_dict['consumption']][0].loc[:, 'Connected to HPC 1']
-            self.buses[bus]['z_connected_to_HPC_2'] = profiles[profile_dict['consumption']][0].loc[:, 'Connected to HPC 2']
-            self.buses[bus]['z_connected_to_HPC_3'] = profiles[profile_dict['consumption']][0].loc[:, 'Connected to HPC 3']
-        print('test')
-
-
-    def add_prosumer_variables(self, model, prefix = ()):
-        super().add_prosumer_variables(model, prefix)
-        for bus in self.get_buses():
-            model.add((bus.name, 'z_connected_to_HPC_1_check'), pyo.Var(model.T, bounds=(0,1)))
-            model.add((bus.name, 'z_connected_to_HPC_2_check'), pyo.Var(model.T, bounds=(0,1)))
-            model.add((bus.name, 'z_connected_to_HPC_3_check'), pyo.Var(model.T, bounds=(0,1)))
-
-            model.add((bus.name, 'z_connected_to_HPC_1'), pyo.Param(model.T, mutable=True))
-            model.set_value((bus.name, 'z_connected_to_HPC_1'), self.buses[bus.name]['z_connected_to_HPC_1'])
-
-            model.add((bus.name, 'z_connected_to_HPC_2'), pyo.Param(model.T, mutable=True))
-            model.set_value((bus.name, 'z_connected_to_HPC_2'), self.buses[bus.name]['z_connected_to_HPC_2'])
-
-            model.add((bus.name, 'z_connected_to_HPC_3'), pyo.Param(model.T, mutable=True))
-            model.set_value((bus.name, 'z_connected_to_HPC_3'), self.buses[bus.name]['z_connected_to_HPC_3'])
-
-    def add_prosumer_constraints(self, model, prefix = ()):
-        super().add_prosumer_constraints(model, prefix)
-        self.add_HPC_connection_constraints(model, prefix)
-
-    # switch off power flow to bus if not connected to high power charger
-    def add_HPC_connection_constraints(self, model, prefix):
-        for bus in self.get_buses():
-            connected_to_HPC_1 = self.buses[bus.name]['z_connected_to_HPC_1']
-            connected_to_HPC_2 = self.buses[bus.name]['z_connected_to_HPC_2']
-            connected_to_HPC_3 = self.buses[bus.name]['z_connected_to_HPC_3']
-            connected_profiles = [connected_to_HPC_1, connected_to_HPC_2, connected_to_HPC_3]
-
-            # following three constraints just to make connection status visible
-            def rule(m, t):
-                return model.component_dict[(bus.name, 'z_connected_to_HPC_1_check')][t] == model.component_dict[(bus.name, 'z_connected_to_HPC_1')][t]
-            model.add((bus.name + '_z_connected_to_HPC_1_constraint',), pyo.Constraint(model.T, rule=rule))
-
-            def rule(m, t):
-                return model.component_dict[(bus.name, 'z_connected_to_HPC_2_check')][t] == model.component_dict[(bus.name, 'z_connected_to_HPC_2')][t]
-            model.add((bus.name + '_z_connected_to_HPC_2_constraint',), pyo.Constraint(model.T, rule=rule))
-
-
-            def rule(m, t):
-                return model.component_dict[(bus.name, 'z_connected_to_HPC_3_check')][t] == model.component_dict[(bus.name, 'z_connected_to_HPC_3')][t]
-            model.add((bus.name + '_z_connected_to_HPC_3_constraint',), pyo.Constraint(model.T, rule=rule))
-
-
-            for hpc, connected_profile in zip(self.get_HPC_chargers(), connected_profiles):
-                t_not_connected = set()
-                for t in model.T:
-                    if connected_profile[t] == 0:
-                        t_not_connected.add(t)
-
-                for hpc_to_bus in self.get_HPC_to_bus_flows():
-                    if bus.name in hpc_to_bus and hpc.name in hpc_to_bus:
-                        def rule(m, t):
-                            return model.component_dict[prefix + (hpc_to_bus,)][t] == 0
-                        model.add((hpc_to_bus + '_disable_constraint',), pyo.Constraint(t_not_connected, rule=rule))
-                        break
-
-                for bus_to_hpc in self.get_bus_to_HPC_flows():
-                    if bus.name in bus_to_hpc and hpc.name in bus_to_hpc:
-                        def rule(m, t):
-                            return model.component_dict[prefix + (bus_to_hpc,)][t] == 0
-                        model.add((bus_to_hpc + 'disable_constraint',), pyo.Constraint(t_not_connected, rule=rule))
-                        break
-
-    # differenciation is done via connectors' other sides.
-    # HPC chargers should be directly connected to the grid in the JSON file
-    # depot chargers are connected to a bus bar which is then connected to the grid
-    def get_HPC_chargers(self):
-        HPC_chargers = []
-        for inverter in self.get_inverters():
-            for connector in self._connectors:
-                if connector.name == inverter.name and connector.type.value == 'output_2':
-                    if self.get_grid().name in connector.other_sides:
-                        HPC_chargers.append(inverter)
-                        break
-        return HPC_chargers
-
-    def get_HPC_to_bus_flows(self):
-        flows = []
-        for flow in self._flows:
-            flow_found = False
-            for hpc in self.get_HPC_chargers():
-                if flow_found:
-                    break
-                for bus in self.get_buses():
-                    if flow.startswith(hpc.name) and bus.name in flow:
-                        flows.append(flow)
-                        flow_found = True
-                        break
-        return flows
-
-    def get_bus_to_HPC_flows(self):
-        flows = []
-        for flow in self._flows:
-            flow_found = False
-            for bus in self.get_buses():
-                if flow_found:
-                    break
-                for hpc in self.get_HPC_chargers():
-                    if flow.startswith(bus.name) and hpc.name in flow:
-                        flows.append(flow)
-                        flow_found = True
-                        break
-        return flows
diff --git a/Prosumer/model/DistrictAsset.py b/Prosumer/model/DistrictAsset.py
deleted file mode 100644
index abee03f788a9b5fedbc24b3e4530f0a56c7a9cbb..0000000000000000000000000000000000000000
--- a/Prosumer/model/DistrictAsset.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import Model_Library.Prosumer.model.Prosumer as Prosumer
-from Model_Library.OptimizationModel import OptimizationBlock
-import pyomo.environ as pyo
-
-class DistrictAsset(Prosumer):
-    def __init__(self, name, configuration, profiles, dynamic):
-        super().__init__(name, configuration, profiles, dynamic)
-
-    def build_sizing_model(self, model):
-        block = OptimizationBlock(self._name, self._dynamic)
-        model.add((self._name,), block)
-
-        for component in self._components:
-            self._components[component].build_model(block, {})
-
-        self.add_flow_variables(block)
-
-        self.add_connection_constraints(block)
-
-    def get_export_import_expressions(self, block):
-        export_vars = []
-        import_vars = []
-        for component in self._components:
-            if self._components[component].type == 'ElectricalGrid':
-                export_vars.append((component, 'input_1'))
-                import_vars.append((component, 'output_1'))
-        def export_rule(m, t):
-            return pyo.quicksum(block.component_dict[export_var][t] for export_var in export_vars)
-        block.add(('export',), pyo.Expression(block.T, rule=export_rule))
-        def import_rule(m, t):
-            return pyo.quicksum(block.component_dict[import_var][t] for import_var in import_vars)
-        block.add(('import',), pyo.Expression(block.T, rule=import_rule))
-        return block.component_dict[('export',)], block.component_dict[('import',)]
diff --git a/Prosumer/model/OverheadLineForIMC.py b/Prosumer/model/OverheadLineForIMC.py
deleted file mode 100644
index c08d6f9e234c7654be82b5fba89527b9f8d085dd..0000000000000000000000000000000000000000
--- a/Prosumer/model/OverheadLineForIMC.py
+++ /dev/null
@@ -1,199 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import Model_Library.Prosumer.model.Prosumer as Prosumer
-import pyomo.environ as pyo
-
-class OverheadLineForIMC(Prosumer):
-    def __init__(self, name, configuration, profiles, dynamic):
-        super().__init__(name=name,
-                         configuration=configuration,
-                         profiles=profiles,
-                         dynamic=dynamic)
-
-        self.buses = {component_name: component for component_name, component in configuration['components'].items() if component['type'] == 'ElecBus'}
-        self.driving_consumptions = {component_name: component for component_name, component in configuration['components'].items() if component['type'] == 'DrivingConsumption'}
-
-        for bus, driving_cons in zip(self.buses.keys(), self.driving_consumptions.keys()):
-            profile_dict = configuration['components'][driving_cons]
-            self.buses[bus]['z_connected_to_OHL'] = profiles[profile_dict['consumption']][0].loc[:, 'Connected to OHL']
-
-
-    def add_prosumer_variables(self, model, prefix = ()):
-        super().add_prosumer_variables(model, prefix)
-        for bus in self.get_buses():
-            # switched from 'within binary' to bounds due to interpolation of input profiles
-            model.add((bus.name, 'z_connected_to_OHL_check'), pyo.Var(model.T, bounds=(0, 1)))
-
-            model.add((bus.name, 'z_connected_to_OHL'), pyo.Param(model.T, mutable=True))
-            model.set_value((bus.name, 'z_connected_to_OHL'), self.buses[bus.name]['z_connected_to_OHL'])
-
-
-    def add_prosumer_constraints(self, model, prefix = ()):
-        super().add_prosumer_constraints(model, prefix)
-        self.add_inverter_adjusted_to_peak_power_demand_constraint(model, prefix)
-        self.add_equal_battery_sizing_constraints(model, prefix)
-        self.add_equal_converter_sizing_constraints(model, prefix)
-        #self.add_equal_inverter_sizing_constraints(model, prefix) # not required if all inverters have same efficiency, if not disable peak power demand constraint and enable this one
-        self.add_OHL_connection_constraints(model, prefix)
-
-
-    # size of DC-AC Inverter should be fitted to peak power demand of mobility consumption
-    def add_inverter_adjusted_to_peak_power_demand_constraint(self, model, prefix):
-        for inverter, driving_consumption in zip(self.get_inverters(), self.get_driving_consumptions()):
-            capacity = self.get_max_consumption()/inverter.efficiency
-            model.add((inverter.name, 'size_cons'), pyo.Constraint(expr=model.component_dict[(inverter.name, 'capacity')] == capacity))
-
-
-    # all bus batteries should have the same capacity
-    def add_equal_battery_sizing_constraints(self, model, prefix):
-        buses = self.get_buses()
-        for i in range(len(buses)-1):
-            def rule(m,t):
-                return model.component_dict[(buses[i].name,'capacity')] == model.component_dict[(buses[i+1].name,'capacity')]
-            model.add(('equal_bat_capacities_constraint_' + str(i+1),),pyo.Constraint(model.T, rule=rule))
-
-
-    # all DC-DC converters in buses (not wayside) should have the same capacity
-    def add_equal_converter_sizing_constraints(self, model, prefix):
-        converters = self.get_converters_of_buses()
-        for i in range(len(converters)-1):
-            def rule(m,t):
-                return model.component_dict[(converters[i].name, 'capacity')] == model.component_dict[(converters[i+1].name,'capacity')]
-            model.add(('equal_converter_capacities_constraint_' + str(i+1),),pyo.Constraint(model.T, rule=rule))
-
-
-    # all DC-AC inverters should have the same capacity
-    def add_equal_inverter_sizing_constraints(self, model, prefix):
-        inverters = self.get_inverters()
-        for i in range(len(inverters)-1):
-            def rule(m,t):
-                return model.component_dict[(inverters[i].name,'capacity')] == model.component_dict[(inverters[i+1].name,'capacity')]
-            model.add(('equal_inverter_capacities_constraint_' + str(i+1),),pyo.Constraint(model.T, rule=rule))
-
-    # set power flow from OHL to bus to zero when bus disconnected
-    def add_OHL_connection_constraints(self,model,prefix):
-        for bus, OHL_to_bus, bus_to_OHL in zip(self.get_buses(), self.get_OHL_to_bus_flows(), self.get_bus_to_OHL_flows()):
-            t_not_connected = set()
-            for t in model.T:
-                if model.component_dict[(bus.name, 'z_connected_to_OHL')][t] == 0:
-                    t_not_connected.add(t)
-
-            def rule(m,t):
-                return model.component_dict[(bus.name, 'z_connected_to_OHL_check')][t] == model.component_dict[bus.name, 'z_connected_to_OHL'][t]
-            model.add((bus.name + 'z_connected_to_OHL_check_constraint'), pyo.Constraint(model.T, rule=rule))
-
-            def rule(m, t):
-                return model.component_dict[prefix + (OHL_to_bus,)][t] == 0
-            model.add((OHL_to_bus + 'z_connected_constraint',), pyo.Constraint(t_not_connected, rule=rule))
-
-            def rule(m, t):
-                return model.component_dict[prefix + (bus_to_OHL,)][t] == 0
-            model.add((bus_to_OHL + 'z_connected_constraint',), pyo.Constraint(t_not_connected, rule=rule))
-
-
-    def get_grid(self):
-        for component in self._components.values():
-            if component.type == 'ElectricalGrid':
-                return component
-
-    def get_buses(self):
-        buses = []
-        for component in self._components.values():
-            if component.type == 'ElecBus':
-                buses.append(component)
-        return buses
-
-    def get_converters(self):
-        converter = []
-        for component in self._components.values():
-            if component.type == 'DcDcConverter':
-                converter.append(component)
-        return converter
-
-    def get_converters_of_buses(self):
-        converter_names = []
-        buses = self.get_buses()
-        for connector in self._connectors:
-            for bus in buses:
-                if connector.type.value == 'output_2' and bus.name in connector.other_sides:
-                    converter_names.append(connector.name)
-        converters = [converter for converter in self.get_converters() if converter.name in converter_names]
-        return converters
-
-
-    def get_inverters(self):
-        inverter = []
-        for component in self._components.values():
-            if component.type == 'StaticInverter':
-                inverter.append(component)
-        return inverter
-
-    def get_driving_consumptions(self):
-        consumptions = []
-        for component in self._components.values():
-            if component.type == 'DrivingConsumption':
-                consumptions.append(component)
-        return consumptions
-
-    def get_bus_bars(self):
-        bus_bars = []
-        for component in self._components.values():
-            if component.type == 'ElectricalBusBar':
-                bus_bars.append(component)
-        return bus_bars
-
-    def get_OHL(self):
-        bus_bar_names = [component.name for component in self.get_bus_bars()]
-        for connector in self._connectors:
-            if connector.name in bus_bar_names and self.get_grid().name in connector.other_sides:
-                for component in self._components.values():
-                    if connector.name == component.name:
-                        return component
-
-    def get_bus_bars_of_buses(self):
-        bus_bars_of_buses = [bus_bar for bus_bar in self.get_bus_bars() if bus_bar.name != self.get_OHL().name]
-        return bus_bars_of_buses
-
-    def get_OHL_to_bus_flows(self):
-        flows = []
-        for flow in self._flows:
-            for bus_bar in self.get_bus_bars_of_buses():
-                if flow.startswith(self.get_OHL().name) and bus_bar.name in flow:
-                    flows.append(flow)
-        return flows
-
-    def get_bus_to_OHL_flows(self):
-        flows = []
-        for flow in self._flows:
-            for bus_bar in self.get_bus_bars_of_buses():
-                if flow.startswith(bus_bar.name) and self.get_OHL().name in flow:
-                    flows.append(flow)
-        return flows
-
-    def get_max_consumption(self):
-        max_cons = []
-        for driving_consumption in self.get_driving_consumptions():
-            max_cons.append(max(driving_consumption.consumption))
-        return max(max_cons)
diff --git a/Prosumer/model/Prosumer.py b/Prosumer/model/Prosumer.py
deleted file mode 100644
index 650b570b58dd2e2c3ca802dee96e108f85a3f60f..0000000000000000000000000000000000000000
--- a/Prosumer/model/Prosumer.py
+++ /dev/null
@@ -1,331 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import pyomo.environ as pyo
-import pandas as pd
-import os
-from datetime import timedelta
-from Model_Library.Component.load_component_library import load_component_library
-from Model_Library.Component.model.AbstractComponent import ComponentKind, ComponentCommodity
-from Model_Library.Component.model.EMS_components.EnergyManagementSystem import implement_strategy
-from Tooling.pareto_analysis.pareto_analysis import pareto_analysis
-from Model_Library.OptimizationModel import OptimizationModel, EntityResult
-from Tooling.dynamics.Dynamic import resample
-
-from enum import Enum
-
-component_directory = __file__
-for _ in range(3):
-    component_directory = os.path.dirname(component_directory)
-component_directory = os.path.join(component_directory, 'Component', 'model')
-component_library = load_component_library(component_directory)
-
-model_directory = __file__
-for _ in range(3):
-    model_directory = os.path.dirname(model_directory)
-model_directory = os.path.join(model_directory, 'Component', 'data')
-
-class ConnectorType(Enum):
-    INPUT_1 = 'input_1'
-    INPUT_2 = 'input_2'
-    OUTPUT_1 = 'output_1'
-    OUTPUT_2 = 'output_2'
-
-class ConnectorMode(Enum):
-    EMPTY = 1
-    SINGLE_CONTRACTED = 2
-    SINGLE = 3
-    MULTIPLE = 4
-
-class Connector:
-    def __init__(self, name, type, commodity):
-        self.name = name
-        self.type = type
-        self.commodity = commodity
-        self.flows = []
-        self.other_sides = []
-
-class Prosumer:
-    def __init__(self, name, configuration, profiles, dynamic):
-        self._name = name
-        self._configuration = configuration
-        self._components = dict()
-        self._connectors = []
-
-        for name, component_configuration in configuration['components'].items():
-            component_type = component_configuration['type']
-            component = component_library[component_type](name, component_configuration, model_directory, profiles, dynamic)
-            self._components[name] = component
-
-            input_commodity_1, input_commodity_2, output_commodity_1, output_commodity_2 = component.get_input_output_commodities()
-            if input_commodity_1 != None:
-                self._connectors.append(Connector(name, ConnectorType.INPUT_1, input_commodity_1))
-            if input_commodity_2 != None:
-                self._connectors.append(Connector(name, ConnectorType.INPUT_2, input_commodity_2))
-            if output_commodity_1 != None:
-                self._connectors.append(Connector(name, ConnectorType.OUTPUT_1, output_commodity_1))
-            if output_commodity_2 != None:
-                self._connectors.append(Connector(name, ConnectorType.OUTPUT_2, output_commodity_2))
-        
-        self._flows = []
-        
-        for connection in configuration['connections']:
-            flow_from = connection['from']
-            flow_output = str(connection['output'])
-            flow_to = connection['to']
-            flow_input = str(connection['input'])
-            flow = flow_from + '_' + flow_output + '_' + flow_to + '_' + flow_input
-            self._flows.append(flow)
-            for connector in self._connectors:
-                if connector.name == flow_from and connector.type.value == 'output_' + flow_output:
-                    connector.flows.append((flow,))
-                    connector.other_sides.append(flow_to)
-                elif connector.name == flow_to and connector.type.value == 'input_' + flow_input:
-                    connector.flows.append((flow,))
-                    connector.other_sides.append(flow_from)
-
-        for connector in self._connectors:
-            if len(connector.flows) == 0:
-                connector.mode = ConnectorMode.EMPTY
-            elif len(connector.flows) > 1:
-                connector.mode = ConnectorMode.MULTIPLE
-            else:
-                # the connector is single, but if we can contract depends on the connector on the other side
-                # find connector on the other side
-                for other_side_connector_option in self._connectors:
-                    if connector == other_side_connector_option:
-                        continue
-                    if connector.flows[0] in other_side_connector_option.flows:
-                        other_side_connector = other_side_connector_option
-                        break
-                # test if connector on the other side has been assigned a mode
-                if hasattr(other_side_connector, 'mode'):
-                    # the other side has been assigend a mode, so it could be contracted
-                    if other_side_connector.mode != ConnectorMode.SINGLE_CONTRACTED:
-                        # it has not, we can contract
-                        connector.mode = ConnectorMode.SINGLE_CONTRACTED
-                    else:
-                        # it has, we cannot contract
-                        connector.mode = ConnectorMode.SINGLE
-                else:
-                    # the other side has not been assigend a mode, so it is not contracted, so we can contract
-                    connector.mode = ConnectorMode.SINGLE_CONTRACTED
-                # contract the connector
-                if connector.mode == ConnectorMode.SINGLE_CONTRACTED:
-                    # remove flow
-                    flow_to_remove = connector.flows[0]
-                    self._flows.remove(flow_to_remove[0]) # connector.flows stores the flow as a singleton tuple of the flow name, but self._flow stores just the flow name, so extract the flow name from the singleton tuple
-                    # replace flow on both sides with the input/output
-                    connector.flows[0] = (connector.name, connector.type.value)
-                    index = [i for i in range(len(other_side_connector.flows)) if other_side_connector.flows[i] == flow_to_remove][0]
-                    other_side_connector.flows[index] = (connector.name, connector.type.value)
-
-        self._dynamic = dynamic
-
-        self._result = {}
-        self._last_result_key = None
-
-    def get_components(self, kind=ComponentKind.ALL, commodity=ComponentCommodity.ALL):
-        return (component for component in self._components.values() if component.match(kind=kind, commodity=commodity))
-
-    def build_model(self, configuration):
-        model = OptimizationModel(self._name, configuration['dynamic'])
-
-        for component in self._components:
-            self._components[component].build_model(model, configuration)
-
-        self.add_flow_variables(model)
-
-        self.add_connection_constraints(model)
-
-        implement_strategy(self, model, configuration)
-
-        self.add_prosumer_variables(model)
-
-        self.add_prosumer_constraints(model)
-
-        return model
-
-    def add_prosumer_constraints(self, model, prefix = ()):
-        """
-        Add specific constraints of a prosumer
-        """
-        pass
-
-    def add_prosumer_variables(self, model, prefix = ()):
-        """
-        Add variables for specific constraints of a prosumer
-        """
-        pass
-
-    def optimize_sizing(self, key, strategy):
-        model = self.build_model({'dynamic': self._dynamic, 'strategy': strategy})
-
-        # todo: necessary solver options (MIPgap, ...) should be available from runme.py
-        options = dict()
-        options['MIPGap'] = 0.01
-        options['Presolve'] = 2
-        options['TimeLimit'] = 200
-
-        model.solve(options, False)
-
-        if model.is_ok():
-            # no resampling necessary, because the model has the same dynamic as the prosumer
-            self._result[key] = self.get_empty_entity_result()
-            self._result[key].extract_results_from_model(model)
-            self._last_result_key = key
-        else:
-            print('ERROR: The model is infeasible or unbounded: no optimal solution found')
-
-    def pareto_analysis(self, strategy):
-        if len(strategy) != 2:
-            raise ValueError('Pareto analysis can only be done with two strategies!')
-        model = self.build_model({'dynamic': self._dynamic, 'strategy': strategy})
-
-        pareto_analysis(self, model, list(strategy.keys()))
-
-    def get_empty_entity_result(self):
-        base_variables = []
-        for component in self._components.values():
-            base_variables.extend(component.get_base_variable_names())
-        for flow in self._flows:
-            base_variables.append(((flow,), 'T'))
-        return EntityResult(self._dynamic, base_variables)
-
-    def build_graph(self):
-        graph = dict.fromkeys(self._components.keys())
-        for key in graph:
-            graph[key] = {'neigh_all': [], 'neigh_in': [], 'neigh_out': []}
-        for connector in self._connectors:
-            if connector.type == ConnectorType.INPUT_1 or connector.type == ConnectorType.INPUT_2:
-                for other_side in connector.other_sides:
-                    graph[connector.name]['neigh_in'].append(other_side)
-                    if other_side not in graph[connector.name]['neigh_all']:
-                        graph[connector.name]['neigh_all'].append(other_side)
-            if connector.type == ConnectorType.OUTPUT_1 or connector.type == ConnectorType.OUTPUT_2:
-                for other_side in connector.other_sides:
-                    graph[connector.name]['neigh_out'].append(other_side)
-                    if other_side not in graph[connector.name]['neigh_all']:
-                        graph[connector.name]['neigh_all'].append(other_side)
-        return graph
-
-    def optimize(self, configuration, i_start, i_end):
-        dynamic = self._dynamic.partial_dynamic(i_start, i_end)
-
-        configuration['dynamic'] = dynamic
-
-        model = self.build_model(configuration)
-
-        options = dict()
-        options['MIPGap'] = 0.01
-        options['Presolve'] = 2
-        options['TimeLimit'] = 200
-        
-        model.solve(options, False)
-
-        if model.is_ok():
-            return model
-        else:
-            return None
-
-    def get_export_import(self, result, target_dynamic):
-        data = dict()
-        for component in self.get_components(kind=ComponentKind.GRID, commodity=ComponentCommodity.ELECTRICITY):
-            export_data = resample(result[(component.name, 'input_1')], result.dynamic, target_dynamic)
-            import_data = resample(result[(component.name, 'output_1')], result.dynamic, target_dynamic)
-            data[component.name] = (export_data, import_data)
-        return data
-
-    def calc_annuity(self, price_ext, price_injection, df=False):
-        """
-        Calculates the annuity this prosumer achieves.
-
-        Parameters
-        ----------
-        price_ext: Price for buying electricity from the main grid
-        price_injection: Remuneration for feeding electricity into the public grid
-        df: boolean variable. True --> consider DF acitvation; False--> before DF activation
-
-        Returns
-        ----------
-        annuity_init: annuity of prosumer based on the initial schedule with it's individual constract
-        annuity_community: prosumer's annuity with community conditions either with or without DF activations
-        """
-        # The factor that converts the simulation to ONE year
-        annual_factor = timedelta(days=365) / timedelta(hours=sum(self._dynamic.step_size_p(position) for position in range(self._dynamic.number_of_steps())))
-
-        self_data = self.get_export_import(self._result['sized'], self._dynamic)
-        grid_export = sum(self_export for self_export, _ in self_data.values())
-        grid_import = sum(self_import for _, self_import in self_data.values())
-
-        grid = [self._components[comp] for comp in self._components if self._components[comp].type == 'ElectricalGrid'][0]
-
-        annuity_init = (+ grid_export * self._dynamic.step_sizes() * grid.injection_price
-                        - grid_import * self._dynamic.step_sizes() * grid.price)
-
-        annuity_init = annuity_init.sum() * annual_factor
-
-        # for INITIAL SCHEDULE
-        if not df:
-
-            annuity_community = (+ grid_export * self._dynamic.step_sizes() * price_injection[self._dynamic.time_steps()]
-                                 - grid_import * self._dynamic.step_sizes() * price_ext)
-
-            annuity_community = annuity_community.sum() * annual_factor
-            print('Annuity init ' + self._name + ': ' + str(annuity_init))
-            print('Annuity agg ' + self._name + ': ' + str(annuity_community))
-        else:
-            # for FIXED SCHEDULE
-            self_data = self.get_export_import(self._result[self._last_result_key], self._dynamic)
-            grid_export = sum(self_export for self_export, _ in self_data.values())
-            grid_import = sum(self_import for _, self_import in self_data.values())
-
-            annuity_community = (+ grid_export * self._dynamic.step_sizes() * price_injection[self._dynamic.time_steps()]
-                                 - grid_import * self._dynamic.step_sizes() * price_ext)
-
-            annuity_community = annuity_community.sum() * annual_factor
-            print('Annuity with df ' + self._name + ': ' + str(annuity_community))
-
-        return annuity_init, annuity_community
-
-    def add_flow_variables(self, model):
-        for flow in self._flows:
-            model.add((flow,), pyo.Var(model.T, bounds=(0, None)))
-
-    def add_connection_constraints(self, model):
-        for connector in self._connectors:
-            def rule(m, t):
-                return model.component_dict[(connector.name, connector.type.value)][t] == pyo.quicksum(model.component_dict[flow_variable][t] for flow_variable in connector.flows)
-            model.add((connector.name + '_' + connector.type.value + '_sum',), pyo.Constraint(model.T, rule = rule))
-    
-    def save_results(self):
-        if not os.path.exists('output_files/'):
-            os.makedirs('output_files/')
-        if not os.path.exists('output_files/' + self._name + '/'):
-            os.makedirs('output_files/' + self._name + '/')
-
-        # ToDo: wrap errors, exceptions in all functions
-        # Results df
-        with pd.ExcelWriter('output_files/' + self._name + '/results_' + self._name + '.xlsx') as writer:
-            self._result[self._last_result_key].to_excel(writer)
diff --git a/Prosumer/model/__init__.py b/Prosumer/model/__init__.py
deleted file mode 100644
index e8f53979190f433351939333df65782f8dc10e88..0000000000000000000000000000000000000000
--- a/Prosumer/model/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-# -*- coding:utf-8 -*-
-# Created by jgn on 30.11.2020.
-
-from .Prosumer import Prosumer
-from .DistrictAsset import DistrictAsset
-from .BusChargingStation import BusChargingStation
-from .OverheadLineForIMC import OverheadLineForIMC
-from .BusChargingStationWithHPC import BusChargingStationWithHPC
diff --git a/Prosumer/scripts/__init__.py b/Prosumer/scripts/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/Prosumer/scripts/calc_annuity_vdi2067.py b/Prosumer/scripts/calc_annuity_vdi2067.py
deleted file mode 100644
index 5f712210962118b30ccda514b9ee70ec887dd064..0000000000000000000000000000000000000000
--- a/Prosumer/scripts/calc_annuity_vdi2067.py
+++ /dev/null
@@ -1,183 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import numpy as np
-import pandas as pd
-
-
-def annuity_factor(t, q=1.007):
-    """
-    The method calculates the annuity factor, which shows repeated payments of
-    equal amount; usually the annual instalments required to pay off the
-    principal and interest on a debt.
-    :param t: number of years of the observation period
-    :param q: interest factor (taken from Bundesministerium der Finanzen)
-    :return: annuity factor
-    """
-    # q:
-    if q == 1.0:
-        a = 1 / t
-    else:
-        try:
-            a = (q - 1) / (1 - pow(q, -t))
-        except ZeroDivisionError:
-            raise ValueError('Cannot calculate annuity')
-    return a
-
-
-def dynamic_cash_value(t, q=1.007, r=1.03):
-    """
-    The method calculates the price-dynamic cash value factor, which considers
-    the price changes for the ongoing costs during the observation period.
-    :param t: number of years of the observation period
-    :param q: interest factor (taken from Bundesministerium der Finanzen)
-    :param r: price change factor
-    :return: cash value factor
-    """
-    if r == q:
-        b = t / q
-    else:
-        b = (1 - pow(r / q, t)) / (q - r)
-    return b
-
-
-def calc_capital_cost(t, t_n, q, ann, a_0):
-    """
-    The method calculates the annuity of capital-related costs.
-    :param t: observation period, in year
-    :param t_n: service life of the equipment, in year
-    :param q: interest factor
-    :param ann: annuity factor
-    :param a_0: investment amount in first year
-    :return: annuity of capital-related costs
-    """
-
-    # r: price change factor, taken from Statistisches Bundesamt: median of
-    # yearly price index change for central heating devices (GP 25 21)
-    r = 1.02
-    # n: number of replacements, np.ceil gives a float64 -> do int(n) for range
-    n = np.ceil(t / t_n) - 1
-    # r_w: residual value
-    r_w = a_0 * pow(r, n * t_n) * ((n + 1) * t_n - t) / t_n * 1 / pow(q, t)
-
-    a_inv = []
-    for i in range(int(n) + 1):
-        a_i = a_0 * ((pow(r, i * t_n)) / (pow(q, i * t_n)))
-        a_inv.append(a_i)
-
-    a_n_k = (sum(a_inv) - r_w) * ann
-    return a_n_k
-
-
-def calc_operation_cost(t, q, ann, a_0, f_inst, f_w, f_op):
-    """
-    The method calculates the annuity of operation-related costs.
-    :param t: observation period, in year
-    :param q: interest factor
-    :param ann: annuity factor
-    :param a_0: investment amount in first year
-    :param f_inst: float, factor for repair effort (from VDI2067)
-    :param f_w: float, factor for servicing and inspection effort (from VDI2067)
-    :param f_op: int, effort for operation in h/a (from VDI2067)
-    :return: annuity of operation-related costs
-
-    Other used parameters in method, the values for r and price_op are taken
-    from Statistisches Bundesamt.
-    r_b: price change factor for actual operation, for labour cost (vdi 2067)
-    r_in: price change factor for maintenance (vdi 2067)
-    b_b: cash value factor for actual operation
-    b_in: cash value factor for maintenance
-    price_op: labour costs per hour worked (2019)
-    """
-    r_b = 1.02
-    r_in = 1.03
-    b_b = dynamic_cash_value(t, q, r_b)
-    b_in = dynamic_cash_value(t, q, r_in)
-    price_op = 55.6
-
-    # disable a_b1 first for this cost doesn't exist when cap is 0!
-    # The value of this is little for large system and too much for small
-    # system!
-    # a_b1 = f_op * price_op
-    a_b1 = 0
-    a_in = a_0 * (f_inst + f_w)
-
-    a_n_b = a_b1 * ann * b_b + a_in * ann * b_in
-    return a_n_b
-
-
-def run(t, t_n, invest, cap, f_inst, f_w, f_op, i):
-    """
-    The method calculates the annuity of technical building installations
-    according to VDI 2067.
-    :param t: the observation period
-    :param t_n: service life of the equipment, in year
-    :param invest: float or pyomo variable, investment cost in [EUR/kW] or
-                   [EUR/kWh], fixed value for each component or calculated
-                   variable
-    :param cap: power or capacity of the component in [kW] or [kWh]
-    :param f_inst: float, factor for repair effort (from VDI2067)
-    :param f_w: float, factor for servicing and inspection effort (from VDI2067)
-    :param f_op: int, effort for operation in h/a (from VDI2067)
-    :param i: float, interest rate. The value of i should be less than 1 and
-              the value of q should be greater than 1
-    :return: annuity (one year) for the technical building installation
-    """
-    q = i+1
-
-    ann = annuity_factor(t, q)
-    a_0 = cap * invest
-
-    # The revenue and demand related cost are set in the energy management
-    # system class. Because the cost are related to the model variables.
-    # WARN: in the energy management system class the cash value factor is
-    # not considered, because the observation is only 1 year. If the
-    # observation period longer than 1 year, should use the cash value factor
-    # for demand related cost and revenue as well
-    # a_n_e: annuity of revenue, like feed in electricity
-    # a_n_v: annuity of demand related cost, like purchased gas or electricity
-    # a_n_s: annuity of other cost
-    a_n_e = 0
-    a_n_v = 0
-    a_n_s = 0
-
-    a_n = (calc_capital_cost(t, t_n, q, ann, a_0) + a_n_v
-           + calc_operation_cost(t, q, ann, a_0, f_inst, f_w, f_op) +
-           a_n_s) - a_n_e
-
-    return a_n
-
-
-if __name__ == "__main__":
-    # validation for optimisation results
-    t = 1
-    t_n = 20
-    invest = 200
-    cap = 1.17
-    f_inst = 0.01
-    f_w = 0.015
-    f_op = 20
-    i = 0.02
-    a_n = run(t, t_n, invest, cap, f_inst, f_w, f_op, i)
-    print(a_n)
diff --git a/Prosumer/scripts/results_evaluation/__init__.py b/Prosumer/scripts/results_evaluation/__init__.py
deleted file mode 100644
index 79958c0a4fadccb8205388c15c3979ef790f3bc8..0000000000000000000000000000000000000000
--- a/Prosumer/scripts/results_evaluation/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-from .results_evaluation import Plot_savings
\ No newline at end of file
diff --git a/Prosumer/scripts/results_evaluation/results_evaluation.py b/Prosumer/scripts/results_evaluation/results_evaluation.py
deleted file mode 100644
index 8c586ff71375ff58822144d10bd9474db9b8de07..0000000000000000000000000000000000000000
--- a/Prosumer/scripts/results_evaluation/results_evaluation.py
+++ /dev/null
@@ -1,119 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import os
-import pandas as pd
-import numpy as np
-import matplotlib.pyplot as plt
-import seaborn as sns
-from multiprocessing import Pool
-import os
-import matplotlib.style as style
-
-
-def Plot_savings(reference_scenario, topology):#='C:/Data/git/intraday/output_files/results_SCN0_CAT1'):
-
-        #reference_scenario: path to the reference scenario
-        #topology: name of the considered topology
-
-        # calculate savings and exract demand
-        all_folders = os.listdir('output_files')
-        scenario_savings_percent = []
-        scenario_savings_euro = []
-        scenario_elec_demand = []
-        scenario_therm_demand = []
-
-        try:
-            try:
-                for folder_name in all_folders:
-                    all_files = os.listdir('output_files/'+folder_name+'/')
-                    for file_name in all_files:
-                        if topology in file_name and file_name.endswith('.xlsx'):
-                            scenario_results = pd.read_excel('output_files/'+folder_name+'/'+file_name)
-                            scenario_costs = scenario_results['total_annual_costs'][0]
-                            scenario_elec_demand.append(scenario_results['elec_demand_yearly'][0])
-                            scenario_therm_demand.append(scenario_results['therm_demand_yearly'][0])
-                            reference_results = pd.read_excel('output_files/'+ reference_scenario + '_' +
-                                                          str(scenario_results['elec_demand_yearly'][0]) + '_' +
-                                                          str(scenario_results['therm_demand_yearly'][0]) + '/'
-                                                          + 'results_' + reference_scenario + '_' +
-                                                          str(scenario_results['elec_demand_yearly'][0]) + '_' +
-                                                          str(scenario_results['therm_demand_yearly'][0]) + '.xlsx')
-                            reference_costs = reference_results['total_annual_costs'][0]
-                            scenario_savings_percent.append((reference_costs - scenario_costs) / reference_costs * 100)
-                            scenario_savings_euro.append(reference_costs - scenario_costs)
-
-                # heatmap plot
-                if scenario_savings_percent:
-                    file_dir = os.path.dirname(os.path.abspath(__file__))
-                    for j in range(2):
-                        file_dir = os.path.dirname(file_dir)
-
-                    # Create result path
-                    if not os.path.exists('output_files/plots/savings_percent'):
-                        os.makedirs('output_files/plots/savings_percent')
-                    if not os.path.exists('output_files/plots/savings_euros'):
-                        os.makedirs('output_files/plots/savings_euros')
-
-                    # Collect data and plot heat maps
-                    # Savings in percent
-                    data_percent = pd.DataFrame(
-                        data={'Electrical demand': scenario_elec_demand, 'Thermal demand': scenario_therm_demand,
-                              'Savings percent': scenario_savings_percent})
-                    data_percent = data_percent.pivot(index='Electrical demand', columns='Thermal demand', values='Savings percent')
-                    sns_plt_percent = sns.heatmap(data_percent)
-                    fig_percent = sns_plt_percent.get_figure()
-                    plt.legend(title='Cost savings in % of ' + topology)  # , labels=['test1', 'test2'])
-                    # Save plot
-                    fig_percent.savefig(os.path.join(file_dir, 'output_files/plots/savings_percent/', 'savings_heatmap_' + topology + '.jpeg'), dpi=500)
-                    plt.close()
-
-                    # Collect data and plot heat maps
-                    # Savings in euro
-                    data_euro = pd.DataFrame(
-                        data={'Electrical demand': scenario_elec_demand, 'Thermal demand': scenario_therm_demand,
-                              'Savings euro': scenario_savings_euro})
-                    data_euro = data_euro.pivot(index='Electrical demand', columns='Thermal demand', values='Savings euro')
-                    sns_plt_euro = sns.heatmap(data_euro)
-                    fig_euro = sns_plt_euro.get_figure()
-                    plt.legend(title='Cost savings in Euro of ' + topology)  # , labels=['test1', 'test2'])
-                    # Save plot
-                    fig_euro.savefig(os.path.join(file_dir, 'output_files/plots/savings_euros/', 'savings_heatmap_' + topology + '.jpeg'), dpi=500)
-                    plt.close()
-            except ValueError:
-                print('Numerical error! Please check results.')
-        except OSError as e:
-            print('No reference results found: Please check spelling!')
-
-
-
-
-'''prosumer_name = ['SCN0_CAT1',
-                 'SCN1_CAT1',
-                 'SCN2_CAT1_PV1',
-                 'SCN2_CAT1_PV2_BA',
-                 'SCN2_CAT1_PV3_BA_HP']
-
-for i in prosumer_name:
-    Plot_savings(i)'''
\ No newline at end of file
diff --git a/architecture.py b/architecture.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d43ae0ba7bb6f8a19e8fc6ada92ccb6db873931
--- /dev/null
+++ b/architecture.py
@@ -0,0 +1,2785 @@
+from Model_Library.dynamics import Dynamic, DynamicTree, TreeDynamic
+import Model_Library
+
+import abc
+from dataclasses import dataclass
+from enum import Enum
+from functools import reduce
+from itertools import product
+import math
+import numpy as np
+import pandas as pd
+import pyomo.environ as pyo
+from sys import float_info
+from typing import Dict, List, Tuple, Union
+
+
+class ArchitectureKind(Enum):
+    NONE = ""
+    REDUCTION = "r"
+    AGGREGATION = "a"
+    PERIOD = "p"
+    STAGE = "d"
+    SCENARIO = "s"
+
+    @staticmethod
+    def from_char(char: str) -> "ArchitectureKind":
+        if char == "":
+            return ArchitectureKind.NONE
+        if char == "r":
+            return ArchitectureKind.REDUCTION
+        if char == "a":
+            return ArchitectureKind.AGGREGATION
+        if char == "p":
+            return ArchitectureKind.PERIOD
+        if char == "d":
+            return ArchitectureKind.STAGE
+        if char == "s":
+            return ArchitectureKind.SCENARIO
+        raise ValueError(f"Invalid character {char}!")
+
+
+class ArchitecturePiece(abc.ABC):
+    kind: ArchitectureKind
+
+    @staticmethod
+    def one_index(kind: ArchitectureKind, index: int) -> "ArchitecturePiece":
+        if kind == ArchitectureKind.AGGREGATION:
+            return AggregationPiece(index)
+        elif kind == ArchitectureKind.STAGE:
+            return StagePiece(index)
+        else:
+            raise ValueError(f"Invalid ArchitectureKind {kind}!")
+
+    @staticmethod
+    def two_index(
+        kind: ArchitectureKind, index_1: int, index_2: int
+    ) -> "ArchitecturePiece":
+        if kind == ArchitectureKind.PERIOD:
+            return PeriodPiece(index_1, index_2)
+        elif kind == ArchitectureKind.SCENARIO:
+            return ScenarioPiece(index_1, index_2)
+        else:
+            raise ValueError(f"Invalid ArchitectureKind {kind}!")
+
+    def is_prefix(self) -> bool:
+        return (
+            self.kind == ArchitectureKind.REDUCTION
+            or self.kind == ArchitectureKind.PERIOD
+            or self.kind == ArchitectureKind.SCENARIO
+        )
+
+    @abc.abstractmethod
+    def to_str(self) -> str:
+        pass
+
+    @abc.abstractmethod
+    def equal(self, other: "ArchitecturePiece") -> bool:
+        pass
+
+    @abc.abstractmethod
+    def child_piece(self, index_2: int) -> "ArchitecturePiece":
+        pass
+
+    @abc.abstractmethod
+    def contains(self, other: "ArchitecturePiece") -> Union[None, int]:
+        pass
+
+
+@dataclass
+class NonePiece(ArchitecturePiece):
+    kind = ArchitectureKind.NONE
+
+    def to_str(self) -> str:
+        return ""
+
+    def equal(self, other: ArchitecturePiece) -> bool:
+        return isinstance(other, NonePiece)
+
+    def child_piece(self, index_2: int) -> "ArchitecturePiece":
+        raise TypeError(f"Invalid type {type(self)}!")
+
+    def contains(self, other: "ArchitecturePiece") -> Union[None, int]:
+        return None
+
+
+@dataclass
+class ReductionPiece(ArchitecturePiece):
+    kind = ArchitectureKind.REDUCTION
+    index: int
+
+    def to_str(self) -> str:
+        return "r_" + str(self.index)
+
+    def equal(self, other: ArchitecturePiece) -> bool:
+        if isinstance(other, ReductionPiece):
+            return self.index == other.index
+        else:
+            return False
+
+    def child_piece(self, index_2: int) -> "ArchitecturePiece":
+        raise TypeError(f"Invalid type {type(self)}!")
+
+    def contains(self, other: "ArchitecturePiece") -> Union[None, int]:
+        if isinstance(other, ReductionPiece) and self.index == other.index:
+            return 0
+        else:
+            return None
+
+
+@dataclass
+class AggregationPiece(ArchitecturePiece):
+    kind = ArchitectureKind.AGGREGATION
+    index: int
+
+    def to_str(self) -> str:
+        return "a_" + str(self.index)
+
+    def equal(self, other: ArchitecturePiece) -> bool:
+        if isinstance(other, AggregationPiece):
+            return self.index == other.index
+        else:
+            return False
+
+    def child_piece(self, index_2: int) -> ArchitecturePiece:
+        return PeriodPiece(self.index, index_2)
+
+    def contains(self, other: "ArchitecturePiece") -> Union[None, int]:
+        if isinstance(other, PeriodPiece) and self.index == other.index_1:
+            return other.index_2
+        else:
+            return None
+
+
+@dataclass
+class PeriodPiece(ArchitecturePiece):
+    kind = ArchitectureKind.PERIOD
+    index_1: int
+    index_2: int
+
+    def to_str(self) -> str:
+        return "p_" + str(self.index_1) + "_" + str(self.index_2)
+
+    def equal(self, other: ArchitecturePiece) -> bool:
+        if isinstance(other, PeriodPiece):
+            return self.index_1 == other.index_1 and self.index_2 == other.index_2
+        else:
+            return False
+
+    def child_piece(self, index_2: int) -> "ArchitecturePiece":
+        raise TypeError(f"Invalid type {type(self)}!")
+
+    def contains(self, other: "ArchitecturePiece") -> Union[None, int]:
+        if (
+            isinstance(other, PeriodPiece)
+            and self.index_1 == other.index_1
+            and self.index_2 == other.index_2
+        ):
+            return 0
+        else:
+            return None
+
+
+@dataclass
+class StagePiece(ArchitecturePiece):
+    kind = ArchitectureKind.STAGE
+    index: int
+
+    def to_str(self) -> str:
+        return "d_" + str(self.index)
+
+    def equal(self, other: ArchitecturePiece) -> bool:
+        if isinstance(other, StagePiece):
+            return self.index == other.index
+        else:
+            return False
+
+    def child_piece(self, index_2: int) -> ArchitecturePiece:
+        return ScenarioPiece(self.index, index_2)
+
+    def contains(self, other: "ArchitecturePiece") -> Union[None, int]:
+        if isinstance(other, ScenarioPiece) and self.index == other.index_1:
+            return other.index_2
+        else:
+            return None
+
+
+@dataclass
+class ScenarioPiece(ArchitecturePiece):
+    kind = ArchitectureKind.SCENARIO
+    index_1: int
+    index_2: int
+
+    def to_str(self) -> str:
+        return "s_" + str(self.index_1) + "_" + str(self.index_2)
+
+    def equal(self, other: ArchitecturePiece) -> bool:
+        if isinstance(other, ScenarioPiece):
+            return self.index_1 == other.index_1 and self.index_2 == other.index_2
+        else:
+            return False
+
+    def child_piece(self, index_2: int) -> "ArchitecturePiece":
+        raise TypeError(f"Invalid type {type(self)}!")
+
+    def contains(self, other: "ArchitecturePiece") -> Union[None, int]:
+        if (
+            isinstance(other, ScenarioPiece)
+            and self.index_1 == other.index_1
+            and self.index_2 == other.index_2
+        ):
+            return 0
+        else:
+            return None
+
+
+class StringWindow:
+    def __init__(self, string: str, start: int, end: int):
+        self.string = string
+        self.start = start
+        self.end = end
+
+    def is_empty(self) -> bool:
+        return self.start == self.end
+
+    def next_char(self) -> str:
+        if self.is_empty():
+            raise SyntaxError("Expected char next, found nothing!")
+        self.start += 1
+        return self.string[self.start - 1]
+
+    def last_char(self) -> str:
+        if self.is_empty():
+            raise SyntaxError("Expected char last, found nothing!")
+        self.end -= 1
+        return self.string[self.end]
+
+    def next_number(self) -> int:
+        if self.is_empty():
+            raise SyntaxError("Expected number next, found nothing!")
+        length = 0
+        while (
+            self.start + length < self.end
+            and self.string[self.start + length] in "0123456789"
+        ):
+            length += 1
+        self.start += length
+        return int(self.string[self.start - length : self.start])
+
+    def split(self, char) -> List["StringWindow"]:
+        i = self.start
+        windows = []
+        current_split_start = self.start
+        stack = 0
+        ignore = False
+        while i < self.end:
+            if self.string[i] == char and not ignore:
+                windows.append(StringWindow(self.string, current_split_start, i))
+                current_split_start = i + 1
+            elif self.string[i] == "(":
+                ignore = True
+                stack += 1
+            elif self.string[i] == ")":
+                stack -= 1
+                if stack == 0:
+                    ignore = False
+            i += 1
+        windows.append(StringWindow(self.string, current_split_start, i))
+        if stack != 0:
+            raise SyntaxError("Encountered unmatched parentheses during splitting!")
+        return windows
+
+
+class Identifier:
+    def __init__(
+        self,
+        pieces: List[ArchitecturePiece],
+        children: List[List[int]],
+        parent: List[int],
+        root_pointer: int,
+        string: str,
+    ):
+        self.pieces = pieces
+        self.children = children
+        self.parent = parent
+        self.root_pointer = root_pointer
+        self.string = string
+
+    @staticmethod
+    def from_str(str: str) -> "Identifier":
+        pieces = []
+        children = []
+        parent = []
+        root_pointer = 0
+
+        Identifier._construct(
+            StringWindow(str, 0, len(str)), pieces, children, parent, None
+        )
+        return Identifier(pieces, children, parent, root_pointer, str)
+
+    @staticmethod
+    def _construct(
+        identifier_str: StringWindow,
+        pieces: List[ArchitecturePiece],
+        children: List[List[int]],
+        parent: List[Union[None, int]],
+        last_node_index: Union[None, int],
+    ) -> int:
+        current_node_index = len(pieces)
+        if identifier_str.is_empty():
+            pieces.append(NonePiece())
+            children.append([])
+            parent.append(last_node_index)
+            return current_node_index
+
+        kind = ArchitectureKind.from_char(identifier_str.next_char())
+        if not identifier_str.next_char() == "_":
+            raise SyntaxError("Expected _ next, found something else!")
+        index = identifier_str.next_number()
+
+        if kind == ArchitectureKind.REDUCTION:
+            if not identifier_str.next_char() == ".":
+                raise SyntaxError("Expected . next, found something else!")
+
+            pieces.append(ReductionPiece(index))
+            children.append([])
+            parent.append(last_node_index)
+
+            child_index = Identifier._construct(
+                identifier_str, pieces, children, parent, current_node_index
+            )
+            children[current_node_index].append(child_index)
+            return current_node_index
+
+        if kind == ArchitectureKind.AGGREGATION or kind == ArchitectureKind.STAGE:
+            if not identifier_str.next_char() == "(":
+                raise SyntaxError("Expected ( next, found something else!")
+            if not identifier_str.last_char() == ")":
+                raise SyntaxError("Expected ) last, found something else!")
+            sub_identifier_strs = identifier_str.split(",")
+
+            pieces.append(ArchitecturePiece.one_index(kind, index))
+            children.append([])
+            parent.append(last_node_index)
+
+            for sub_identifier_str in sub_identifier_strs:
+                child_index = Identifier._construct(
+                    sub_identifier_str, pieces, children, parent, current_node_index
+                )
+                children[current_node_index].append(child_index)
+            return current_node_index
+
+        if kind == ArchitectureKind.PERIOD or kind == ArchitectureKind.SCENARIO:
+            if not identifier_str.next_char() == "_":
+                raise SyntaxError("Expected _ next, found something else!")
+            index_2 = identifier_str.next_number()
+            if not identifier_str.next_char() == ".":
+                raise SyntaxError("Expected . next, found something else!")
+
+            pieces.append(ArchitecturePiece.two_index(kind, index, index_2))
+            children.append([])
+            parent.append(last_node_index)
+
+            child_index = Identifier._construct(
+                identifier_str, pieces, children, parent, current_node_index
+            )
+            children[current_node_index].append(child_index)
+            return current_node_index
+
+        raise ValueError(f"Invalid ArchitectureKind {kind}!")
+
+    @staticmethod
+    def union(identifiers: List["Identifier"]) -> "Identifier":
+        root_pointers = [identifier.get_root_pointer() for identifier in identifiers]
+        pieces = []
+        children = []
+        parent = []
+        root_pointer = 0
+        string = ""
+
+        string, _ = Identifier._union(
+            identifiers, root_pointers, pieces, children, parent, string, None
+        )
+        return Identifier(pieces, children, parent, root_pointer, string)
+
+    @staticmethod
+    def _union(
+        identifiers: List["Identifier"],
+        pointers: List[int],
+        u_pieces: List[ArchitecturePiece],
+        u_children: List[List[int]],
+        u_parent: List[Union[None, int]],
+        u_string: str,
+        last_node_index: Union[None, int],
+    ) -> Tuple[str, int]:
+        current_node_index = len(u_pieces)
+
+        if all(
+            identifiers[0].pieces[pointers[0]].equal(identifiers[i].pieces[pointers[i]])
+            for i in range(1, len(identifiers))
+        ):
+            piece = identifiers[0].pieces[pointers[0]]
+
+            if isinstance(piece, NonePiece):
+                u_pieces.append(NonePiece())
+                u_children.append([])
+                u_parent.append(last_node_index)
+
+                return u_string, current_node_index
+
+            if isinstance(piece, ReductionPiece):
+                u_pieces.append(piece)
+                u_children.append([])
+                u_parent.append(last_node_index)
+                u_string += piece.to_str() + "."
+
+                u_string, child_index = Identifier._union(
+                    identifiers,
+                    [
+                        idenfier.children[pointer][0]
+                        for idenfier, pointer in zip(identifiers, pointers)
+                    ],
+                    u_pieces,
+                    u_children,
+                    u_parent,
+                    u_string,
+                    current_node_index,
+                )
+                u_children[current_node_index].append(child_index)
+                return u_string, current_node_index
+
+            if isinstance(piece, (AggregationPiece, StagePiece)):
+                u_pieces.append(piece)
+                u_children.append([])
+                u_parent.append(last_node_index)
+                u_string += piece.to_str() + "("
+
+                for i in range(len(identifiers[0].children[pointers[0]])):
+                    if i != 0:
+                        u_string += ","
+                    u_string, child_index = Identifier._union(
+                        identifiers,
+                        [
+                            idenfier.children[pointer][i]
+                            for idenfier, pointer in zip(identifiers, pointers)
+                        ],
+                        u_pieces,
+                        u_children,
+                        u_parent,
+                        u_string,
+                        current_node_index,
+                    )
+                    u_children[current_node_index].append(child_index)
+                u_string += ")"
+                return u_string, current_node_index
+
+            if isinstance(piece, (PeriodPiece, ScenarioPiece)):
+                u_pieces.append(piece)
+                u_children.append([])
+                u_parent.append(last_node_index)
+                u_string += piece.to_str() + "."
+
+                u_string, child_index = Identifier._union(
+                    identifiers,
+                    [
+                        idenfier.children[pointer][piece.index_2]
+                        for idenfier, pointer in zip(identifiers, pointers)
+                    ],
+                    u_pieces,
+                    u_children,
+                    u_parent,
+                    u_string,
+                    current_node_index,
+                )
+                return u_string, current_node_index
+
+        else:
+            u_pieces.append(NonePiece())
+            u_children.append([])
+            u_parent.append(last_node_index)
+
+            return u_string, current_node_index
+
+    def get_root_pointer(self) -> int:
+        return self.root_pointer
+
+    def str(self) -> str:
+        return self.string
+
+    def add_prefix_from_str(self, prefix_str: str):
+        # TODO maybe we can make an optimisation to not reparse self.string, but this works and was easy to implement
+        return Identifier.from_str(prefix_str + self.string)
+
+    @staticmethod
+    def prefix_pieces_to_str(pieces: List[ArchitecturePiece]) -> str:
+        res = ""
+        for piece in pieces:
+            if not piece.is_prefix():
+                raise ValueError("Encountered non prefix ArchitecturePiece!")
+            res += piece.to_str() + "."
+        return res
+
+    @staticmethod
+    def is_bool_safe(a: str, b: str) -> bool:
+        a_pieces = a.split(".")
+        b_pieces = b.split(".")
+        pointer = 0
+        while True:
+            if pointer == len(a_pieces) - 1 and pointer == len(b_pieces) - 1:
+                return True
+            if pointer == len(a_pieces) - 1:
+                return False
+            if pointer == len(b_pieces) - 1:
+                return True
+            if a_pieces[pointer] == b_pieces[pointer]:
+                pointer += 1
+            else:
+                return False
+
+
class Distribution:
    """Walks an Identifier tree in lock step with an external traversal.

    down()/up() mirror descending/ascending in the external traversal;
    check() reports whether the traversal currently sits exactly on a
    NonePiece leaf of the identifier while still matching its structure.
    """

    def __init__(self, identifier: Identifier):
        self.identifier = identifier
        # current node of the identifier while the traversal is on track
        self.pointer = identifier.get_root_pointer()
        # pointers of the ancestors of the current node, for up()
        self.pointer_stack = []
        # True while the traversal so far matches a path in the identifier
        self.on_track = True
        # number of levels descended past the point where the traversal left
        # the identifier's tree; 0 while on track
        self.depth_off_track = 0

    def down(self, piece: ArchitecturePiece):
        """Descend along piece; go off track if the identifier has no match."""
        if self.on_track:
            self_piece = self.identifier.pieces[self.pointer]
            # contains() apparently yields the child slot matching piece, or
            # None when there is no match — NOTE(review): confirm its contract
            index = self_piece.contains(piece)
            if index is not None:
                self.pointer_stack.append(self.pointer)
                self.pointer = self.identifier.children[self.pointer][index]
            else:
                self.on_track = False
                self.depth_off_track += 1
        else:
            self.depth_off_track += 1

    def check(self) -> bool:
        """True iff on track and positioned on a NonePiece leaf."""
        return self.on_track and isinstance(
            self.identifier.pieces[self.pointer], NonePiece
        )

    def up(self):
        """Ascend one level; return on track once all off-track levels unwind."""
        if self.on_track:
            self.pointer = self.pointer_stack.pop()
        else:
            self.depth_off_track -= 1
            if self.depth_off_track == 0:
                self.on_track = True
+
+
class Architecture:
    """Root container for an architecture of blocks built on one TreeDynamic.

    Holds module-wide caches (dynamics, period aggregations, resamplings)
    keyed by identifier strings, shared by all ArchitectureBlocks.
    """

    def __init__(self, dynamic: TreeDynamic):
        # caches shared across all blocks of this architecture
        self.architecture_dynamics = {}
        self.underlying_dynamics = {}
        self.all_period_aggregations = {}
        self.resamplings = {}

        self.root = ArchitectureBlock("", dynamic, self)

    def get_dynamic(self, identifier_str: str) -> "ArchitectureDynamic":
        """Resolve an identifier string to its dynamic via the root block."""
        return self.root.get_dynamic(identifier_str)

    def get_resampling(
        self, identifier: Identifier, target_identifier: Identifier
    ) -> "Resampling":
        """Return the (cached) resampling between the two identifiers."""
        key = (identifier.str(), target_identifier.str())
        if key not in self.resamplings:
            self.resamplings[key] = Resampling.new(identifier, target_identifier)
        return self.resamplings[key]
+
+
class ArchitectureBlock:
    """One node of an Architecture: a TreeDynamic plus the dynamic reductions,
    period aggregations and stochastic stages derived from it.

    prefix_str encodes the path from the architecture root to this block and
    prefixes all identifier strings used as cache keys on the Architecture.
    """

    def __init__(
        self,
        prefix_str: str,
        dynamic: TreeDynamic,
        architecture: Architecture,
    ):
        self.prefix_str = prefix_str
        self.dynamic = dynamic
        self.architecture = architecture

        # child structures, indexed by the index carried in identifier pieces
        self.dynamic_reductions = []
        self.period_aggregations = []
        self.stochastic_stages = []

        # register this block's own dynamic (empty identifier) in the caches
        self.get_dynamic("")
        self.architecture.underlying_dynamics[self.prefix_str] = self.dynamic

    # d_steps: the step length after the reduction
    def add_dynamic_reduction(self, d_steps: List[int]) -> "ArchitectureBlock":
        """Create a child block with the coarser step lengths d_steps.

        Raises ValueError if the borders of d_steps do not align with step
        borders of this block's dynamic.
        """
        # accumulated step borders of the current dynamic
        dynamic_acc = np.empty(self.dynamic.number_of_steps() + 1, dtype=int)
        dynamic_acc[0] = 0
        acc = 0
        for i, length in enumerate(self.dynamic.step_lengths()):
            acc += length
            dynamic_acc[i + 1] = acc

        # accumulated step borders of the reduced dynamic
        reduced_acc = np.empty(len(d_steps) + 1, dtype=int)
        reduced_acc[0] = 0
        acc = 0
        for i, length in enumerate(d_steps):
            acc += length
            reduced_acc[i + 1] = acc

        # map each reduced border to the position of the matching original border
        reduced_positions = np.empty(len(d_steps) + 1, dtype=int)
        for i, acc in enumerate(reduced_acc):
            position = np.searchsorted(dynamic_acc, acc)
            if dynamic_acc[position] != acc:
                raise ValueError(
                    "The steps in the reduced dynamic have to overlap with the steps in the original dynamic!"
                )
            reduced_positions[i] = position

        reduction = ArchitectureBlock(
            self.prefix_str
            + ReductionPiece(len(self.dynamic_reductions)).to_str()
            + ".",
            self.dynamic.sub_dynamic_p(reduced_positions),
            self.architecture,
        )
        self.dynamic_reductions.append(reduction)
        return reduction

    # periods: the segment lengths of all periods
    # period_order: the order the periods have to be arranged in order to reconstruct the original dynamic
    def add_period_aggregation(
        self, periods: List[List[int]], period_order: List[int]
    ) -> "PeriodAggregation":
        """Aggregate this block's dynamic into typical periods.

        Builds one child ArchitectureBlock per period, plus the interval
        dynamics needed to map values back onto the original timeline.
        Raises ValueError when the inputs fail any of the sanity checks below.
        """
        # sanity check 1: every period has to have the same length
        period_length = sum(segment_length for segment_length in periods[0])
        if any(
            sum(segment_length for segment_length in period) != period_length
            for period in periods[1:]
        ):
            raise ValueError("Every period has to have the same length!")

        # sanity check 2: the period length has to divide the dynamic length into the number of elements in the period order
        dynamic_length = np.sum(self.dynamic.step_lengths())
        if period_length * len(period_order) != dynamic_length:
            raise ValueError(
                "The period length has to divide the dynamic length into the number of elements in the period order!"
            )

        # sanity check 3: the dynamic has to have a time step start every period length
        # while checking, collect the within-period step borders (local_acc_set)
        # and the step-count border of each period interval (intervalls_borders)
        dynamic_step_lenghts = self.dynamic.step_lengths()
        dynamic_steps_lenghts_pointer = 0
        period_order_pointer = 0
        acc = 0
        local_acc_set = set()
        intervalls_borders = np.empty(
            int(dynamic_length / period_length) + 1, dtype=int
        )
        intervalls_borders[0] = 0
        counter = 0
        while period_order_pointer < len(period_order):
            local_acc = 0
            while acc < (period_order_pointer + 1) * period_length:
                acc += dynamic_step_lenghts[dynamic_steps_lenghts_pointer]
                local_acc += dynamic_step_lenghts[dynamic_steps_lenghts_pointer]
                local_acc_set.add(local_acc)
                dynamic_steps_lenghts_pointer += 1
                counter += 1
            if acc == (period_order_pointer + 1) * period_length:
                intervalls_borders[period_order_pointer + 1] = counter
                period_order_pointer += 1
            else:
                raise ValueError(
                    "The dynamic has to have a time step start every period length!"
                )

        # sanity check 4: all elements in period order are valid period indices
        number_of_periods = len(periods)
        if not all(0 <= item and item < number_of_periods for item in period_order):
            raise ValueError(
                "All elements in period order have to be valid period indices!"
            )

        # also collect the step borders declared by the periods themselves
        for period in periods:
            local_acc = 0
            for item in period:
                local_acc += item
                local_acc_set.add(local_acc)

        # the union of all borders defines a common root dynamic; lookup_map
        # translates a border value to its position in that root dynamic
        local_acc_list = list(local_acc_set)
        local_acc_list.sort()
        lookup_map = {item: index + 1 for index, item in enumerate(local_acc_list)}

        root_lenghts = np.empty(len(local_acc_list), dtype=int)
        last_item = 0
        for i, item in enumerate(local_acc_list):
            root_lenghts[i] = item - last_item
            last_item = item

        root_dynamic = DynamicTree(root_lenghts).root()

        # NOTE(review): local name shadows the builtin iter
        def iter(period):
            acc = 0
            yield 0
            for item in period:
                acc += item
                yield lookup_map[acc]

        # each period becomes a sub dynamic of the common root dynamic
        period_dynamics = np.empty(len(periods), dtype=object)
        for i, period in enumerate(periods):
            period_dynamics[i] = root_dynamic.sub_dynamic(
                np.fromiter(iter(period), dtype=int)
            )

        period_blocks = []
        for i, period_dynamic in enumerate(period_dynamics):
            period_blocks.append(
                ArchitectureBlock(
                    self.prefix_str
                    + PeriodPiece(len(self.period_aggregations), i).to_str()
                    + ".",
                    period_dynamic,
                    self.architecture,
                )
            )

        # redefinition replaces the per-period generator above
        def iter(dynamic_step_lenghts, start, end):
            acc = 0
            yield 0
            for i in range(start, end):
                acc += dynamic_step_lenghts[i]
                yield lookup_map[acc]

        # per interval of the original timeline: its sub dynamic, its step
        # borders, and a count of how often each period is used (n_s)
        intervall_dynamics = np.empty(len(period_order), dtype=object)
        intervall_borders = np.empty(len(period_order), dtype=object)
        n_s = np.zeros(len(periods), dtype=int)
        for i, index in enumerate(period_order):
            intervall_dynamics[i] = root_dynamic.sub_dynamic(
                np.fromiter(
                    iter(
                        dynamic_step_lenghts,
                        intervalls_borders[i],
                        intervalls_borders[i + 1],
                    ),
                    dtype=int,
                )
            )
            intervall_borders[i] = (intervalls_borders[i], intervalls_borders[i + 1])
            n_s[index] += 1

        aggregation = PeriodAggregation(
            self.dynamic.shape(),
            period_blocks,
            period_order,
            intervall_dynamics,
            intervall_borders,
            n_s,
        )
        self.period_aggregations.append(aggregation)
        self.architecture.all_period_aggregations[
            self.prefix_str, len(self.period_aggregations) - 1
        ] = aggregation

        return aggregation

    def add_stochastic_stage(self, probabilities: List[float]) -> "StochasticStage":
        """Create a stochastic stage with one scenario block per probability.

        Each scenario block shares this block's dynamic. Raises ValueError if
        the probabilities do not sum to 1.0 (within machine epsilon).
        """
        # sanity check: the sum of the probabilities has to be equal to 1.0
        if abs(sum(probabilities) - 1.0) > float_info.epsilon:
            raise ValueError("The sum of the probabilities has to be equal to 1.0!")

        stage = StochasticStage(
            [
                ArchitectureBlock(
                    self.prefix_str
                    + ScenarioPiece(len(self.stochastic_stages), i).to_str()
                    + ".",
                    self.dynamic,
                    self.architecture,
                )
                for i in range(len(probabilities))
            ],
            probabilities,
        )
        self.stochastic_stages.append(stage)
        return stage

    def reduction(self, reduction_index: int) -> "ArchitectureBlock":
        """The child block of the dynamic reduction at reduction_index."""
        return self.dynamic_reductions[reduction_index]

    def aggregation(self, aggregation_index: int) -> "PeriodAggregation":
        """The period aggregation at aggregation_index."""
        return self.period_aggregations[aggregation_index]

    def period(self, aggregation_index: int, period_index: int) -> "ArchitectureBlock":
        """The period block period_index of aggregation aggregation_index."""
        return self.period_aggregations[aggregation_index].period(period_index)

    def get_dynamic(self, identifier_str: str) -> "ArchitectureDynamic":
        """Resolve identifier_str (relative to this block) to a dynamic.

        Results are cached on the Architecture under the full (prefixed)
        identifier string. A single-entry index yields a
        SimpleArchitectureDynamic, otherwise a ComplexArchitectureDynamic.
        """
        root_identifier_str = self.prefix_str + identifier_str
        if root_identifier_str in self.architecture.architecture_dynamics:
            return self.architecture.architecture_dynamics[root_identifier_str][0]

        identifier = Identifier.from_str(identifier_str)

        dynamic_index = dict()
        ArchitectureBlock._build_dynamic(
            identifier, identifier.get_root_pointer(), self, dynamic_index, (0,)
        )

        root_identifier = identifier.add_prefix_from_str(self.prefix_str)

        if len(dynamic_index) == 1:
            dynamic = SimpleArchitectureDynamic(
                dynamic_index[(0,)][2], self.architecture, root_identifier
            )
        else:
            dynamic = ComplexArchitectureDynamic(
                dynamic_index, self.architecture, root_identifier
            )

        self.architecture.architecture_dynamics[root_identifier_str] = (
            dynamic,
            root_identifier,
        )
        return dynamic

    @staticmethod
    def _build_dynamic(
        identifier: Identifier,
        pointer: int,
        block: "ArchitectureBlock",
        dynamic_index: Dict,
        prefix: Tuple[int],
    ):
        """Recursively fill dynamic_index while walking identifier from pointer.

        Each dynamic_index entry maps a tree position (tuple key) to
        (length, number of leafs, TreeDynamic or None); leaf entries carry the
        block's dynamic, inner aggregation/stage entries carry None.
        """
        piece = identifier.pieces[pointer]

        if isinstance(piece, NonePiece):
            # leaf: the identifier ends here, record the block's own dynamic
            dynamic_index[prefix] = (block.dynamic.shape(), 1, block.dynamic)

        elif isinstance(piece, ReductionPiece):
            # pass-through: descend into the reduced block, same tree position
            ArchitectureBlock._build_dynamic(
                identifier,
                identifier.children[pointer][0],
                block.dynamic_reductions[piece.index],
                dynamic_index,
                prefix,
            )

        elif isinstance(piece, AggregationPiece):
            if len(identifier.children[pointer]) != len(
                block.period_aggregations[piece.index].period_blocks
            ):
                raise ValueError(
                    "Number of periods specified in the identifier string does not match number of periods found!"
                )

            # inner node: one child entry per period, totals recorded under prefix
            length = 0
            n_leafs = 0
            for child_number, (child_pointer, child_block) in enumerate(
                zip(
                    identifier.children[pointer],
                    block.period_aggregations[piece.index].period_blocks,
                )
            ):
                ArchitectureBlock._build_dynamic(
                    identifier,
                    child_pointer,
                    child_block,
                    dynamic_index,
                    prefix + (child_number,),
                )
                length += dynamic_index[prefix + (child_number,)][0]
                n_leafs += dynamic_index[prefix + (child_number,)][1]
            dynamic_index[prefix] = (length, n_leafs, None)

        elif isinstance(piece, PeriodPiece):
            # pass-through into one specific period block
            ArchitectureBlock._build_dynamic(
                identifier,
                identifier.children[pointer][0],
                block.period_aggregations[piece.index_1].period_blocks[piece.index_2],
                dynamic_index,
                prefix,
            )

        elif isinstance(piece, StagePiece):
            if len(identifier.children[pointer]) != len(
                block.stochastic_stages[piece.index].scenario_blocks
            ):
                raise ValueError(
                    "Number of scenarios specified in the identifier string does not match number of scenarios found!"
                )

            # inner node: one child entry per scenario, totals recorded under prefix
            length = 0
            n_leafs = 0
            for child_number, (child_pointer, child_block) in enumerate(
                zip(
                    identifier.children[pointer],
                    block.stochastic_stages[piece.index].scenario_blocks,
                )
            ):
                ArchitectureBlock._build_dynamic(
                    identifier,
                    child_pointer,
                    child_block,
                    dynamic_index,
                    prefix + (child_number,),
                )
                length += dynamic_index[prefix + (child_number,)][0]
                n_leafs += dynamic_index[prefix + (child_number,)][1]
            dynamic_index[prefix] = (length, n_leafs, None)

        elif isinstance(piece, ScenarioPiece):
            # pass-through into one specific scenario block
            ArchitectureBlock._build_dynamic(
                identifier,
                identifier.children[pointer][0],
                block.stochastic_stages[piece.index_1].scenario_blocks[piece.index_2],
                dynamic_index,
                prefix,
            )
+
+
class PeriodAggregation:
    """Maps between an unaggregated timeline and its typical-period blocks.

    Constructed by ArchitectureBlock.add_period_aggregation; the aggregate_*
    methods expand per-period values/variables back onto the full timeline.
    """

    def __init__(
        self,
        unaggregated_shape: int,
        period_blocks: List[ArchitectureBlock],
        period_order: List[int],
        intervall_dynamics,
        intervall_borders,
        n_s,
    ):  # intervall_dynamics: type hinting a np-array of TreeDynamics, intervall_borders: type hinting a np-array of (int, int), n_s: type hinting a np-array of ints
        self.unaggregated_shape = unaggregated_shape
        self.period_blocks = period_blocks
        # period_order[i] is the period index occupying interval i of the timeline
        self.period_order = period_order
        self.intervall_dynamics = intervall_dynamics
        self.intervall_borders = intervall_borders
        # n_s[p]: how often period p occurs in period_order
        self.n_s = n_s
        self.n = np.sum(n_s)

    def period(self, period_index: int) -> ArchitectureBlock:
        """The ArchitectureBlock of the period with the given index."""
        return self.period_blocks[period_index]

    def aggregate_up(
        self, values
    ):  # values: type hinting a np-array, type hinting a np-array
        """Expand per-period value arrays onto the unaggregated timeline.

        values[p] holds the values of period p; each interval of the timeline
        is filled by resampling its period's values onto the interval dynamic.
        """
        unaggregated_values = np.empty(
            (values[0].shape[0], self.unaggregated_shape), dtype=values[0].dtype
        )
        for period_index, intervall_dynamic, (intervall_start, intervall_end) in zip(
            self.period_order, self.intervall_dynamics, self.intervall_borders
        ):
            period_dynamic = self.period_blocks[period_index].dynamic
            unaggregated_values[
                :, intervall_start:intervall_end
            ] = period_dynamic.dynamic_tree.get_assignment(
                period_dynamic, intervall_dynamic
            ).resample(
                values[period_index]
            )
        return unaggregated_values

    def aggregate_variable_up(
        self, variables
    ):  # variables: type hinting a np-array, type hinting a np-array
        """Like aggregate_up, but for arrays of optimization variables."""
        unaggregated_variables = np.empty(self.unaggregated_shape, dtype=object)
        for period_index, intervall_dynamic, (intervall_start, intervall_end) in zip(
            self.period_order, self.intervall_dynamics, self.intervall_borders
        ):
            period_dynamic = self.period_blocks[period_index].dynamic
            unaggregated_variables[
                intervall_start:intervall_end
            ] = period_dynamic.dynamic_tree.get_assignment(
                period_dynamic, intervall_dynamic
            ).resample_variable(
                variables[period_index]
            )
        return unaggregated_variables

    def aggregate_variable_down(
        self,
        variable,
        name: str,
        start_block: "Model_Library.optimization_model.OptimizationBlock",
        end_blocks: List["Model_Library.optimization_model.OptimizationBlock"],
    ):  # variable: type hinting a np-array, type hinting a np-array
        """Create per-period pyomo variables coupled to an unaggregated one.

        Adds one pyomo Var per end block (named *name*), expands them with
        aggregate_variable_up, and constrains the given unaggregated variable
        to equal the expansion on every time step of start_block.
        """
        aggregated_variables = np.empty(len(self.period_blocks), dtype=object)
        for i, end_block in enumerate(end_blocks):
            aggregated_variable = pyo.Var(end_block.T)
            end_block.add(name, aggregated_variable)
            data = np.empty(len(aggregated_variable), dtype=object)
            for j in range(len(data)):
                data[j] = aggregated_variable[j]
            aggregated_variables[i] = data
        unaggregated_variable = self.aggregate_variable_up(aggregated_variables)

        def rule(m, t):
            return variable[t] == unaggregated_variable[t]

        start_block.add(name + "_resample", pyo.Constraint(start_block.T, rule=rule))
        return aggregated_variables
+
+
class StochasticStage:
    """Groups the scenario blocks of a stochastic stage with their probabilities."""

    def __init__(
        self, scenario_blocks: List[ArchitectureBlock], probabilities: List[float]
    ):
        self.probabilities = probabilities
        self.scenario_blocks = scenario_blocks

    def scenario(self, scenario_index: int) -> ArchitectureBlock:
        """The ArchitectureBlock of the scenario with the given index."""
        return self.scenario_blocks[scenario_index]
+
+
class ArchitectureDynamic(Dynamic):
    """Abstract Dynamic that belongs to an Architecture and exposes views
    into ndarrays laid out along that dynamic."""

    # returns the Architecture that this dynamic is from
    @abc.abstractmethod
    def get_architecture(self) -> Architecture:
        pass

    # returns the root identifier of the dynamic
    @abc.abstractmethod
    def get_root_identifier(self) -> Identifier:
        pass

    # returns the start and end of a slice to view into a ndarray
    @abc.abstractmethod
    def get_view(self, index: Tuple[int]) -> Tuple[int]:
        pass

    # returns the index to view into a ndarray holding the first state values
    @abc.abstractmethod
    def get_fs_view(self, index: Tuple[int]) -> int:
        pass
+        pass
+
+
+# an ArchitectureDynamic representing the dynamic of an architecture block
class SimpleArchitectureDynamic(ArchitectureDynamic):
    """ArchitectureDynamic of a single architecture block.

    Thin wrapper that delegates all Dynamic queries to the block's
    TreeDynamic; views are trivial because there is only one sub-dynamic.
    """

    # dynamic: the dynamic of the block
    # architecture: the Architecture that this dynamic is from
    # root_identifier: the root identifier of the dynamic
    def __init__(
        self,
        dynamic: TreeDynamic,
        architecture: Architecture,
        root_identifier: Identifier,
    ):
        self.dynamic = dynamic
        self.architecture = architecture
        self.root_identifier = root_identifier
        # single sub-dynamic: the only view spans the whole array
        self.views = {(): (0, self.dynamic.shape())}
        self.fs_views = {(): 0}

    def number_of_steps(self) -> int:
        return self.dynamic.number_of_steps()

    def shape(self) -> int:
        return self.dynamic.shape()

    def first_state_shape(self) -> int:
        return self.dynamic.first_state_shape()

    def pandas_index(self) -> pd.Index:
        return self.dynamic.pandas_index()

    def first_state_pandas_index(self) -> pd.Index:
        return self.dynamic.first_state_pandas_index()

    def step_size(self, position: int) -> float:
        return self.dynamic.step_size(position)

    def step_lengths(self):  # type hinting a np-array of ints
        return self.dynamic.step_lengths()

    def _all_indices(self):  # type hinting a np-array of ints
        return self.dynamic._all_indices()

    def get_architecture(self) -> Architecture:
        return self.architecture

    def get_root_identifier(self) -> Identifier:
        return self.root_identifier

    def get_view(self, index: Tuple[int]) -> Tuple[int]:
        return self.views[index]

    def get_fs_view(self, index: Tuple[int]) -> int:
        return self.fs_views[index]
+
+
+# an ArchitectureDynamic representing the dynamic of a PeriodAggregation or StochasticStage of an architecture block
+class ComplexArchitectureDynamic(ArchitectureDynamic):
+    # index: the index constructed for this dynamic
+    # architecture: the Architecture that this dynamic is from
+    # root_identifier: the root identifier of the dynamic
+    def __init__(
+        self, index: Dict, architecture: Architecture, root_identifier: Identifier
+    ):
+        levels = max(len(k) for k in index)
+
+        self.length = index[(0,)][0]
+        self.index_array = np.zeros((levels, self.length), dtype=int)
+
+        self.n_leafs = index[(0,)][1]
+        self.first_state_index_array = np.zeros((levels, self.n_leafs), dtype=int)
+
+        self.views = dict()
+        self.fs_views = dict()
+
+        keys = list(index.keys())
+        keys.sort()
+
+        previous_key = keys[0]
+        current_level = 0
+        current_offsets = np.zeros(levels, dtype=int)
+        current_first_state_offsets = np.zeros(levels, dtype=int)
+        current_indices = np.zeros(levels, dtype=int)
+
+        self.views[()] = (0, self.length)
+
+        for key in keys[1:]:
+            v_length, v_n_leavs, v_dynamic = index[key]
+
+            self.views[key[1:]] = (
+                current_offsets[current_level],
+                current_offsets[current_level] + v_length,
+            )
+
+            if len(key) > len(previous_key):
+                self.index_array[
+                    current_level,
+                    current_offsets[current_level] : current_offsets[current_level]
+                    + v_length,
+                ] = current_indices[current_level]
+
+                self.first_state_index_array[
+                    current_level,
+                    current_first_state_offsets[
+                        current_level
+                    ] : current_first_state_offsets[current_level]
+                    + v_n_leavs,
+                ] = current_indices[current_level]
+
+                current_offsets[current_level] += v_length
+                current_first_state_offsets[current_level] += v_n_leavs
+
+                current_indices[current_level] += 1
+                current_level += 1
+                current_indices[current_level] = 0
+
+            elif len(key) == len(previous_key):
+                current_level -= 1
+
+                self.index_array[
+                    current_level,
+                    current_offsets[current_level] : current_offsets[current_level]
+                    + v_length,
+                ] = current_indices[current_level]
+
+                self.first_state_index_array[
+                    current_level,
+                    current_first_state_offsets[
+                        current_level
+                    ] : current_first_state_offsets[current_level]
+                    + v_n_leavs,
+                ] = current_indices[current_level]
+
+                current_offsets[current_level] += v_length
+                current_first_state_offsets[current_level] += v_n_leavs
+
+                current_indices[current_level] += 1
+                current_level += 1
+                current_indices[current_level] = 0
+
+            else:
+                current_level = len(key) - 2
+
+                self.index_array[
+                    current_level,
+                    current_offsets[current_level] : current_offsets[current_level]
+                    + v_length,
+                ] = current_indices[current_level]
+
+                self.first_state_index_array[
+                    current_level,
+                    current_first_state_offsets[
+                        current_level
+                    ] : current_first_state_offsets[current_level]
+                    + v_n_leavs,
+                ] = current_indices[current_level]
+
+                current_offsets[current_level] += v_length
+                current_first_state_offsets[current_level] += v_n_leavs
+
+                current_indices[current_level] += 1
+                current_level += 1
+                current_indices[current_level] = 0
+
+            if v_dynamic is not None:
+                self.index_array[
+                    current_level,
+                    current_offsets[current_level] : current_offsets[current_level]
+                    + v_length,
+                ] = range(v_length)
+
+                self.first_state_index_array[
+                    current_level,
+                    current_first_state_offsets[
+                        current_level
+                    ] : current_first_state_offsets[current_level]
+                    + v_n_leavs,
+                ] = -1
+
+                self.fs_views[key[1:]] = current_first_state_offsets[current_level]
+
+                current_offsets[current_level:] += v_length
+                current_first_state_offsets[current_level:] += v_n_leavs
+
+            previous_key = key
+
+        self.architecture = architecture
+        self.root_identifier = root_identifier
+
    def number_of_steps(self) -> int:
        """Return the number of time steps; must be provided by subclasses."""
        raise NotImplementedError
+
    def shape(self) -> int:
        """Return the flat length of value arrays laid out on this dynamic."""
        return self.length
+
    def first_state_shape(self) -> int:
        """Return the flat length of first-state arrays (one entry per leaf)."""
        return self.n_leafs
+
    def pandas_index(self) -> pd.Index:
        """Return a MultiIndex built from the per-level index arrays."""
        return pd.MultiIndex.from_arrays(self.index_array)
+
    def first_state_pandas_index(self) -> pd.Index:
        """Return a MultiIndex built from the per-level first-state index arrays."""
        return pd.MultiIndex.from_arrays(self.first_state_index_array)
+
    def step_size(self, position: int) -> float:
        """Return the size of the step at ``position``; must be provided by subclasses."""
        raise NotImplementedError
+
    def step_lengths(self):  # type hinting a np-array of ints
        """Return the lengths of all steps; must be provided by subclasses."""
        raise NotImplementedError
+
    def _all_indices(self):  # type hinting a np-array of ints
        """Return all step indices; must be provided by subclasses."""
        raise NotImplementedError
+
    def get_architecture(self) -> Architecture:
        """Return the architecture this dynamic belongs to."""
        return self.architecture
+
    def get_root_identifier(self) -> Identifier:
        """Return the root identifier of this dynamic."""
        return self.root_identifier
+
    def get_view(self, index: Tuple[int]) -> Tuple[int]:
        """Return the (start, end) offsets of the view addressed by ``index``."""
        return self.views[index]
+
    def get_fs_view(self, index: Tuple[int]) -> int:
        """Return the first-state offset of the view addressed by ``index``."""
        return self.fs_views[index]
+
+
class Variable:
    """Base class for nodes of a resampling graph (Start, Container, End)."""

    # Prefix string of the dynamic this variable's value lives on
    # (used e.g. as ``block_prefix + id`` to address optimization blocks).
    id: str
+
+
class Start(Variable):
    """A source node of a resampling graph, referencing a view of the input."""

    def __init__(self, view_index: Tuple[int], id: str):
        self.id = id
        self.view_index = view_index
+
+
class Container(Variable):
    """An intermediate node of a resampling graph, identified by a running index."""

    def __init__(self, index: int, id: str):
        self.id = id
        self.index = index
+
+
class End(Variable):
    """A sink node of a resampling graph, referencing a view of the output."""

    def __init__(self, view_index: Tuple[int], id: str):
        self.id = id
        self.view_index = view_index
+
+
class ResamplingStep:
    """One edge of a resampling graph, transforming start variable(s) into end variable(s).

    NOTE(review): the methods are decorated with ``abc.abstractmethod`` but the
    class does not inherit from ``abc.ABC``, so abstractness is not enforced at
    instantiation time — confirm whether this is intentional.
    """

    @abc.abstractmethod
    def propagate(self, is_bool_safe: Dict, is_safe: Dict, is_fs_safe: Dict):
        # Derive the end variable(s)' safety flags from the start variable(s)'.
        pass

    @abc.abstractmethod
    def execute(self, arcnitecture: Architecture, v_map: Dict):
        # Transform plain values stored in v_map.
        pass

    @abc.abstractmethod
    def execute_fs(self, arcnitecture: Architecture, v_map: Dict):
        # Transform first-state values stored in v_map.
        pass

    @abc.abstractmethod
    def execute_variable(
        self,
        arcnitecture: Architecture,
        v_map: Dict,
        name: str,
        block_prefix: str,
        model: "Model_Library.optimization_model.OptimizationModel",
    ):
        # Transform optimization variables stored in v_map.
        pass
+
+
class CopyStep(ResamplingStep):
    """Forwards a value unchanged from its start variable to its end variable."""

    def __init__(self, start_v: Variable, end_v: Variable):
        self.end_v = end_v
        self.start_v = start_v

    def propagate(self, is_bool_safe: Dict, is_safe: Dict, is_fs_safe: Dict):
        # A plain copy preserves every safety property of the source.
        for flags in (is_bool_safe, is_safe, is_fs_safe):
            flags[self.end_v] = flags[self.start_v]

    def execute(self, arcnitecture: Architecture, v_map: Dict):
        v_map[self.end_v] = v_map[self.start_v]

    def execute_fs(self, arcnitecture: Architecture, v_map: Dict):
        v_map[self.end_v] = v_map[self.start_v]

    def execute_variable(
        self,
        arcnitecture: Architecture,
        v_map: Dict,
        name: str,
        block_prefix: str,
        model: "Model_Library.optimization_model.OptimizationModel",
    ):
        v_map[self.end_v] = v_map[self.start_v]
+
+
class AssignmentStep(ResamplingStep):
    """Resamples a value from the start variable's dynamic to the end variable's."""

    def __init__(self, start_v: Variable, end_v: Variable):
        self.end_v = end_v
        self.start_v = start_v

    def propagate(self, is_bool_safe: Dict, is_safe: Dict, is_fs_safe: Dict):
        is_safe[self.end_v] = True
        is_fs_safe[self.end_v] = is_fs_safe[self.start_v]
        # Short-circuit: the identifier check is only consulted if the source
        # is itself bool safe.
        is_bool_safe[self.end_v] = is_bool_safe[
            self.start_v
        ] and Identifier.is_bool_safe(self.start_v.id, self.end_v.id)

    def _assignment(self, arcnitecture: Architecture):
        # Look up the assignment between the two underlying dynamics.
        start_dynamic = arcnitecture.underlying_dynamics[self.start_v.id]
        end_dynamic = arcnitecture.underlying_dynamics[self.end_v.id]
        return start_dynamic.dynamic_tree.get_assignment(start_dynamic, end_dynamic)

    def execute(self, arcnitecture: Architecture, v_map: Dict):
        v_map[self.end_v] = self._assignment(arcnitecture).resample(
            v_map[self.start_v]
        )

    def execute_fs(self, arcnitecture: Architecture, v_map: Dict):
        # First-state values are forwarded unchanged.
        v_map[self.end_v] = v_map[self.start_v]

    def execute_variable(
        self,
        arcnitecture: Architecture,
        v_map: Dict,
        name: str,
        block_prefix: str,
        model: "Model_Library.optimization_model.OptimizationModel",
    ):
        v_map[self.end_v] = self._assignment(arcnitecture).resample_variable(
            v_map[self.start_v]
        )
+
+
class AggregationUpStep(ResamplingStep):
    """Aggregates several start variables upwards into a single end variable.

    The end variable is not known at construction time and is attached later
    via ``set_end``.
    """

    def __init__(self, start_vs: Variable, id: Tuple[str, int]):
        self.id = id
        self.start_vs = start_vs
        self.end_v = None

    def set_end(self, end_v: Variable):
        self.end_v = end_v

    def propagate(self, is_bool_safe: Dict, is_safe: Dict, is_fs_safe: Dict):
        # Aggregating up yields a well-defined value, but neither bool- nor
        # first-state-safety survive the aggregation.
        is_bool_safe[self.end_v] = False
        is_fs_safe[self.end_v] = False
        is_safe[self.end_v] = True

    def _collect(self, v_map: Dict):
        # 1-d object array so differently shaped member arrays stay separate.
        data = np.empty(len(self.start_vs), dtype=object)
        for pos, start_v in enumerate(self.start_vs):
            data[pos] = v_map[start_v]
        return data

    def execute(self, arcnitecture: Architecture, v_map: Dict):
        aggregation = arcnitecture.all_period_aggregations[self.id]
        v_map[self.end_v] = aggregation.aggregate_up(self._collect(v_map))

    def execute_fs(self, arcnitecture: Architecture, v_map: Dict):
        pass

    def execute_variable(
        self,
        arcnitecture: Architecture,
        v_map: Dict,
        name: str,
        block_prefix: str,
        model: "Model_Library.optimization_model.OptimizationModel",
    ):
        aggregation = arcnitecture.all_period_aggregations[self.id]
        v_map[self.end_v] = aggregation.aggregate_variable_up(self._collect(v_map))
+
+
class AggregationDownStep(ResamplingStep):
    """Distributes one start variable downwards onto several end variables.

    The end variables are attached one by one via ``set_end``.
    """

    def __init__(self, start_v: Variable, id: Tuple[str, int], n_ends: int):
        self.id = id
        self.start_v = start_v
        self.end_vs = np.empty(n_ends, dtype=object)
        self.i = 0

    def set_end(self, end_v: Variable):
        self.end_vs[self.i] = end_v
        self.i += 1

    def propagate(self, is_bool_safe: Dict, is_safe: Dict, is_fs_safe: Dict):
        # Disaggregation is marked unsafe in every sense; only optimization
        # variables are distributed (execute / execute_fs are no-ops).
        for flags in (is_bool_safe, is_safe, is_fs_safe):
            for end_v in self.end_vs:
                flags[end_v] = False

    def execute(self, arcnitecture: Architecture, v_map: Dict):
        pass

    def execute_fs(self, arcnitecture: Architecture, v_map: Dict):
        pass

    def execute_variable(
        self,
        arcnitecture: Architecture,
        v_map: Dict,
        name: str,
        block_prefix: str,
        model: "Model_Library.optimization_model.OptimizationModel",
    ):
        start_block = model.all_blocks[block_prefix + self.start_v.id]
        end_blocks = [
            model.all_blocks[block_prefix + end_v.id] for end_v in self.end_vs
        ]
        aggregation = arcnitecture.all_period_aggregations[self.id]
        data = aggregation.aggregate_variable_down(
            v_map[self.start_v], name, start_block, end_blocks
        )
        for pos, end_v in enumerate(self.end_vs):
            v_map[end_v] = data[pos]
+
+
class ViewConstructor:
    """Tracks a prefix of architecture pieces and builds view indices from it.

    Only pieces flagged as "in view" contribute their child index to the
    constructed view tuple.
    """

    def __init__(self):
        self.prefix = []
        self.in_view = []

    def construct(self) -> Tuple[int]:
        indices = []
        for piece, keep in zip(self.prefix, self.in_view):
            if keep:
                indices.append(piece.index_2)
        return tuple(indices)

    def prefix_str(self) -> str:
        return Identifier.prefix_pieces_to_str(self.prefix)

    def add(self, piece: ArchitecturePiece, in_view_item: bool):
        self.in_view.append(in_view_item)
        self.prefix.append(piece)

    def remove(self):
        self.in_view.pop()
        self.prefix.pop()
+
+
class ResamplingConstructor:
    """Constructs the resampling graph between two identifiers ``a`` and ``b``.

    Walks both identifier trees in lockstep (``contruct``), emitting
    :class:`ResamplingStep` instances together with their start/container/end
    variables, then propagates safety flags through the steps. After
    ``__init__`` the attributes ``is_bool_safe``, ``is_safe`` and
    ``is_fs_safe`` are plain booleans (collapsed over all end variables).
    """

    def __init__(self, a: Identifier, b: Identifier):
        self.a = a
        self.b = b
        self.a_view = ViewConstructor()
        self.b_view = ViewConstructor()
        self.steps = []
        self.v_map = dict()
        self.is_bool_safe = dict()
        self.is_safe = dict()
        self.is_fs_safe = dict()
        self.start_vs = []
        self.end_vs = []
        self.n_containers = 0
        # Indices into self.steps of aggregation steps whose end variable has
        # not yet been attached via set_end().
        self.open_ends = []

        self.contruct(self.a.get_root_pointer(), self.b.get_root_pointer())

        for step in self.steps:
            step.propagate(self.is_bool_safe, self.is_safe, self.is_fs_safe)

        # Collapse the per-variable flag dicts into overall booleans; from
        # here on these three attributes are bools, not dicts.
        self.is_bool_safe = all(self.is_bool_safe[end_v] for end_v in self.end_vs)
        self.is_safe = all(self.is_safe[end_v] for end_v in self.end_vs)
        self.is_fs_safe = all(self.is_fs_safe[end_v] for end_v in self.end_vs)

    def contruct(self, a_pointer: int, b_pointer: int):
        """Recursively connect the subtree at ``a_pointer`` to the one at ``b_pointer``.

        (sic: method name "contruct" kept as-is to preserve the interface.)
        """
        a_piece = self.a.pieces[a_pointer]
        b_piece = self.b.pieces[b_pointer]

        if a_piece.equal(b_piece):
            if isinstance(a_piece, NonePiece):
                # Both trees end here: connect source leaf to target leaf.
                start_v = Start(self.a_view.construct(), self.a_view.prefix_str())
                self.add_start(start_v)

                end_v = End(self.b_view.construct(), self.b_view.prefix_str())
                self.add_end(end_v)

                self.steps.append(CopyStep(start_v, end_v))
                return

            elif isinstance(a_piece, (ReductionPiece, PeriodPiece, ScenarioPiece)):
                # Single-child pieces: descend without contributing to the view index.
                self.a_view.add(a_piece, False)
                self.b_view.add(b_piece, False)
                self.contruct(
                    self.a.children[a_pointer][0], self.b.children[b_pointer][0]
                )
                self.a_view.remove()
                self.b_view.remove()
                return

            else:
                # Branching pieces: recurse into matching children pairwise.
                for i, (a_child, b_child) in enumerate(
                    zip(self.a.children[a_pointer], self.b.children[b_pointer])
                ):
                    self.a_view.add(a_piece.child_piece(i), True)
                    self.b_view.add(b_piece.child_piece(i), True)
                    self.contruct(a_child, b_child)
                    self.a_view.remove()
                    self.b_view.remove()
                return

        else:
            if (
                isinstance(a_piece, AggregationPiece)
                and isinstance(b_piece, PeriodPiece)
                and a_piece.index == b_piece.index_1
            ) or (
                isinstance(a_piece, StagePiece)
                and isinstance(b_piece, ScenarioPiece)
                and a_piece.index == b_piece.index_1
            ):
                # b selects one concrete child (index_2) of a's
                # aggregation/stage: descend into that child on the a side.
                # NOTE(review): b_piece is added to *both* views here (mirrored
                # below with a_piece) — presumably because the in-view entry
                # must carry the concrete child index; confirm.
                self.a_view.add(b_piece, True)
                self.b_view.add(b_piece, False)
                self.contruct(
                    self.a.children[a_pointer][b_piece.index_2],
                    self.b.children[b_pointer][0],
                )
                self.a_view.remove()
                self.b_view.remove()
                return

            elif (
                isinstance(a_piece, PeriodPiece)
                and isinstance(b_piece, AggregationPiece)
                and a_piece.index_1 == b_piece.index
            ) or (
                isinstance(a_piece, ScenarioPiece)
                and isinstance(b_piece, StagePiece)
                and a_piece.index_1 == b_piece.index
            ):
                # Mirror case: a selects one concrete child of b's
                # aggregation/stage.
                self.a_view.add(a_piece, False)
                self.b_view.add(a_piece, True)
                self.contruct(
                    self.a.children[a_pointer][0],
                    self.b.children[b_pointer][a_piece.index_2],
                )
                self.a_view.remove()
                self.b_view.remove()
                return

        # Structures diverge: pull the source subtree up to one variable,
        # then distribute it down over the target subtree.
        start_v, start_id = self.construct_from(a_pointer)
        self.construct_to(b_pointer, start_v, start_id)

    def construct_from(self, a_pointer: int) -> Tuple[Union[Variable, None], str]:
        """Descend the source tree below ``a_pointer`` and pull its value up.

        Returns:
            A ``(variable, id)`` pair. The variable is ``None`` when the value
            is produced by a still-open aggregation step (see ``open_ends``);
            the id names the dynamic the value lives on.
        """
        a_piece = self.a.pieces[a_pointer]

        if isinstance(a_piece, NonePiece):
            # Leaf: the value is read directly from the input.
            start_v = Start(self.a_view.construct(), self.a_view.prefix_str())
            self.add_start(start_v)
            return start_v, start_v.id

        elif isinstance(a_piece, ReductionPiece):
            # Reductions are transparent: descend into the single child.
            self.a_view.add(a_piece, False)
            start_v, start_id = self.construct_from(self.a.children[a_pointer][0])
            self.a_view.remove()
            return start_v, start_id

        elif isinstance(a_piece, AggregationPiece):
            # Pull every child up, align each on this level's child dynamic,
            # then aggregate them into one value.
            interim_vs = np.empty(len(self.a.children[a_pointer]), dtype=object)
            for i, a_child in enumerate(self.a.children[a_pointer]):
                self.a_view.add(a_piece.child_piece(i), True)
                start_v, start_id = self.construct_from(a_child)
                interim_id = self.a_view.prefix_str()
                self.a_view.remove()

                if start_v is None:
                    # Child value comes from an open aggregation step:
                    # materialize it into a container and close that step.
                    start_v = self.add_container(start_id)
                    if len(self.open_ends) > 0:
                        self.steps[self.open_ends[-1]].set_end(start_v)
                        self.open_ends.pop()

                if start_id == interim_id:
                    interim_vs[i] = start_v
                else:
                    # Resample onto the common child dynamic first.
                    interim_v = self.add_container(interim_id)
                    self.steps.append(AssignmentStep(start_v, interim_v))
                    interim_vs[i] = interim_v

            self.steps.append(
                AggregationUpStep(interim_vs, (self.a_view.prefix_str(), a_piece.index))
            )
            self.open_ends.append(len(self.steps) - 1)
            return None, self.a_view.prefix_str()

        else:
            raise ValueError(f"Invalid ArchitecturePiece {a_piece}!")

    def construct_to(
        self, b_pointer: int, start_v: Union[Variable, None], start_id: str
    ):
        """Descend the target tree below ``b_pointer``, distributing the value
        ``start_v`` (living on dynamic ``start_id``) down to all target leafs.
        """
        b_piece = self.b.pieces[b_pointer]

        if isinstance(b_piece, NonePiece):
            # Leaf: write the value into the output.
            end_v = End(self.b_view.construct(), self.b_view.prefix_str())
            self.add_end(end_v)
            end_id = end_v.id
            if start_id == end_id:
                if start_v is None:
                    # Value comes straight out of the open aggregation step.
                    # NOTE(review): unlike construct_from, the open end is not
                    # popped here — confirm stale indices cannot be popped and
                    # re-closed later.
                    self.steps[self.open_ends[-1]].set_end(end_v)
                else:
                    self.steps.append(CopyStep(start_v, end_v))
            else:
                if start_v is None:
                    start_v = self.add_container(start_id)
                    self.steps[self.open_ends[-1]].set_end(start_v)
                self.steps.append(AssignmentStep(start_v, end_v))
            return

        elif isinstance(b_piece, ReductionPiece):
            # Reductions are transparent: descend into the single child.
            self.b_view.add(b_piece, False)
            self.construct_to(self.b.children[b_pointer][0], start_v, start_id)
            self.b_view.remove()
            return

        elif isinstance(b_piece, AggregationPiece):
            # Materialize the value, align it on this level's dynamic, then
            # distribute it down to all children.
            if start_v is None:
                start_v = self.add_container(start_id)
                self.steps[self.open_ends[-1]].set_end(start_v)

            interim_id = self.b_view.prefix_str()
            if start_id == interim_id:
                interim_v = start_v
            else:
                interim_v = self.add_container(interim_id)
                self.steps.append(AssignmentStep(start_v, interim_v))

            self.steps.append(
                AggregationDownStep(
                    interim_v,
                    (interim_id, b_piece.index),
                    len(self.b.children[b_pointer]),
                )
            )
            self.open_ends.append(len(self.steps) - 1)

            for i, b_child in enumerate(self.b.children[b_pointer]):
                self.b_view.add(b_piece.child_piece(i), True)
                self.construct_to(b_child, None, self.b_view.prefix_str())
                self.b_view.remove()
            self.open_ends.pop()
            return

        else:
            raise ValueError(f"Invalid ArchitecturePiece {b_piece}!")

    def add_start(self, start_v: Start):
        """Register a start variable; start values are safe in every sense."""
        self.v_map[start_v] = ()
        self.is_bool_safe[start_v] = True
        self.is_safe[start_v] = True
        self.is_fs_safe[start_v] = True
        self.start_vs.append(start_v)

    def add_container(self, id: str) -> Container:
        """Create a fresh intermediate container variable on dynamic ``id``."""
        container = Container(self.n_containers, id)
        self.n_containers += 1
        # () is a falsy placeholder; the real entries are written by the
        # propagate/execute passes. TODO(review): confirm the placeholders are
        # never read before being overwritten.
        self.v_map[container] = ()
        self.is_bool_safe[container] = ()
        self.is_safe[container] = ()
        self.is_fs_safe[container] = ()
        return container

    def add_end(self, end_v: End):
        """Register an end variable (flags are filled by the propagate pass)."""
        self.v_map[end_v] = ()
        self.is_bool_safe[end_v] = ()
        self.is_safe[end_v] = ()
        self.is_fs_safe[end_v] = ()
        self.end_vs.append(end_v)
+
+
+# TODO change order of dynamic, value in execute functions
class Resampling:
    """Executable resampling graph between two dynamics of one architecture.

    Instances are built by :meth:`new`, which runs a
    :class:`ResamplingConstructor` and takes over its steps, variables and
    collapsed safety flags.
    """

    def __init__(
        self,
        steps: List[ResamplingStep],
        v_map: Dict,
        # The three flags arrive as plain bools (ResamplingConstructor
        # collapses the per-variable dicts via all(...)).
        is_bool_safe: bool,
        is_safe: bool,
        is_fs_safe: bool,
        start_vs: List[Start],
        end_vs: List[End],
    ):
        self.steps = steps
        self.v_map = v_map
        self.is_bool_safe = is_bool_safe
        self.is_safe = is_safe
        self.is_fs_safe = is_fs_safe
        self.start_vs = start_vs
        self.end_vs = end_vs

    @staticmethod
    def new(a: Identifier, b: Identifier) -> "Resampling":
        """Construct the resampling graph from identifier ``a`` to ``b``."""
        constructor = ResamplingConstructor(a, b)
        return Resampling(
            constructor.steps,
            constructor.v_map,
            constructor.is_bool_safe,
            constructor.is_safe,
            constructor.is_fs_safe,
            constructor.start_vs,
            constructor.end_vs,
        )

    def execute(
        self, values, dynamic: ArchitectureDynamic, target_dynamic: ArchitectureDynamic
    ):  # values: type hinting a np-array
        """Resample the rows of ``values`` from ``dynamic`` onto ``target_dynamic``.

        Raises:
            ValueError: if this resampling is not value-safe.
        """
        if not self.is_safe:
            raise ValueError("Resampling cannot resample values!")
        architecture = dynamic.get_architecture()
        # Feed the input views into the start variables ...
        for start_v in self.start_vs:
            view = dynamic.get_view(start_v.view_index)
            self.v_map[start_v] = values[:, view[0] : view[1]]
        # ... run all steps ...
        for step in self.steps:
            step.execute(architecture, self.v_map)
        # ... then assemble the end variables into the target layout.
        target_values = np.empty(
            (values.shape[0], target_dynamic.shape()), dtype=values.dtype
        )
        for end_v in self.end_vs:
            view = target_dynamic.get_view(end_v.view_index)
            target_values[:, view[0] : view[1]] = self.v_map[end_v]
        return target_values

    def execute_result(
        self,
        vars_iter,
        block: "Model_Library.optimization_model.OptimizationBlock",
        dynamic: ArchitectureDynamic,
        target_values,
        target_dynamic: ArchitectureDynamic,
    ):  # vars_iter: type hinting a (str, int) iterator, target_values: type hinting a np-array
        """Read optimization results from ``block`` and resample them into ``target_values``.

        NOTE(review): ``vars_iter`` is consumed inside the per-start-variable
        loop; with more than one start variable only the first iteration sees
        any items — confirm callers pass a single-start resampling or a
        re-iterable object.

        Raises:
            ValueError: if this resampling is not value-safe.
        """
        if not self.is_safe:
            raise ValueError("Resampling cannot resample values!")
        architecture = dynamic.get_architecture()
        for start_v in self.start_vs:
            view = dynamic.get_view(start_v.view_index)
            # we know the number of vars in vars_iter, so we can pass that as an additional argument and not use target_values.shape[0]
            self.v_map[start_v] = np.empty(
                (target_values.shape[0], view[1] - view[0]), float
            )
            for var_name, index in vars_iter:
                value_object = block.component_dict[var_name].get_values()
                self.v_map[start_v][index] = np.fromiter(
                    (value_object[t] for t in block.T), float
                )
        for step in self.steps:
            step.execute(architecture, self.v_map)
        for end_v in self.end_vs:
            view = target_dynamic.get_view(end_v.view_index)
            target_values[:, view[0] : view[1]] = self.v_map[end_v]
        return target_values

    def execute_fs_result(
        self,
        vars_iter,
        block: "Model_Library.optimization_model.OptimizationBlock",
        dynamic: ArchitectureDynamic,
        target_values,
        target_dynamic: ArchitectureDynamic,
    ):  # vars_iter: type hinting a (str, int) iterator, target_values: type hinting a np-array
        """Read first-state results from ``block`` and resample them into ``target_values``.

        Raises:
            ValueError: if this resampling is not first-state-safe.
        """
        if not self.is_fs_safe:
            raise ValueError("Resampling cannot resample first state values!")
        architecture = dynamic.get_architecture()
        for start_v in self.start_vs:
            self.v_map[start_v] = np.empty(target_values.shape[0], float)
            for var_name, index in vars_iter:
                value_object = block.component_dict[var_name].get_values()
                # First state: value at the first element of T_prime.
                self.v_map[start_v][index] = value_object[block.T_prime.first()]
        for step in self.steps:
            step.execute_fs(architecture, self.v_map)
        for end_v in self.end_vs:
            target_values[:, target_dynamic.get_fs_view(end_v.view_index)] = self.v_map[
                end_v
            ]
        return target_values

    def execute_variable(
        self,
        name: str,
        block_prefix: str,
        model: "Model_Library.optimization_model.OptimizationModel",
        dynamic: ArchitectureDynamic,
        target_dynamic: ArchitectureDynamic,
    ):
        """Route the optimization variable ``name`` from the source blocks to the target blocks."""
        architecture = dynamic.get_architecture()
        for start_v in self.start_vs:
            view = dynamic.get_view(start_v.view_index)
            data = np.empty(view[1] - view[0], dtype=object)
            variable = model.all_blocks[block_prefix + start_v.id].component_dict[name]
            for j in range(view[1] - view[0]):
                data[j] = variable[j]
            self.v_map[start_v] = data
        for step in self.steps:
            step.execute_variable(architecture, self.v_map, name, block_prefix, model)
        for end_v in self.end_vs:
            model.all_blocks[block_prefix + end_v.id].add(name, self.v_map[end_v])
+
+
def is_bool_safe(
    dynamic: ArchitectureDynamic, target_dynamic: ArchitectureDynamic
) -> bool:
    """Return whether resampling from ``dynamic`` to ``target_dynamic`` preserves bools.

    Raises:
        NotImplementedError: if the two dynamics belong to different
            architectures (not supported yet).
    """
    if dynamic == target_dynamic:
        return True

    if dynamic.get_architecture() == target_dynamic.get_architecture():
        return (
            dynamic.get_architecture()
            .get_resampling(
                dynamic.get_root_identifier(), target_dynamic.get_root_identifier()
            )
            .is_bool_safe
        )
    # Bug fix: this used to *return* the NotImplementedError class — a truthy
    # value that silently claimed bool-safety across architectures.
    raise NotImplementedError(
        "Resampling between different architectures is not implemented!"
    )
+
+
def resample(
    values, dynamic: ArchitectureDynamic, target_dynamic: ArchitectureDynamic
):  # values: type hinting a np-array, type hinting a np-array
    """Resample the rows of ``values`` from ``dynamic`` onto ``target_dynamic``.

    Returns ``values`` unchanged if the dynamics are identical, otherwise a
    new array laid out according to ``target_dynamic``.

    Raises:
        NotImplementedError: if the two dynamics belong to different
            architectures (not supported yet).
    """
    if dynamic == target_dynamic:
        return values

    if dynamic.get_architecture() == target_dynamic.get_architecture():
        return (
            dynamic.get_architecture()
            .get_resampling(
                dynamic.get_root_identifier(), target_dynamic.get_root_identifier()
            )
            .execute(values, dynamic, target_dynamic)
        )
    # Bug fix: the bare ``NotImplementedError`` expression was a no-op and the
    # function silently returned None — raise instead.
    raise NotImplementedError(
        "Resampling between different architectures is not implemented!"
    )
+
+
def resample_result(
    vars_iter,
    block: "Model_Library.optimization_model.OptimizationBlock",
    dynamic: ArchitectureDynamic,
    target_values,
    target_dynamic: ArchitectureDynamic,
):  # vars_iter: type hinting a (str, int) iterator, target_values: type hinting a np-array
    """Read optimization results from ``block`` and write them into ``target_values``.

    If the dynamics are identical the values are copied straight from the
    block; otherwise they are routed through the architecture's resampling.

    Raises:
        NotImplementedError: if the two dynamics belong to different
            architectures (not supported yet).
    """
    if dynamic == target_dynamic:
        for var_name, index in vars_iter:
            value_object = block.component_dict[var_name].get_values()
            target_values[index] = np.fromiter(
                (value_object[t] for t in block.T), float
            )

    elif dynamic.get_architecture() == target_dynamic.get_architecture():
        dynamic.get_architecture().get_resampling(
            dynamic.get_root_identifier(), target_dynamic.get_root_identifier()
        ).execute_result(vars_iter, block, dynamic, target_values, target_dynamic)

    else:
        # Bug fix: the bare ``NotImplementedError`` expression was a no-op;
        # raise it instead of silently doing nothing.
        raise NotImplementedError(
            "Resampling between different architectures is not implemented!"
        )
+
+
def resample_first_state_result(
    vars_iter,
    block: "Model_Library.optimization_model.OptimizationBlock",
    dynamic: ArchitectureDynamic,
    target_values,
    target_dynamic: ArchitectureDynamic,
):  # vars_iter: type hinting a (str, int) iterator, target_values: type hinting a np-array
    """Read first-state results from ``block`` and write them into ``target_values``.

    Raises:
        NotImplementedError: if the two dynamics belong to different
            architectures (not supported yet).
    """
    if dynamic == target_dynamic:
        for var_name, index in vars_iter:
            value_object = block.component_dict[var_name].get_values()
            target_values[index] = value_object[block.T_prime.first()]

    elif dynamic.get_architecture() == target_dynamic.get_architecture():
        dynamic.get_architecture().get_resampling(
            dynamic.get_root_identifier(), target_dynamic.get_root_identifier()
        ).execute_fs_result(vars_iter, block, dynamic, target_values, target_dynamic)

    else:
        # Bug fix: the bare ``NotImplementedError`` expression was a no-op;
        # raise it instead of silently doing nothing.
        raise NotImplementedError(
            "Resampling between different architectures is not implemented!"
        )
+
+
def resample_variable(
    name: str,
    block_prefix: str,
    model: "Model_Library.optimization_model.OptimizationModel",
    dynamic: ArchitectureDynamic,
    target_dynamic: ArchitectureDynamic,
):
    """Route the optimization variable ``name`` between blocks across dynamics.

    A no-op when the dynamics are identical.

    Raises:
        NotImplementedError: if the two dynamics belong to different
            architectures (not supported yet).
    """
    if dynamic == target_dynamic:
        pass

    elif dynamic.get_architecture() == target_dynamic.get_architecture():
        (
            dynamic.get_architecture()
            .get_resampling(
                dynamic.get_root_identifier(), target_dynamic.get_root_identifier()
            )
            .execute_variable(name, block_prefix, model, dynamic, target_dynamic)
        )

    else:
        # Bug fix: the bare ``NotImplementedError`` expression was a no-op;
        # raise it instead of silently doing nothing.
        raise NotImplementedError(
            "Resampling between different architectures is not implemented!"
        )
+
+
class Profile:
    """A series of values living on an :class:`ArchitectureDynamic`.

    Bool profiles are tracked separately because they may only be resampled
    via bool-safe resamplings.
    """

    def __init__(
        self, values, dynamic: ArchitectureDynamic
    ):  # values: type hinting np-array
        first_entry = values[0]
        if isinstance(first_entry, np.number):
            self.is_bool = False
        elif isinstance(first_entry, np.bool_):
            self.is_bool = True
        else:
            raise ValueError(f"Invalid data type {type(values[0])}!")
        self.dynamic = dynamic
        self.values = values

    @staticmethod
    def from_csv(path: str, dynamic: ArchitectureDynamic) -> Dict[str, "Profile"]:
        """Load one Profile per column from a csv file shaped like ``dynamic``."""
        expected_index = dynamic.pandas_index()
        if expected_index.nlevels == 1:
            df = pd.read_csv(path)
        else:
            df = pd.read_csv(path, index_col=list(range(expected_index.nlevels)))
        if not expected_index.equals(df.index):
            raise ValueError(
                f"The shape of the data in the csv file does not fit the expected shape based on the given dynamic!"
            )
        profiles = {}
        for column_name in df.columns:
            profiles[str(column_name)] = Profile(df[column_name].values, dynamic)
        return profiles

    def resample(self, target_dynamic: ArchitectureDynamic) -> "Profile":
        """Return this profile resampled onto ``target_dynamic``."""
        if self.dynamic == target_dynamic:
            return self
        if self.is_bool and not is_bool_safe(self.dynamic, target_dynamic):
            raise ValueError(
                f"A bool profile can only be resampled with a bool safe resampling!"
            )
        resampled = resample(
            np.expand_dims(self.values, axis=0), self.dynamic, target_dynamic
        )
        return Profile(resampled[0], target_dynamic)
+
+
+class StepsVector:
+    def __init__(self, lengths, acc, n_steps):
+        self.lengths = lengths
+        self.acc = acc
+        self.n_steps = n_steps
+
+    @staticmethod
+    def from_lengths(lengths):
+        acc = np.empty(len(lengths) + 1, dtype=int)
+        acc[0] = 0
+        accc = 0
+        for i, legnth in enumerate(lengths):
+            accc += legnth
+            acc[i + 1] = accc
+        n_steps = len(lengths)
+        return StepsVector(lengths, acc, n_steps)
+
+    @staticmethod
+    def from_acc(acc):
+        lengths = acc[1:] - acc[:-1]
+        n_steps = len(lengths)
+        return StepsVector(lengths, acc, n_steps)
+
+    def get_lengths(self):
+        return np.copy(self.lengths[: self.n_steps])
+
+    def get_acc(self):
+        return np.copy(self.acc[: self.n_steps + 1])
+
+    def total_acc(self):
+        return self.acc[self.n_steps]
+
+    def _reserve(self):
+        new_lengths = np.empty(self.n_steps * 2, dtype=int)
+        new_acc = np.empty(self.n_steps * 2 + 1, dtype=int)
+
+        new_lengths[0 : self.n_steps] = self.lengths
+        new_acc[0 : self.n_steps + 1] = self.acc
+
+        self.lengths = new_lengths
+        self.acc = new_acc
+
+    def _insert_acc(self, acc):
+        if self.n_steps == len(self.lengths):
+            self._reserve()
+
+        index = np.searchsorted(self.acc[: self.n_steps + 1], acc) - 1
+
+        first = acc - self.acc[index]
+        second = self.lengths[index] - first
+
+        self.lengths[index + 1 : self.n_steps + 1] = self.lengths[index : self.n_steps]
+        self.acc[index + 1 : self.n_steps + 2] = self.acc[index : self.n_steps + 1]
+
+        self.lengths[index] = first
+        self.lengths[index + 1] = second
+        self.acc[index + 1] = acc
+
+        self.n_steps += 1
+
+    def split(self, amount):
+        multiplier = 1
+
+        while amount > 0:
+            max_length = np.max(self.lengths[: self.n_steps])
+            if max_length < 5:
+                self.lengths *= 2
+                self.acc *= 2
+                multiplier *= 2
+
+            possebilities = np.setdiff1d(
+                np.arange(0, self.acc[self.n_steps] + 1), self.acc[: self.n_steps + 1]
+            )
+
+            if len(possebilities) >= amount:
+                temp = random.sample(range(len(possebilities)), amount)
+                new_accs = possebilities[temp]
+            else:
+                new_accs = possebilities
+
+            amount -= len(new_accs)
+
+            for new_acc in new_accs:
+                self._insert_acc(new_acc)
+
+        return multiplier
+
+    def multiply(self, multiplier):
+        self.lengths *= multiplier
+        self.acc *= multiplier
+
    def increase_total_acc(self, new_total_acc):
        """Stretch the steps so their total accumulated length becomes
        *new_total_acc*.

        Each step length is scaled by ``new_total_acc / current_total`` and
        rounded to an integer; the residual rounding error is then repaired
        by randomly picking among the steps with the largest (or smallest)
        rounding error and adjusting them by one unit until the sum matches
        exactly.

        Returns:
            An int array of size ``old_total + 1`` mapping old accumulated
            indices to their new values, for re-indexing dependants.
        """
        factor = new_total_acc / self.acc[self.n_steps]
        # Ideal fractional lengths and their rounded integer counterparts.
        new_lengths_f = self.lengths[: self.n_steps] * factor
        new_lenghts = np.round(new_lengths_f).astype(int)
        current_total_acc = sum(new_lenghts)

        # Repair the rounding error one unit per step until the sum is exact.
        while current_total_acc != new_total_acc:
            error = new_lengths_f - new_lenghts

            if current_total_acc > new_total_acc:
                # Sum too large: shrink the steps that were rounded up the
                # most (most negative error); pick randomly among ties.
                possebilities = np.nonzero(error == error.min())[0]

                if len(possebilities) > current_total_acc - new_total_acc:
                    temp = random.sample(
                        range(len(possebilities)), current_total_acc - new_total_acc
                    )
                    candidates = possebilities[temp]
                else:
                    candidates = possebilities

                new_lenghts[candidates] -= 1
                current_total_acc -= len(candidates)

            else:
                # Sum too small: grow the steps that were rounded down the
                # most (largest positive error).
                possebilities = np.where(error == error.max())[0]

                if len(possebilities) > new_total_acc - current_total_acc:
                    temp = random.sample(
                        range(len(possebilities)), new_total_acc - current_total_acc
                    )
                    candidates = possebilities[temp]
                else:
                    candidates = possebilities

                new_lenghts[candidates] += 1
                current_total_acc += len(candidates)

        # Rebuild the accumulated-index vector from the repaired lengths.
        new_acc = np.empty(len(self.acc), dtype=int)
        new_acc[0] = 0
        acc = 0
        for i in range(self.n_steps):
            acc += new_lenghts[i]
            new_acc[i + 1] = acc
        # Map old accumulated indices to new ones. Only entries at old step
        # boundaries are written; the rest stay uninitialized — presumably
        # callers only look up boundary indices. TODO confirm.
        acc_shift = np.empty(self.acc[self.n_steps] + 1, dtype=int)
        acc_shift[self.acc[: self.n_steps + 1]] = new_acc[: self.n_steps + 1]
        self.lengths[: self.n_steps] = new_lenghts
        self.acc = new_acc
        return acc_shift
+
+    def acc_shift(self, acc_shift):
+        self.acc[: self.n_steps + 1] = acc_shift[self.acc[: self.n_steps + 1]]
+        self.lengths[: self.n_steps] = (
+            self.acc[1 : self.n_steps + 1] - self.acc[: self.n_steps]
+        )
+
    def reinstate_grid(self, grid):
        """Ensure that every multiple of *grid* is a step boundary again by
        inserting any missing accumulated indices.

        ``_insert_acc`` is defined elsewhere in this class.
        """
        for new_acc in range(0, self.acc[self.n_steps] + 1, grid):
            if new_acc not in self.acc[: self.n_steps + 1]:
                self._insert_acc(new_acc)
+
+
def create_random_architecture(n_expansions):
    """Create a random ``Architecture`` for resampling tests.

    Starting from a single leaf node, *n_expansions* times a random leaf is
    expanded into a REDUCTION (one child), an AGGREGATION (3-6 period
    children) or a STAGE (3-6 scenario children), each with roughly equal
    probability. The node tree is then materialized in two passes:
    bottom-up (choosing concrete step lengths, ``_construct_bottom_up``)
    and top-down (creating the actual blocks, ``_construct_top_down``).
    """
    # tree[i] is None for a leaf, or (kind, child_indices) for an inner
    # node; leafs holds the indices of the current leaves.
    tree = [None]
    leafs = [0]

    for _ in range(n_expansions):
        leaf_index = random.choice(range(len(leafs)))
        node_index = leafs.pop(leaf_index)
        choice = random.random()

        if choice < 0.33:
            # Expand into a reduction with a single child.
            new_index = len(tree)
            tree[node_index] = (ArchitectureKind.REDUCTION, [new_index])
            tree.append(None)
            leafs.append(new_index)

        elif choice < 0.67:
            # Expand into a period aggregation with 3-6 periods.
            n_periods = random.randint(3, 6)
            new_indices = range(len(tree), len(tree) + n_periods)
            tree[node_index] = (ArchitectureKind.AGGREGATION, list(new_indices))
            for new_index in new_indices:
                tree.append(None)
                leafs.append(new_index)

        else:
            # Expand into a stochastic stage with 3-6 scenarios.
            n_scenarios = random.randint(3, 6)
            new_indices = range(len(tree), len(tree) + n_scenarios)
            tree[node_index] = (ArchitectureKind.STAGE, list(new_indices))
            for new_index in new_indices:
                tree.append(None)
                leafs.append(new_index)

    # Pass 1: choose step lengths bottom-up; pass 2: build the blocks.
    details = [None for _ in range(len(tree))]
    steps, _, _ = _construct_bottom_up(tree, [0], details)

    architecture = Architecture(DynamicTree(steps.get_lengths()).root())
    _construct_top_down(tree, [0], details, architecture.root)
    return architecture
+
+
def _construct_bottom_up(tree, path, details):
    """First construction pass: walk the node tree bottom-up and pick
    concrete step lengths for every node.

    Returns a tuple ``(steps, length_divider, grids)``:
      * ``steps`` — the steps vector of the node at ``path[-1]``,
      * ``length_divider`` — value the parent's total length must be a
        multiple of (``None`` for a plain leaf),
      * ``grids`` — grid lengths that must be preserved when the steps are
        stretched later (see ``reinstate_grid``).

    Side effect: fills ``details[node]`` with the data needed by the later
    passes (``_multiply``, ``_shift``, ``_construct_top_down``).
    """
    if tree[path[-1]] is None:
        # Leaf: 2-5 steps with random lengths 1-5.
        n_steps = random.randint(2, 5)
        lenghts = np.empty(n_steps, dtype=int)
        for i in range(n_steps):
            lenghts[i] = random.randint(1, 5)

        return StepsVector.from_lengths(lenghts), None, []

    kind, children = tree[path[-1]]
    if kind == ArchitectureKind.REDUCTION:
        path.append(children[0])
        steps, length_divider, grids = _construct_bottom_up(tree, path, details)
        path.pop()

        indices = steps.get_acc()

        # Split the steps; if that requires a finer time resolution, the
        # multiplier must be propagated into the whole subtree.
        n_new_indices = random.randint(2, 5)
        multiplier = steps.split(n_new_indices)

        if multiplier > 1:
            # NOTE(review): in-place ``*=`` also mutates the array returned
            # by get_acc() if it aliases the internal one — confirm intended.
            indices *= multiplier

            path.append(children[0])
            _multiply(tree, path, details, multiplier)
            path.pop()

        details[path[-1]] = (indices, grids)

        return steps, length_divider, grids

    elif kind == ArchitectureKind.AGGREGATION:
        periods = np.empty(len(children), dtype=object)
        length_dividers = np.empty(len(children), dtype=object)
        max_total_acc = 0
        for i, child in enumerate(children):
            path.append(child)
            period, length_divider, _ = _construct_bottom_up(tree, path, details)
            path.pop()

            periods[i] = period
            length_dividers[i] = length_divider

            total_acc = period.total_acc()
            if total_acc > max_total_acc:
                max_total_acc = total_acc

        # All periods must share one length that is a multiple of every
        # child's divider. The elementwise ``!= None`` comparison filters
        # the object array (deliberate; ``is not None`` would not vectorize).
        lcm = np.lcm.reduce(length_dividers[length_dividers != None], initial=1)
        period_length = math.ceil(max_total_acc / lcm) * lcm

        lengths = np.empty(len(children), dtype=object)
        for i, period in enumerate(periods):
            if period.total_acc() < period_length:
                # Stretch shorter periods to the common length and re-index
                # their subtrees accordingly.
                indices_shift = period.increase_total_acc(period_length)

                path.append(children[i])
                _shift(tree, path, details, indices_shift)
                _reinstate_grid(tree, path, details, period)
                path.pop()

            lengths[i] = period.get_lengths()

        details[path[-1]] = (periods, [period_length])

        lengths = np.concatenate(lengths, axis=0)

        return StepsVector.from_lengths(lengths), period_length, [period_length]

    elif kind == ArchitectureKind.STAGE:
        scenarios = np.empty(len(children), dtype=object)
        length_dividers = np.empty(len(children), dtype=object)
        total_grids = []
        max_total_acc = 0
        for i, child in enumerate(children):
            path.append(child)
            scenario, length_divider, grids = _construct_bottom_up(tree, path, details)
            path.pop()

            scenarios[i] = scenario
            length_dividers[i] = length_divider
            total_grids.extend(grids)

            total_acc = scenario.total_acc()
            if total_acc > max_total_acc:
                max_total_acc = total_acc

        # All scenarios must share one length (same construction as above).
        lcm = np.lcm.reduce(length_dividers[length_dividers != None], initial=1)
        scenario_length = math.ceil(max_total_acc / lcm) * lcm

        indices = np.empty(len(children), dtype=object)
        for i, scenario in enumerate(scenarios):
            if scenario.total_acc() < scenario_length:
                indices_shift = scenario.increase_total_acc(scenario_length)

                path.append(children[i])
                _shift(tree, path, details, indices_shift)
                _reinstate_grid(tree, path, details, scenario)
                path.pop()

            indices[i] = scenario.get_acc()

        filtered_total_grids = [item for item in total_grids if item is not None]
        details[path[-1]] = (scenarios, filtered_total_grids)

        # The stage's own steps are the union of all scenario boundaries.
        indices_union = reduce(np.union1d, indices)

        return (StepsVector.from_acc(indices_union), lcm, filtered_total_grids)
+
+
+def _multiply(tree, path, details, multiplier):
+    if tree[path[-1]] is None:
+        return
+
+    kind, children = tree[path[-1]]
+    if kind == ArchitectureKind.REDUCTION:
+        path.append(children[0])
+        _multiply(tree, path, details, multiplier)
+        path.pop()
+
+        indices, grid = details[path[-1]]
+        for i in range(len(grid)):
+            grid[i] *= multiplier
+        details[path[-1]] = (indices * multiplier, grid)
+
+        return
+
+    elif kind == ArchitectureKind.AGGREGATION:
+        periods, grid = details[path[-1]]
+        for i, child in enumerate(children):
+            path.append(child)
+            _multiply(tree, path, details, multiplier)
+            path.pop()
+
+            periods[i].multiply(multiplier)
+
+        for i in range(len(grid)):
+            grid[i] *= multiplier
+        details[path[-1]] = (periods, grid)
+
+        return
+
+    elif kind == ArchitectureKind.STAGE:
+        scenarios, grid = details[path[-1]]
+        for i, child in enumerate(children):
+            path.append(child)
+            _multiply(tree, path, details, multiplier)
+            path.pop()
+
+            scenarios[i].multiply(multiplier)
+
+        for i in range(len(grid)):
+            grid[i] *= multiplier
+        details[path[-1]] = (scenarios, grid)
+
+        return
+
+
+def _shift(tree, path, details, shift):
+    if tree[path[-1]] is None:
+        return
+
+    kind, children = tree[path[-1]]
+    if kind == ArchitectureKind.REDUCTION:
+        path.append(children[0])
+        _shift(tree, path, details, shift)
+        path.pop()
+
+        indices, grids = details[path[-1]]
+        indices = shift[indices]
+        details[path[-1]] = (indices, grids)
+
+        return
+
+    elif kind == ArchitectureKind.AGGREGATION:
+        return
+
+    elif kind == ArchitectureKind.STAGE:
+        scenarios, grids = details[path[-1]]
+        for i, child in enumerate(children):
+            path.append(child)
+            _shift(tree, path, details, shift)
+            path.pop()
+
+            scenarios[i].acc_shift(shift)
+
+        return
+
+
+def _reinstate_grid(tree, path, details, steps):
+    if tree[path[-1]] is None:
+        return
+
+    for grid in details[path[-1]][1]:
+        steps.reinstate_grid(grid)
+
+    _reinstate_grid_inner(tree, path, details)
+
+
def _reinstate_grid_inner(tree, path, details):
    """Recursive part of ``_reinstate_grid``: re-insert grid boundaries into
    the index arrays stored in ``details`` below the current node."""
    if tree[path[-1]] is None:
        return

    kind, children = tree[path[-1]]
    if kind == ArchitectureKind.REDUCTION:
        path.append(children[0])
        _reinstate_grid_inner(tree, path, details)
        path.pop()

        indices, grids = details[path[-1]]
        for grid in grids:
            # Collect the grid multiples missing from the (sorted) index
            # array, remembering where each would be inserted.
            additional_indices = []
            for new_index in range(grid, indices[-1], grid):
                i = np.searchsorted(indices, new_index)
                if indices[i] != new_index:
                    additional_indices.append((i, new_index))

            # Insert them in one pass. Positions were computed against the
            # original array, so each earlier insertion offsets the later
            # ones by one (the ``i +`` terms below).
            new_indices = np.empty(len(indices) + len(additional_indices), dtype=int)
            new_indices[: len(indices)] = indices
            for i, (j, new_index) in enumerate(additional_indices):
                new_indices[i + j + 1 : i + len(indices) + 1] = new_indices[
                    i + j : i + len(indices)
                ]
                new_indices[i + j] = new_index

            indices = new_indices

        details[path[-1]] = (indices, grids)
        return

    elif kind == ArchitectureKind.AGGREGATION:
        # Period details are local to each period; nothing to re-insert.
        return

    elif kind == ArchitectureKind.STAGE:
        for child in children:
            path.append(child)
            _reinstate_grid_inner(tree, path, details)
            path.pop()

        return
+
+
+def _construct_top_down(tree, path, details, block):
+    if tree[path[-1]] is None:
+        return
+
+    kind, children = tree[path[-1]]
+    if kind == ArchitectureKind.REDUCTION:
+        indices, _ = details[path[-1]]
+
+        reduction = block.add_dynamic_reduction(indices[1:] - indices[:-1])
+
+        path.append(children[0])
+        _construct_top_down(tree, path, details, reduction)
+        path.pop()
+
+        return
+
+    elif kind == ArchitectureKind.AGGREGATION:
+        periods = [period.get_lengths() for period in details[path[-1]][0]]
+
+        dynamic_lenght = sum(block.dynamic.step_lengths())
+        period_lenght = sum(periods[0])
+
+        period_order = []
+        for _ in range(int(dynamic_lenght / period_lenght)):
+            period_order.append(random.randint(0, len(periods) - 1))
+
+        aggregation = block.add_period_aggregation(periods, period_order)
+
+        for i, child in enumerate(children):
+            path.append(child)
+            _construct_top_down(tree, path, details, aggregation.period(i))
+            path.pop()
+
+        return
+
+    elif kind == ArchitectureKind.STAGE:
+        stage = block.add_stochastic_stage(
+            [1.0 / len(children) for _ in range(len(children))]
+        )
+
+        for i, child in enumerate(children):
+            path.append(child)
+            _construct_top_down(tree, path, details, stage.scenario(i))
+            path.pop()
+
+        return
+
+
def create_resampling_pair(block, a, b):
    """Randomly extend the identifier prefixes *a* and *b* to a pair of
    identifier strings addressing two dynamics of *block*, suitable as a
    source/target pair for a resampling test.

    Returns the completed pair ``(a, b)``.

    BUG FIX: the choice strings ``"agregation"`` and ``"agregation->period"``
    were misspelled when appended but spelled ``"aggregation"`` in (part of)
    the dispatch below, so the aggregation branch was unreachable and those
    draws silently degraded to the "split" behavior. Both sides now use the
    correct spelling consistently.
    """
    # Decide the structural relation of the pair at this level. "x->y"
    # means one identifier stays at the combined piece x while the other
    # descends into its sub-piece y; "split" ends the common part here.
    choices = ["split"]
    if len(block.dynamic_reductions) != 0:
        choices.append("reduction")
    if len(block.period_aggregations) != 0:
        choices.append("aggregation")
        choices.append("period")
        choices.append("aggregation->period")
        choices.append("period->aggregation")
    if len(block.stochastic_stages) != 0:
        choices.append("stage")
        choices.append("scenario")
        choices.append("stage->scenario")
        choices.append("scenario->stage")
    choice = random.choice(choices)
    if choice == "reduction":
        choices = [
            (ReductionPiece(i), reduction)
            for i, reduction in enumerate(block.dynamic_reductions)
        ]
    elif choice == "aggregation":
        choices = [
            (AggregationPiece(i), [period for period in aggregation.period_blocks])
            for i, aggregation in enumerate(block.period_aggregations)
        ]
    elif choice == "period":
        choices = [
            (PeriodPiece(i, j), period)
            for i, aggregation in enumerate(block.period_aggregations)
            for j, period in enumerate(aggregation.period_blocks)
        ]
    elif choice == "aggregation->period":
        choices = [
            (
                AggregationPiece(i),
                PeriodPiece(i, j),
                period,
                [period for period in aggregation.period_blocks],
            )
            for i, aggregation in enumerate(block.period_aggregations)
            for j, period in enumerate(aggregation.period_blocks)
        ]
    elif choice == "period->aggregation":
        choices = [
            (
                PeriodPiece(i, j),
                AggregationPiece(i),
                period,
                [period for period in aggregation.period_blocks],
            )
            for i, aggregation in enumerate(block.period_aggregations)
            for j, period in enumerate(aggregation.period_blocks)
        ]
    elif choice == "stage":
        choices = [
            (StagePiece(i), [scenario for scenario in stage.scenario_blocks])
            for i, stage in enumerate(block.stochastic_stages)
        ]
    elif choice == "scenario":
        choices = [
            (ScenarioPiece(i, j), scenario)
            for i, stage in enumerate(block.stochastic_stages)
            for j, scenario in enumerate(stage.scenario_blocks)
        ]
    elif choice == "stage->scenario":
        choices = [
            (
                StagePiece(i),
                ScenarioPiece(i, j),
                scenario,
                [scenario for scenario in stage.scenario_blocks],
            )
            for i, stage in enumerate(block.stochastic_stages)
            for j, scenario in enumerate(stage.scenario_blocks)
        ]
    elif choice == "scenario->stage":
        choices = [
            (
                ScenarioPiece(i, j),
                StagePiece(i),
                scenario,
                [scenario for scenario in stage.scenario_blocks],
            )
            for i, stage in enumerate(block.stochastic_stages)
            for j, scenario in enumerate(stage.scenario_blocks)
        ]
    else:
        # "split": both identifiers end here, descending independently.
        a = _create_resampling_end(block, a)
        b = _create_resampling_end(block, b)
        return a, b
    choice = random.choice(choices)

    if len(choice) == 2:
        piece, data = choice
        if isinstance(piece, (ReductionPiece, PeriodPiece, ScenarioPiece)):
            # Both identifiers descend into the same sub-block.
            a += piece.to_str() + "."
            b += piece.to_str() + "."
            a, b = create_resampling_pair(data, a, b)
        else:
            # Both identifiers address the combined piece: recurse into
            # every sub-block. (Loop variable renamed from ``block`` to
            # avoid shadowing the parameter.)
            a += piece.to_str() + "("
            b += piece.to_str() + "("
            for i, sub_block in enumerate(data):
                if i != 0:
                    a += ","
                    b += ","
                a, b = create_resampling_pair(sub_block, a, b)
            a += ")"
            b += ")"
    elif len(choice) == 4:
        a_piece, b_piece, data, blocks = choice
        if isinstance(a_piece, (AggregationPiece, StagePiece)):
            # a addresses the combined piece while b descends into sub-piece
            # b_piece. The shared sub-block is completed by the recursion;
            # a's remaining sub-block slots get independent appendices.
            a += a_piece.to_str() + "("
            for i in range(b_piece.index_2):
                a = _create_resampling_appendix(blocks[i], a)
                a += ","
            b += b_piece.to_str() + "."
            a, b = create_resampling_pair(data, a, b)
            for i in range(b_piece.index_2 + 1, len(blocks)):
                a += ","
                a = _create_resampling_appendix(blocks[i], a)
            a += ")"
        else:
            # Mirror image: a descends while b addresses the combined piece.
            a += a_piece.to_str() + "."
            b += b_piece.to_str() + "("
            for i in range(a_piece.index_2):
                b = _create_resampling_appendix(blocks[i], b)
                b += ","
            a, b = create_resampling_pair(data, a, b)
            for i in range(a_piece.index_2 + 1, len(blocks)):
                b += ","
                b = _create_resampling_appendix(blocks[i], b)
            b += ")"
    else:
        a = _create_resampling_end(block, a)
        b = _create_resampling_end(block, b)
    return a, b
+
+
+def _create_resampling_end(block, a):
+    choices = ["end"]
+    if len(block.dynamic_reductions) != 0:
+        choices.append("reduction")
+    if len(block.period_aggregations) != 0:
+        choices.append("aggregation")
+    choice = random.choice(choices)
+
+    if choice == "reduction":
+        choices = [
+            (ReductionPiece(i), reduction)
+            for i, reduction in enumerate(block.dynamic_reductions)
+        ]
+        piece, data = random.choice(choices)
+        a += piece.to_str() + "."
+        a = _create_resampling_end(data, a)
+        return a
+    elif choice == "aggregation":
+        choices = [
+            (AggregationPiece(i), [period for period in aggregation.period_blocks])
+            for i, aggregation in enumerate(block.period_aggregations)
+        ]
+        piece, data = random.choice(choices)
+        a += piece.to_str() + "("
+        for i, block in enumerate(data):
+            if i != 0:
+                a += ","
+            a = _create_resampling_end(block, a)
+        a += ")"
+        return a
+    else:
+        return a
+
+
+def _create_resampling_appendix(block, a):
+    choices = ["end"]
+    if len(block.dynamic_reductions) != 0:
+        choices.append("reduction")
+    if len(block.period_aggregations) != 0:
+        choices.append("agregation")
+        choices.append("period")
+    if len(block.stochastic_stages) != 0:
+        choices.append("stage")
+        choices.append("scenario")
+    choice = random.choice(choices)
+    if choice == "reduction":
+        choices = [
+            (ReductionPiece(i), reduction)
+            for i, reduction in enumerate(block.dynamic_reductions)
+        ]
+    elif choice == "aggregation":
+        choices = [
+            (AggregationPiece(i), [period for period in aggregation.period_blocks])
+            for i, aggregation in enumerate(block.period_aggregations)
+        ]
+    elif choice == "period":
+        choices = [
+            (PeriodPiece(i, j), period)
+            for i, aggregation in enumerate(block.period_aggregations)
+            for j, period in enumerate(aggregation.period_blocks)
+        ]
+    elif choice == "stage":
+        choices = [
+            (StagePiece(i), [scenario for scenario in stage.scenario_blocks])
+            for i, stage in enumerate(block.stochastic_stages)
+        ]
+    elif choice == "scenario":
+        choices = [
+            (ScenarioPiece(i, j), scenario)
+            for i, stage in enumerate(block.stochastic_stages)
+            for j, scenario in enumerate(stage.scenario_blocks)
+        ]
+    else:
+        return a
+    piece, data = random.choice(choices)
+
+    if isinstance(piece, (ReductionPiece, PeriodPiece, ScenarioPiece)):
+        a += piece.to_str() + "."
+        a = _create_resampling_appendix(data, a)
+    else:
+        a += piece.to_str() + "("
+        for i, block in enumerate(data):
+            if i != 0:
+                a += ","
+            a = _create_resampling_appendix(block, a)
+        a += ")"
+    return a
+
+
def get_identifier_strs(block, identifier_strs, prefix):
    """Append to *identifier_strs* every identifier string that can be
    formed under *block*, each starting with *prefix*."""
    identifier_strs.append(prefix)

    # Reductions: descend with an "r_<i>." segment.
    for i, reduction in enumerate(block.dynamic_reductions):
        get_identifier_strs(reduction, identifier_strs, f"{prefix}r_{i}.")

    # Aggregations: every combination of one identifier per period forms an
    # "a_<i>(...)" identifier; each single period also gets "p_<i>_<j>.".
    for i, aggregation in enumerate(block.period_aggregations):
        periods_strs = []
        for period in aggregation.period_blocks:
            collected = []
            get_identifier_strs(period, collected, "")
            periods_strs.append(collected)

        for combo in product(*periods_strs):
            identifier_strs.append(f"{prefix}a_{i}({','.join(combo)})")

        for j, period in enumerate(aggregation.period_blocks):
            get_identifier_strs(period, identifier_strs, f"{prefix}p_{i}_{j}.")

    # Stages: analogous, with "d_<i>(...)" combinations and "s_<i>_<j>."
    # per-scenario descents.
    for i, stage in enumerate(block.stochastic_stages):
        scenarios_strs = []
        for scenario in stage.scenario_blocks:
            collected = []
            get_identifier_strs(scenario, collected, "")
            scenarios_strs.append(collected)

        for combo in product(*scenarios_strs):
            identifier_strs.append(f"{prefix}d_{i}({','.join(combo)})")

        for j, scenario in enumerate(stage.scenario_blocks):
            get_identifier_strs(scenario, identifier_strs, f"{prefix}s_{i}_{j}.")
+
+
def test_single_resampling(arhitecture: Architecture, a_str: str, b_str: str):
    """Request a resampling between the dynamics identified by *a_str* and
    *b_str*; if it is marked safe, execute it on a ramp signal.

    NOTE(review): ``result`` is never asserted against an expected value —
    this only checks that safe resamplings run without raising.
    """
    dynamic = arhitecture.get_dynamic(a_str)
    target_dynamic = arhitecture.get_dynamic(b_str)
    resampling = arhitecture.get_resampling(
        Identifier.from_str(a_str), Identifier.from_str(b_str)
    )
    if resampling.is_safe:
        # Resample a ramp signal: one row, one value per time step.
        result = resample(
            np.expand_dims(np.arange(dynamic.shape(), dtype=float), axis=0),
            dynamic,
            target_dynamic,
        )
+
+
+import random
+
+
def test_resampling():
    """Smoke test: build one random architecture and exercise 100 random
    resampling pairs on it. Seeded so the run is reproducible."""
    random.seed(0)
    architecture = create_random_architecture(10)

    for _ in range(100):
        source_str, target_str = create_resampling_pair(architecture.root, "", "")
        test_single_resampling(architecture, source_str, target_str)
diff --git a/component/__init__.py b/component/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..174deebf206cbf429e29c024b19ec4c09e04d092
--- /dev/null
+++ b/component/__init__.py
@@ -0,0 +1,26 @@
+from .adapter import AssetAdapter, MemberAdapter
+from .cold import AbsorptionChiller, CompressorChiller, ColdConsumption
+from .electricity import (
+    ElectricalBusBar,
+    BiPowerElectronic,
+    PowerElectronic,
+    PVGenerator,
+    ElectricalConsumption,
+    ElectricalGeneration,
+    ElectricalGrid,
+    Battery,
+)
+from .gas import CHP, GasGrid
+from .heat import (
+    ElectricBoiler,
+    GasBoiler,
+    GasHeatPump,
+    HeatExchanger,
+    HeatPump,
+    SolarThermalCollector,
+    HotWaterConsumption,
+    HeatGrid,
+    HeatStorage,
+)
+from .hydrogen import Electrolyzer, FuelCell, HydrogenStorage
+from .space_heat import ElectricRadiator, Radiator, HeatConsumption
diff --git a/component/adapter.py b/component/adapter.py
new file mode 100644
index 0000000000000000000000000000000000000000..e5975c7cc619bc47c6c3bf8148e4d65fe36509e9
--- /dev/null
+++ b/component/adapter.py
@@ -0,0 +1,196 @@
+"""
+MIT License
+
+Copyright (c) 2023 RWTH Aachen University
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+
+from Model_Library.component.core import (
+    AbstractComponent,
+    ComponentCapacity,
+    ComponentKind,
+    ComponentCommodity,
+    ComponentLink,
+    ComponentPart,
+)
+from Model_Library.optimization_model import VariableKind
+
+import pyomo.environ as pyo
+
+
class AssetLink(ComponentLink):
    """Link tying a local component variable to a variable of another asset.

    All arguments are stored verbatim; consumers of the link interpret them.
    """

    def __init__(self, asset, asset_var_name, start, var_name, end):
        self.end = end
        self.var_name = var_name
        self.start = start
        self.asset_var_name = asset_var_name
        self.asset = asset
+
+
class MemberLink(ComponentLink):
    """Link tying a local result name to a result of a member entity.

    All arguments are stored verbatim; consumers of the link interpret them.
    """

    def __init__(self, member, member_res_name, local_res_name, end):
        self.end = end
        self.local_res_name = local_res_name
        self.member_res_name = member_res_name
        self.member = member
+
+
class AssetAdapter(AbstractComponent):
    """Couples a grid component that lives inside another asset into this
    entity's optimization model.

    The adapter exposes local ``input_1``/``output_1`` variables and
    constrains them to ``into_asset``/``from_asset`` variables, which are
    linked (via :class:`AssetLink`) to the ``output_1``/``input_1``
    variables of the grid in the other asset.
    """

    def __init__(self, name, configuration, assets):
        # The referenced asset and the grid component inside it
        # (configuration keys "asset" and "grid").
        self.asset = assets[configuration["asset"]]
        self.grid = self.asset._components[configuration["grid"]]

        super().__init__(
            name=name,
            kind=ComponentKind.ASSET_ADAPTER,
            commodity_1=self.grid.commodity,
            commodity_2=None,
            commodity_3=self.grid.commodity,
            commodity_4=None,
            configuration=configuration,
            capacity=ComponentCapacity.NONE,
        )

        # The adapter transports exactly the grid's commodity.
        self.commodity = self.grid.commodity

    def commodities(self):
        """Return the single commodity this adapter transports."""
        return [self.commodity]

    def iter_component_parts(self):
        # The adapter has neither a design nor a state part.
        yield ComponentPart.NONE_STATE

    def iter_links(self):
        """Yield the cross-asset links: the grid's ``output_1`` is tied to
        this adapter's ``into_asset`` variable and the grid's ``input_1``
        to ``from_asset``."""
        yield AssetLink(
            self.asset,
            self.grid.name + ".output_1",
            (self.grid.name, ComponentPart.NONE_STATE),
            self.name + ".into_asset",
            (self.name, ComponentPart.NONE_STATE),
        )
        yield AssetLink(
            self.asset,
            self.grid.name + ".input_1",
            (self.grid.name, ComponentPart.NONE_STATE),
            self.name + ".from_asset",
            (self.name, ComponentPart.NONE_STATE),
        )

    def non_state_base_variable_names(self):
        """Names (and kinds) of the time-indexed variables this component
        contributes to the optimization block."""
        return [
            (self.name + ".input_1", VariableKind.INDEXED),
            (self.name + ".output_1", VariableKind.INDEXED),
        ]

    def add_non_state_variables(self, o_block):
        """Add the non-negative, time-indexed input/output variables."""
        input = pyo.Var(o_block.T, bounds=(0, None))
        o_block.add(self.name + ".input_1", input)

        output = pyo.Var(o_block.T, bounds=(0, None))
        o_block.add(self.name + ".output_1", output)

    def add_non_state_model(self, d_block, o_block):
        """Constrain input/output to the linked ``into_asset``/``from_asset``
        variables (which are expected to exist in ``o_block`` already, added
        via the links from :meth:`iter_links`)."""
        input = o_block.component_dict[self.name + ".input_1"]

        output = o_block.component_dict[self.name + ".output_1"]

        into_asset = o_block.component_dict[self.name + ".into_asset"]

        from_asset = o_block.component_dict[self.name + ".from_asset"]

        # ``rule`` is deliberately redefined below: pyomo captures the
        # function object when each Constraint is created.
        def rule(m, t):
            return input[t] == into_asset[t]

        o_block.add(self.name + ".input_cons", pyo.Constraint(o_block.T, rule=rule))

        def rule(m, t):
            return output[t] == from_asset[t]

        o_block.add(self.name + ".output_cons", pyo.Constraint(o_block.T, rule=rule))
+
+
class MemberAdapter(AbstractComponent):
    """Couples a grid component of a member entity into this entity's model.

    Unlike :class:`AssetAdapter`, the member side contributes fixed result
    time series (``into_member``/``from_member``) rather than variables.
    """

    def __init__(self, name, configuration, members):
        # The referenced member and the grid component inside it
        # (configuration keys "member" and "grid").
        self.member = members[configuration["member"]]
        self.grid = self.member._components[configuration["grid"]]

        super().__init__(
            name=name,
            kind=ComponentKind.MEMBER_ADAPTER,
            commodity_1=self.grid.commodity,
            commodity_2=None,
            commodity_3=self.grid.commodity,
            commodity_4=None,
            configuration=configuration,
            capacity=ComponentCapacity.NONE,
        )

        # The adapter transports exactly the grid's commodity.
        self.commodity = self.grid.commodity

    def commodities(self):
        """Return the single commodity this adapter transports."""
        return [self.commodity]

    def iter_component_parts(self):
        # The adapter has neither a design nor a state part.
        yield ComponentPart.NONE_STATE

    def iter_links(self):
        """Yield the member links binding the member grid's ``output_1``/
        ``input_1`` results to the local ``into_member``/``from_member``
        result names."""
        yield MemberLink(
            self.member,
            self.grid.name + ".output_1",
            "into_member",
            (self.name, ComponentPart.NONE_STATE),
        )
        yield MemberLink(
            self.member,
            self.grid.name + ".input_1",
            "from_member",
            (self.name, ComponentPart.NONE_STATE),
        )

    def non_state_base_variable_names(self):
        """Names (and kinds) of the time-indexed variables this component
        contributes to the optimization block."""
        return [
            (self.name + ".input_1", VariableKind.INDEXED),
            (self.name + ".output_1", VariableKind.INDEXED),
        ]

    def add_non_state_variables(self, o_block):
        """Add the non-negative, time-indexed input/output variables."""
        input = pyo.Var(o_block.T, bounds=(0, None))
        o_block.add(self.name + ".input_1", input)

        output = pyo.Var(o_block.T, bounds=(0, None))
        o_block.add(self.name + ".output_1", output)

    def add_non_state_model(self, d_block, o_block):
        """Fix input/output to the member's resampled result time series."""
        input = o_block.component_dict[self.name + ".input_1"]

        output = o_block.component_dict[self.name + ".output_1"]

        # NOTE(review): ``self.into_member`` / ``self.from_member`` are not
        # assigned anywhere in this class — presumably attached externally
        # through the local result names declared in iter_links(); confirm.
        into_member = self.into_member.resample(o_block.dynamic).values

        from_member = self.from_member.resample(o_block.dynamic).values

        # ``rule`` is deliberately redefined below: pyomo captures the
        # function object when each Constraint is created.
        def rule(m, t):
            return input[t] == into_member[t]

        o_block.add(self.name + ".input_cons", pyo.Constraint(o_block.T, rule=rule))

        def rule(m, t):
            return output[t] == from_member[t]

        o_block.add(self.name + ".output_cons", pyo.Constraint(o_block.T, rule=rule))
diff --git a/component/cold.py b/component/cold.py
new file mode 100644
index 0000000000000000000000000000000000000000..3915f068e9136b1dd808133a5046513deaaa8f27
--- /dev/null
+++ b/component/cold.py
@@ -0,0 +1,68 @@
+"""
+MIT License
+
+Copyright (c) 2023 RWTH Aachen University
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+
+from Model_Library.component.core import (
+    BaseComponent,
+    BaseConsumption,
+    BaseGrid,
+    ComponentCommodity,
+)
+
+
class AbsorptionChiller(BaseComponent):
    """Absorption chiller: commodity slot 1 is HEAT and slot 3 is COLD
    (slot semantics are defined by :class:`BaseComponent`)."""

    def __init__(self, name, configuration):
        super().__init__(
            name=name,
            configuration=configuration,
            commodity_1=ComponentCommodity.HEAT,
            commodity_3=ComponentCommodity.COLD,
            commodity_2=None,
            commodity_4=None,
        )
+
+
class CompressorChiller(BaseComponent):
    """Compressor chiller: commodity slot 1 is ELECTRICITY and slot 3 is
    COLD (slot semantics are defined by :class:`BaseComponent`)."""

    def __init__(self, name, configuration):
        super().__init__(
            name=name,
            configuration=configuration,
            commodity_1=ComponentCommodity.ELECTRICITY,
            commodity_3=ComponentCommodity.COLD,
            commodity_2=None,
            commodity_4=None,
        )
+
+
class ColdConsumption(BaseConsumption):
    """Consumption (demand) component for the COLD commodity."""

    def __init__(self, name, configuration):
        super().__init__(
            name=name,
            configuration=configuration,
            commodity=ComponentCommodity.COLD,
        )
+
+
class ColdGrid(BaseGrid):
    """Grid component for the COLD commodity."""

    def __init__(self, name, configuration):
        super().__init__(
            name=name,
            configuration=configuration,
            commodity=ComponentCommodity.COLD,
        )
diff --git a/component/core.py b/component/core.py
new file mode 100644
index 0000000000000000000000000000000000000000..49b3c26827d6c3026d99f58b7a0209496e183923
--- /dev/null
+++ b/component/core.py
@@ -0,0 +1,1029 @@
+"""
+MIT License
+
+Copyright (c) 2023 RWTH Aachen University
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+
+from Model_Library.utility import design_annuity, operational_annuity
+from Model_Library.optimization_model import VariableKind
+
+import abc
+from enum import Enum
+import json
+import pyomo.environ as pyo
+from typing import List
+
+
class ComponentKind(Enum):
    """Coarse classification of components, used for pattern matching
    (see ComponentPartPattern)."""

    ALL = 1  # wildcard: matches every kind
    ASSET_ADAPTER = 2
    BASE = 3
    BUSBAR = 4
    CONSUMPTION = 5
    GENERATION = 6
    GRID = 7
    MEMBER_ADAPTER = 8
    STORAGE = 9
+
+
class ComponentCommodity(Enum):
    """Commodities a component can consume or produce."""

    ALL = 1  # wildcard: matches every commodity in pattern matching
    ELECTRICITY = 2
    HEAT = 3
    GAS = 4
    COLD = 5
    HYDROGEN = 6
    SPACE_HEAT = 7
+
+
class ComponentPart(Enum):
    """The model parts a component can contribute: design (sizing),
    state (inter-step coupled variables, e.g. storage energy) and
    non-state (purely operational) variables."""

    ALL = 1  # wildcard: matches every part in pattern matching
    DESIGN = 2
    STATE = 3
    NONE_STATE = 4
+
+
class ComponentLink(abc.ABC):
    """Abstract marker base class for links between component parts."""
+
+
class VariableLink(ComponentLink):
    """Link stating that the variable `var_name` couples the component part
    `start` with the component part `end` (both are (name, part) pairs,
    see BaseStorage.iter_links)."""

    def __init__(self, var_name, start, end):
        # Plain attribute assignment keeps instances lightweight.
        self.var_name, self.start, self.end = var_name, start, end
+
+
class ComponentPartPattern:
    """A selector matching (component, part) pairs.

    Every criterion (kind, type, commodity, name, part) may be a single
    value, a list of values, or the criterion's wildcard
    (ComponentKind.ALL, "all", ComponentCommodity.ALL, "all",
    ComponentPart.ALL respectively). A list containing the wildcard is
    equivalent to the wildcard itself.
    """

    def __init__(
        self,
        kind=ComponentKind.ALL,
        type="all",
        commodity=ComponentCommodity.ALL,
        name="all",
        part=ComponentPart.ALL,
    ):
        # Collapse lists containing the wildcard to the plain wildcard so
        # match() can take the fast path. (Uses `list` in isinstance checks;
        # the original used typing.List, which is deprecated for isinstance.)
        self.kind = self._collapse(kind, ComponentKind.ALL)
        self.type = self._collapse(type, "all")
        self.commodity = self._collapse(commodity, ComponentCommodity.ALL)
        self.name = self._collapse(name, "all")
        self.part = self._collapse(part, ComponentPart.ALL)

    @staticmethod
    def _collapse(criterion, wildcard):
        """Return the wildcard if `criterion` is a list containing it,
        otherwise return `criterion` unchanged."""
        if isinstance(criterion, list) and wildcard in criterion:
            return wildcard
        return criterion

    @staticmethod
    def _matches_single(criterion, wildcard, value):
        """Check a single-valued criterion: wildcard matches everything, a
        list matches by membership, anything else by equality."""
        if criterion == wildcard:
            return True
        if isinstance(criterion, list):
            return value in criterion
        return value == criterion

    def match(self, component, part):
        """Return True if `component` and `part` satisfy every criterion."""
        if not self._matches_single(self.kind, ComponentKind.ALL, component.kind):
            return False

        if not self._matches_single(
            self.type, "all", component.__class__.__name__
        ):
            return False

        # The commodity criterion matches if ANY of the component's
        # commodities is selected.
        if self.commodity != ComponentCommodity.ALL:
            if isinstance(self.commodity, list):
                if all(
                    commodity not in self.commodity
                    for commodity in component.commodities()
                ):
                    return False
            else:
                if all(
                    commodity != self.commodity
                    for commodity in component.commodities()
                ):
                    return False

        if not self._matches_single(self.name, "all", component.name):
            return False

        return self._matches_single(self.part, ComponentPart.ALL, part)
+
+
class ComponentCapacity(Enum):
    """Whether a component type carries a sizable capacity.

    NONE: never has a capacity (e.g. busbars, fixed consumption);
    OPTIONAL: has a capacity only if one is configured (e.g. grids);
    REQUIRED: a capacity (value or bounds) must be configured.
    """

    NONE = 1
    OPTIONAL = 2
    REQUIRED = 3
+
+
class AbstractComponent:
    """Base class of every component of the energy system model.

    A component has up to two input and two output commodity slots, an
    optional capacity (a design variable when given as bounds, a fixed
    parameter when given as a number) and contributes variables,
    constraints and cost terms to the design (d_block), operational
    (o_block) and state (s_block) optimization blocks.
    """

    def __init__(
        self,
        name,
        kind,
        commodity_1,
        commodity_2,
        commodity_3,
        commodity_4,
        configuration,
        capacity,
    ):
        """Store identity and commodity slots, normalize and load the
        technical model, and resolve the capacity setting.

        `capacity` is a ComponentCapacity value describing whether this
        component type requires/allows a capacity; the concrete value or
        bounds come from `configuration`.
        """
        self.name = name
        self.kind = kind
        self.input_commodity_1 = commodity_1
        self.input_commodity_2 = commodity_2
        self.output_commodity_1 = commodity_3
        self.output_commodity_2 = commodity_4

        # The technical model may be given inline as a dict or as a path to
        # a JSON file; normalize it to a dict (empty if absent).
        model = configuration.get("model", dict())
        if isinstance(model, str):
            with open(model) as f:
                model = json.load(f)
            configuration["model"] = model
        self._load_model(configuration, model)

        if capacity == ComponentCapacity.NONE:
            self.capacity = None
        elif capacity == ComponentCapacity.OPTIONAL:
            if "capacity" in configuration:
                self.capacity = configuration["capacity"]
            elif "min_capacity" in configuration or "max_capacity" in configuration:
                min_capacity = configuration.get("min_capacity", 0)
                max_capacity = configuration.get("max_capacity", None)
                if min_capacity == max_capacity:
                    self.capacity = min_capacity
                else:
                    self.capacity = (min_capacity, max_capacity)
            else:
                self.capacity = None
        elif capacity == ComponentCapacity.REQUIRED:
            if "capacity" in configuration:
                self.capacity = configuration["capacity"]
            else:
                min_capacity = configuration.get("min_capacity", 0)
                max_capacity = configuration.get("max_capacity", None)
                if min_capacity == max_capacity:
                    self.capacity = min_capacity
                else:
                    self.capacity = (min_capacity, max_capacity)
        else:
            # BUG FIX: previously an unknown value left self.capacity unset,
            # causing an AttributeError below; fail early and clearly.
            raise ValueError(f"Invalid capacity requirement {capacity}!")

        if self.capacity is not None:
            # BUG FIX: read from the normalized `model` dict instead of
            # configuration["model"], which raised a KeyError whenever a
            # capacity was configured without a "model" entry.
            self.specific_capital_expenditure = model.get(
                "specific_capital_expenditure", 0
            )
            self.service_life = model.get("service_life", None)
            self.factor_effort_maintenance_operation = model.get(
                "factor_effort_maintenance_operation", 0
            )

        if configuration is not None and "additional_model_logic" in configuration:
            self.additional_model_logic = configuration["additional_model_logic"]
        else:
            self.additional_model_logic = dict()

    def _load_model(self, configuration, model):
        """Hook for subclasses: extract technical parameters from `model`
        (and possibly write derived entries into `configuration`)."""
        pass

    def commodities_ordered(self):
        """Return the four commodity slots in fixed order
        (input 1, input 2, output 1, output 2); unused slots are None."""
        return (
            self.input_commodity_1,
            self.input_commodity_2,
            self.output_commodity_1,
            self.output_commodity_2,
        )

    def commodities(self):
        """Return the distinct commodities of this component; overridden by
        subclasses (the base class has none)."""
        return []

    def iter_component_parts(self):
        """Yield the ComponentPart values this component contributes;
        overridden by subclasses (the base class yields nothing)."""
        yield from ()

    def iter_links(self):
        """Yield ComponentLink objects coupling this component's parts;
        overridden by subclasses (the base class yields nothing)."""
        yield from ()

    # TODO make these functions into generator functions
    def design_base_variable_names(self):
        """Names and kinds of the design variables of this component."""
        if self.capacity is None:
            return []
        else:
            return [(self.name + ".capacity", VariableKind.UNINDEXED)]

    # TODO make these functions into generator functions
    def non_state_base_variable_names(self):
        """Names and kinds of the operational variables; overridden."""
        return []

    # TODO make these functions into generator functions
    def state_base_variable_names(self):
        """Names and kinds of the state variables; overridden."""
        return []

    def add_design_variables(self, d_block):
        """Add the capacity to d_block: a bounded Var when still to be
        sized (bounds tuple), a fixed Param when given as a number."""
        if self.capacity is not None:
            if isinstance(self.capacity, tuple):
                d_block.add(self.name + ".capacity", pyo.Var(bounds=self.capacity))
            elif isinstance(self.capacity, (int, float)):
                d_block.add(
                    self.name + ".capacity", pyo.Param(initialize=self.capacity)
                )
            else:
                raise ValueError(f"Invalid capacity {self.capacity}!")

    def add_non_state_variables(self, o_block):
        """Hook: add operational (non-state) variables to o_block."""
        pass

    def add_state_variables(self, s_block):
        """Hook: add state variables to s_block."""
        pass

    def add_non_state_model(self, d_block, o_block):
        """Hook: add operational constraints to o_block."""
        pass

    def add_state_model(self, d_block, s_block):
        """Hook: add state equations to s_block."""
        pass

    def add_additional_model_logic(self, d_block, o_block):
        """Add optional, configuration-driven extras to the model.

        Supported logic types:
        - "RampPenalty": penalize the absolute change of a variable between
          consecutive time steps (binary-linearized absolute value).
        - "additional_operational_objective": add factor * variable * step
          size to the objective.
        """
        for logic_name, logic in self.additional_model_logic.items():
            if logic["type"] == "RampPenalty":
                var_name = logic["variable"]
                var = o_block.component_dict[self.name + "." + var_name]

                # Resolve the big-M bound of the penalized variable: either
                # a literal number or the component's capacity.
                var_ub_encoded = logic["variable_ub"]
                if isinstance(var_ub_encoded, (int, float)):
                    var_ub = var_ub_encoded
                elif var_ub_encoded == "capacity":
                    if self.capacity is not None:
                        if isinstance(self.capacity, tuple):
                            var_ub = d_block.component_dict[self.name + ".capacity"].ub
                        elif isinstance(self.capacity, (int, float)):
                            var_ub = self.capacity
                        else:
                            raise ValueError(f"Invalid capacity {self.capacity}!")
                    else:
                        raise ValueError(f"Invalid variable_ub {var_ub_encoded}")
                else:
                    raise ValueError(f"Invalid variable_ub {var_ub_encoded}")

                # z selects the sign of the change; change >= |var[t-1]-var[t]|.
                z = pyo.Var(o_block.T, domain=pyo.Binary)
                o_block.add(self.name + "." + logic_name + "_z", z)
                change = pyo.Var(o_block.T, bounds=(0, None))
                o_block.add(self.name + "." + logic_name + "_change", change)

                def rule(m, t):
                    if t == o_block.T.first():
                        return pyo.Constraint.Skip
                    else:
                        return 0 <= change[t] - (
                            var[o_block.T[o_block.T.ord(t) - 1]] - var[t]
                        )

                o_block.add(
                    self.name + "." + logic_name + "_cons_1",
                    pyo.Constraint(o_block.T, rule=rule),
                )

                def rule(m, t):
                    if t == o_block.T.first():
                        return pyo.Constraint.Skip
                    else:
                        return (
                            change[t] - (var[o_block.T[o_block.T.ord(t) - 1]] - var[t])
                            <= 2.0 * var_ub * z[t]
                        )

                o_block.add(
                    self.name + "." + logic_name + "_cons_2",
                    pyo.Constraint(o_block.T, rule=rule),
                )

                def rule(m, t):
                    if t == o_block.T.first():
                        return pyo.Constraint.Skip
                    else:
                        return 0 <= change[t] - (
                            var[t] - var[o_block.T[o_block.T.ord(t) - 1]]
                        )

                o_block.add(
                    self.name + "." + logic_name + "_cons_3",
                    pyo.Constraint(o_block.T, rule=rule),
                )

                def rule(m, t):
                    if t == o_block.T.first():
                        return pyo.Constraint.Skip
                    else:
                        return change[t] - (
                            var[t] - var[o_block.T[o_block.T.ord(t) - 1]]
                        ) <= 2.0 * var_ub * (1.0 - z[t])

                o_block.add(
                    self.name + "." + logic_name + "_cons_4",
                    pyo.Constraint(o_block.T, rule=rule),
                )

                # This objective term should be scaled by the time step,
                # because the change itself is bigger when the time step is
                # longer, so this implicitly scales the objective term.
                objective_term = pyo.quicksum(
                    change[t] * logic["objective_factor"] for t in o_block.T
                )

                o_block.add_general_scaled_objective(objective_term)

            elif logic["type"] == "additional_operational_objective":
                factor_encoded, var_name = logic["value"]
                if isinstance(factor_encoded, (int, float)):
                    factor = factor_encoded
                else:
                    raise ValueError(f"Invalid factor {factor_encoded}!")
                var = o_block.component_dict[self.name + "." + var_name]

                objective_term = pyo.quicksum(
                    factor * var[t] * o_block.step_size(t) for t in o_block.T
                )

                o_block.add_general_scaled_objective(objective_term)

    def design_annuity(self, model, T, r, q):
        """Annuity of the capital expenditure over the observation period T
        with interest factor r and price change factor q; 0.0 for
        components without capacity."""
        if self.capacity is not None:
            A_0 = (
                model.component_dict[self.name + ".capacity"]
                * self.specific_capital_expenditure
            )
        else:
            A_0 = 0.0
        if isinstance(A_0, float) and A_0 == 0.0:
            self_design_annuity = 0.0
        else:
            # Use the component's service life if known, otherwise assume it
            # spans the whole observation period.
            if self.service_life is not None:
                T_N = self.service_life
            else:
                T_N = T
            f_b = self.factor_effort_maintenance_operation
            self_design_annuity = design_annuity(A_0, T, T_N, r, q, f_b)

        return self_design_annuity

    # TODO rename model to o_block
    def _operational_exprenditure(self, model):
        """Operational expenditure expression; 0.0 by default.
        NOTE(review): the misspelled name is kept because subclasses
        override it (e.g. BaseGrid)."""
        return 0.0

    def operational_annuity(self, model, T, r, q):
        """Annuity of the operational expenditure over T; 0.0 when there is
        no operational expenditure.

        Normally A_V would be scaled by w to the first-year expenditure
        A_V1, but that is done for all operational annuities right before
        the objective is added to the model.
        """
        A_V = self._operational_exprenditure(model)
        if isinstance(A_V, float) and A_V == 0.0:
            self_operational_annuity = 0.0
        else:
            self_operational_annuity = operational_annuity(T, r, q, A_V)

        return self_operational_annuity

    def peak_power_cost(self, model):
        """Peak power cost expression; 0.0 by default."""
        return 0.0

    def co2_emissions(self, model):
        """CO2 emission expression; 0.0 by default.

        Emission factor references:
        heat https://www.uni-goettingen.de/de/document/download/e778b3727c64ed6f962e4c1cea80fa2f.pdf/CO2%20Emissionen_2016.pdf
        https://www.umweltbundesamt.de/presse/pressemitteilungen/bilanz-2019-co2-emissionen-pro-kilowattstunde-strom
        """
        return 0.0
+
+
class BaseBusBar(AbstractComponent):
    """A lossless busbar for a single commodity: total outflow equals
    total inflow in every time step."""

    def __init__(self, name, commodity):
        super().__init__(
            name=name,
            kind=ComponentKind.BUSBAR,
            commodity_1=commodity,
            commodity_2=None,
            commodity_3=commodity,
            commodity_4=None,
            configuration=dict(),
            capacity=ComponentCapacity.NONE,
        )
        self.commodity = commodity

    def commodities(self):
        """The single commodity the busbar distributes."""
        return [self.commodity]

    def iter_component_parts(self):
        """A busbar contributes only an operational (non-state) part."""
        yield ComponentPart.NONE_STATE

    def non_state_base_variable_names(self):
        """Names of the indexed in- and outflow variables."""
        return [
            (self.name + ".input_1", VariableKind.INDEXED),
            (self.name + ".output_1", VariableKind.INDEXED),
        ]

    def add_non_state_variables(self, o_block):
        """Create the non-negative in- and outflow variables."""
        for suffix in (".input_1", ".output_1"):
            o_block.add(self.name + suffix, pyo.Var(o_block.T, bounds=(0, None)))

    def add_non_state_model(self, d_block, o_block):
        """Enforce flow conservation: outflow equals inflow in every step."""
        inflow = o_block.component_dict[self.name + ".input_1"]
        outflow = o_block.component_dict[self.name + ".output_1"]

        def balance(m, t):
            return outflow[t] == inflow[t]

        o_block.add(self.name + ".sum", pyo.Constraint(o_block.T, rule=balance))
+
+
class BaseComponent(AbstractComponent):
    """A conversion component with a required capacity.

    The conversion behavior is described by one or two independent flow
    variables and one or two conversion tuples (independent variable name,
    dependent variable name, factor). The default model makes output_1
    independent and derives input_1 = output_1 / efficiency.
    """

    def __init__(
        self, name, commodity_1, commodity_2, commodity_3, commodity_4, configuration
    ):
        super().__init__(
            name=name,
            kind=ComponentKind.BASE,
            commodity_1=commodity_1,
            commodity_2=commodity_2,
            commodity_3=commodity_3,
            commodity_4=commodity_4,
            configuration=configuration,
            capacity=ComponentCapacity.REQUIRED,
        )

        self._setup_conversions(configuration)

        # Distinct non-None commodities, in slot order.
        self.commodities_list = []
        for commodity in [commodity_1, commodity_2, commodity_3, commodity_4]:
            if commodity is not None and commodity not in self.commodities_list:
                self.commodities_list.append(commodity)

    def _setup_conversions(self, configuration):
        """Derive the independent variables, the conversions and the
        resulting operational variable names from the configuration."""
        self._load_conversions(configuration)

        self.indep_var_1 = configuration["indep_var_1"]
        self.indep_var_2 = configuration["indep_var_2"]
        self.conversion_1 = configuration["conversion_1"]
        self.conversion_2 = configuration["conversion_2"]
        # A conversion is (independent var name, dependent var name, factor).
        dep_var_1 = self.conversion_1[1]
        dep_var_2 = self.conversion_2[1] if self.conversion_2 is not None else None

        # Only the flow slots that actually occur in this component's model.
        self.operational_variables = [
            (self.name + "." + var, VariableKind.INDEXED)
            for var in ["input_1", "input_2", "output_1", "output_2"]
            if var in [self.indep_var_1, self.indep_var_2, dep_var_1, dep_var_2]
        ]

    def _load_conversions(self, configuration):
        """Default single-efficiency model: output_1 is independent and
        input_1 = output_1 / efficiency. Subclasses may override to
        provide different conversions."""
        efficiency = configuration["efficiency"]

        configuration["indep_var_1"] = "output_1"
        configuration["indep_var_2"] = None
        configuration["conversion_1"] = ("output_1", "input_1", 1.0 / efficiency)
        configuration["conversion_2"] = None

    def _load_model(self, configuration, model):
        """Copy the efficiency from the technical model into the
        configuration for _load_conversions."""
        configuration["efficiency"] = model["efficiency"]

    def commodities(self):
        """The distinct commodities of this component."""
        return self.commodities_list

    def iter_component_parts(self):
        """A conversion component has a design part (capacity) and an
        operational part, but no state."""
        yield ComponentPart.DESIGN
        yield ComponentPart.NONE_STATE

    def non_state_base_variable_names(self):
        """Names of all flow variables occurring in this component."""
        return self.operational_variables

    def add_non_state_variables(self, o_block):
        """Create the independent flow variable(s); the dependent ones are
        created later by _handle_conversion."""
        # NOTE(review): the locals are named dep_var_* but they hold the
        # INDEPENDENT variables (named self.indep_var_*).
        dep_var_1 = pyo.Var(o_block.T, bounds=(0, None))
        o_block.add(self.name + "." + self.indep_var_1, dep_var_1)

        if self.indep_var_2 is not None:
            dep_var_2 = pyo.Var(o_block.T, bounds=(0, None))
            o_block.add(self.name + "." + self.indep_var_2, dep_var_2)

    def add_non_state_model(self, d_block, o_block):
        """Bound the independent flows by the capacity, forbid simultaneous
        use of both independent flows (big-M with a binary), and add the
        conversion equations."""
        capacity = d_block.component_dict[self.name + ".capacity"]

        dep_var_1 = o_block.component_dict[self.name + "." + self.indep_var_1]

        def rule(m, t):
            return dep_var_1[t] <= capacity

        o_block.add(
            self.name + "." + self.indep_var_1 + "_capacity_cons",
            pyo.Constraint(o_block.T, rule=rule),
        )

        if self.indep_var_2 is not None:
            dep_var_2 = o_block.component_dict[self.name + "." + self.indep_var_2]

            def rule(m, t):
                return dep_var_2[t] <= capacity

            o_block.add(
                self.name + "." + self.indep_var_2 + "_capacity_cons",
                pyo.Constraint(o_block.T, rule=rule),
            )

            # Binary choosing which of the two flows may be active per step.
            z_bi_flow = pyo.Var(o_block.T, domain=pyo.Binary)
            o_block.add(self.name + ".z_bi_flow", z_bi_flow)

            if isinstance(self.capacity, (int, float)):
                # Fixed capacity: use it directly as big-M.

                def rule(m, t):
                    return dep_var_1[t] <= z_bi_flow[t] * capacity

                o_block.add(
                    self.name + ".bi_flow_cons_1", pyo.Constraint(o_block.T, rule=rule)
                )

                def rule(m, t):
                    return dep_var_2[t] <= (1.0 - z_bi_flow[t]) * capacity

                o_block.add(
                    self.name + ".bi_flow_cons_2", pyo.Constraint(o_block.T, rule=rule)
                )
            else:
                # Capacity is a variable: use its upper bound as big-M.

                def rule(m, t):
                    return dep_var_1[t] <= z_bi_flow[t] * capacity.ub

                o_block.add(
                    self.name + ".bi_flow_cons_1", pyo.Constraint(o_block.T, rule=rule)
                )

                def rule(m, t):
                    return dep_var_2[t] <= (1.0 - z_bi_flow[t]) * capacity.ub

                o_block.add(
                    self.name + ".bi_flow_cons_2", pyo.Constraint(o_block.T, rule=rule)
                )

        self._handle_conversion(d_block, o_block, self.conversion_1)

        if self.conversion_2 is not None:
            self._handle_conversion(d_block, o_block, self.conversion_2)

    def _handle_conversion(self, d_block, o_block, conversion):
        """Create the dependent variable of `conversion` and constrain it to
        independent * factor; the factor may be a scalar or indexed by t."""
        indep_var = o_block.component_dict[self.name + "." + conversion[0]]

        dep_var = pyo.Var(o_block.T, bounds=(0, None))
        o_block.add(self.name + "." + conversion[1], dep_var)

        if isinstance(conversion[2], (int, float)):

            def rule(m, t):
                return dep_var[t] == indep_var[t] * conversion[2]

            o_block.add(
                self.name
                + "."
                + conversion[0]
                + "_to_"
                + conversion[1]
                + "_conversion",
                pyo.Constraint(o_block.T, rule=rule),
            )
        else:

            def rule(m, t):
                return dep_var[t] == indep_var[t] * conversion[2][t]

            o_block.add(
                self.name
                + "."
                + conversion[0]
                + "_to_"
                + conversion[1]
                + "_conversion",
                pyo.Constraint(o_block.T, rule=rule),
            )
+
+
class BaseConsumption(AbstractComponent):
    """A component consuming a fixed, time-series-given amount of a
    single commodity."""

    def __init__(self, name, commodity, configuration):
        super().__init__(
            name=name,
            kind=ComponentKind.CONSUMPTION,
            commodity_1=commodity,
            commodity_2=None,
            commodity_3=None,
            commodity_4=None,
            configuration=configuration,
            capacity=ComponentCapacity.NONE,
        )
        self.commodity = commodity
        # Demand time series; resampled to the model dynamic later.
        self.consumption = configuration["consumption"]

    def commodities(self):
        """The single commodity this component consumes."""
        return [self.commodity]

    def iter_component_parts(self):
        """A consumption has no design or state part, only operation."""
        yield ComponentPart.NONE_STATE

    def non_state_base_variable_names(self):
        """Name of the indexed demand inflow variable."""
        return [(self.name + ".input_1", VariableKind.INDEXED)]

    def add_non_state_variables(self, o_block):
        """Create the non-negative inflow variable."""
        o_block.add(self.name + ".input_1", pyo.Var(o_block.T, bounds=(0, None)))

    def add_non_state_model(self, d_block, o_block):
        """Fix the inflow to the resampled demand profile."""
        inflow = o_block.component_dict[self.name + ".input_1"]
        profile = self.consumption.resample(o_block.dynamic).values

        def demand(m, t):
            return inflow[t] == profile[t]

        o_block.add(
            self.name + ".consumption_cons", pyo.Constraint(o_block.T, rule=demand)
        )
+
+
class BaseGeneration(AbstractComponent):
    """A component producing a fixed, time-series-given amount of a
    single commodity (e.g. a non-dispatchable generator)."""

    def __init__(self, name, commodity, configuration):
        super().__init__(
            name=name,
            kind=ComponentKind.GENERATION,
            commodity_1=None,
            commodity_2=None,
            commodity_3=commodity,
            commodity_4=None,
            configuration=configuration,
            capacity=ComponentCapacity.NONE,
        )
        self.commodity = commodity
        # Generation time series; resampled to the model dynamic later.
        self.generation = configuration["generation"]

    def commodities(self):
        """The single commodity this component produces."""
        return [self.commodity]

    def iter_component_parts(self):
        """A generation has no design or state part, only operation."""
        yield ComponentPart.NONE_STATE

    def non_state_base_variable_names(self):
        """Name of the indexed generation outflow variable."""
        return [(self.name + ".output_1", VariableKind.INDEXED)]

    def add_non_state_variables(self, o_block):
        """Create the non-negative outflow variable."""
        o_block.add(self.name + ".output_1", pyo.Var(o_block.T, bounds=(0, None)))

    def add_non_state_model(self, d_block, o_block):
        """Fix the outflow to the resampled generation profile."""
        outflow = o_block.component_dict[self.name + ".output_1"]
        profile = self.generation.resample(o_block.dynamic).values

        def production(m, t):
            return outflow[t] == profile[t]

        o_block.add(
            self.name + ".generation_cons", pyo.Constraint(o_block.T, rule=production)
        )
+
+
class BaseGrid(AbstractComponent):
    """A connection to an external commodity grid.

    Energy drawn from the grid (output_1) is bought at `price`; energy fed
    into the grid (input_1) is remunerated at `injection_price`. An
    optional capacity limits both flow directions; peak power and CO2
    emissions of the drawn energy can additionally be priced.
    """

    def __init__(self, name, commodity, configuration):
        super().__init__(
            name=name,
            kind=ComponentKind.GRID,
            commodity_1=commodity,
            commodity_2=None,
            commodity_3=commodity,
            commodity_4=None,
            configuration=configuration,
            capacity=ComponentCapacity.OPTIONAL,
        )

        self.commodity = commodity

        # Prices/factors may be scalars or time series (resampled later).
        self.price = configuration.get("price", 0)
        self.injection_price = configuration.get("injection_price", 0)
        self.peak_power_cost_ = configuration.get("peak_power_cost", 0)
        self.co2_emissions_ = configuration.get("co2_emissions", 0)

    def commodities(self):
        """The single commodity this grid trades."""
        return [self.commodity]

    def iter_component_parts(self):
        """Design part only when a capacity is configured; always an
        operational part."""
        if self.capacity is not None:
            yield ComponentPart.DESIGN

        yield ComponentPart.NONE_STATE

    def non_state_base_variable_names(self):
        """Names of the indexed injection (input_1) and supply (output_1)
        variables."""
        return [
            (self.name + ".input_1", VariableKind.INDEXED),
            (self.name + ".output_1", VariableKind.INDEXED),
        ]

    def add_non_state_variables(self, o_block):
        """Create the non-negative injection and supply variables."""
        input = pyo.Var(o_block.T, bounds=(0, None))
        o_block.add(self.name + ".input_1", input)

        output = pyo.Var(o_block.T, bounds=(0, None))
        o_block.add(self.name + ".output_1", output)

    def add_non_state_model(self, d_block, o_block):
        """Limit both flow directions by the capacity, if one exists."""
        input = o_block.component_dict[self.name + ".input_1"]

        output = o_block.component_dict[self.name + ".output_1"]

        if self.capacity is not None:
            capacity = d_block.component_dict[self.name + ".capacity"]

            def rule(m, t):
                return input[t] <= capacity

            o_block.add(
                self.name + ".capacity_input_cons", pyo.Constraint(o_block.T, rule=rule)
            )

            def rule(m, t):
                return output[t] <= capacity

            o_block.add(
                self.name + ".capacity_output_cons",
                pyo.Constraint(o_block.T, rule=rule),
            )

    def _operational_exprenditure(self, model):
        """Energy cost minus injection revenue, summed over all time steps
        and weighted with the step size."""
        if isinstance(self.price, (int, float)):
            output = model.component_dict[self.name + ".output_1"]
            price_function = lambda t: output[t] * self.price
        else:
            price = self.price.resample(model.dynamic).values

            output = model.component_dict[self.name + ".output_1"]
            price_function = lambda t: output[t] * price[t]

        if isinstance(self.injection_price, (int, float)):
            input = model.component_dict[self.name + ".input_1"]
            injection_price_function = lambda t: input[t] * self.injection_price
        else:
            injection_price = self.injection_price.resample(model.dynamic).values

            input = model.component_dict[self.name + ".input_1"]
            injection_price_function = lambda t: input[t] * injection_price[t]

        return pyo.quicksum(
            (price_function(t) - injection_price_function(t)) * model.step_size(t)
            for t in model.T
        )

    def peak_power_cost(self, model):
        """Add a peak-import variable bounding the supply in every step and
        return its cost contribution."""
        peak_import = pyo.Var()
        model.add(self.name + ".peak_import", peak_import)

        output = model.component_dict[self.name + ".output_1"]

        def rule(m, t):
            return output[t] <= peak_import

        model.add(self.name + ".peak_import_cons", pyo.Constraint(model.T, rule=rule))

        return peak_import * self.peak_power_cost_

    def co2_emissions(self, model):
        """CO2 emissions of the energy drawn from the grid, weighted with
        the step size (analogous to _operational_exprenditure)."""
        if isinstance(self.co2_emissions_, (int, float)):
            output = model.component_dict[self.name + ".output_1"]
            co2_emissions_function = lambda t: output[t] * self.co2_emissions_
        else:
            co2_emissions_ = self.co2_emissions_.resample(model.dynamic).values

            output = model.component_dict[self.name + ".output_1"]
            co2_emissions_function = lambda t: output[t] * co2_emissions_[t]

        # BUG FIX: the emission expression was built but never returned, so
        # this method implicitly returned None instead of an expression
        # (the base class contract returns 0.0 or an expression). Aggregate
        # analogous to the operational expenditure.
        return pyo.quicksum(
            co2_emissions_function(t) * model.step_size(t) for t in model.T
        )
+
+
class BaseStorage(AbstractComponent):
    """A storage for a single commodity with charge/discharge efficiencies,
    power limits derived from the capacity via energy-to-power ratios,
    and a self-discharge loss."""

    def __init__(self, name, commodity, configuration):
        super().__init__(
            name=name,
            kind=ComponentKind.STORAGE,
            commodity_1=commodity,
            commodity_2=None,
            commodity_3=commodity,
            commodity_4=None,
            configuration=configuration,
            capacity=ComponentCapacity.REQUIRED,
        )

        self.commodity = commodity
        # Optional boundary conditions on the state of energy. NOTE(review):
        # these attributes exist only when configured — readers must check
        # with hasattr before accessing them.
        if "first_soe" in configuration:
            self.first_soe = configuration["first_soe"]
        if "final_soe" in configuration:
            self.final_soe = configuration["final_soe"]
+
    def _load_model(self, configuration, model):
        """Read the technical storage parameters from the model dict."""
        self.input_efficiency = model["input_efficiency"]
        self.e2p_in = model["e2p_in"]  # energy-to-power ratio, charging
        self.output_efficiency = model["output_efficiency"]
        self.e2p_out = model["e2p_out"]  # energy-to-power ratio, discharging
        self.self_discharging_loss = model["self_discharging_loss"]
+
    def commodities(self):
        """The single commodity this storage stores."""
        return [self.commodity]
+
    def iter_component_parts(self):
        """A storage contributes design, operational and state parts."""
        yield ComponentPart.DESIGN
        yield ComponentPart.NONE_STATE
        yield ComponentPart.STATE
+
    def iter_links(self):
        """Declare that the charge (input_1) and discharge (output_1) flows
        couple this storage's operational part with its state part."""
        yield VariableLink(
            self.name + ".input_1",
            (self.name, ComponentPart.NONE_STATE),
            (self.name, ComponentPart.STATE),
        )
        yield VariableLink(
            self.name + ".output_1",
            (self.name, ComponentPart.NONE_STATE),
            (self.name, ComponentPart.STATE),
        )
+
+    def state_base_variable_names(self):
+        return [
+            (self.name + ".input_1", VariableKind.INDEXED),
+            (self.name + ".output_1", VariableKind.INDEXED),
+        ]
+
    def state_base_variable_names(self):
        """Name of the extended-indexed state-of-energy variable."""
        return [
            (self.name + ".energy", VariableKind.EXTENDED_INDEXED),
        ]
+
+    def add_non_state_variables(self, o_block):
+        input = pyo.Var(o_block.T, bounds=(0, None))
+        o_block.add(self.name + ".input_1", input)
+
+        output = pyo.Var(o_block.T, bounds=(0, None))
+        o_block.add(self.name + ".output_1", output)
+
    def add_state_variables(self, s_block):
        """Create the non-negative state-of-energy variable on T_prime
        (presumably the time axis extended by one point for the initial
        state — confirm against the s_block definition)."""
        energy = pyo.Var(s_block.T_prime, bounds=(0, None))
        s_block.add(self.name + ".energy", energy)
+
    def add_non_state_model(self, d_block, o_block):
        """Add charge/discharge power limits and their mutual exclusion.

        Charging and discharging power are limited by capacity divided by
        the respective energy-to-power ratio. A binary variable per time
        step prevents charging and discharging simultaneously (big-M using
        the capacity, or its upper bound when the capacity is a variable).
        """
        capacity = d_block.component_dict[self.name + ".capacity"]
        input = o_block.component_dict[self.name + ".input_1"]
        output = o_block.component_dict[self.name + ".output_1"]

        # Charging power limit: effective inflow into the storage.
        def rule(m, t):
            return input[t] * self.input_efficiency <= capacity / self.e2p_in

        o_block.add(
            self.name + ".capacity_input_cons", pyo.Constraint(o_block.T, rule=rule)
        )

        # Discharging power limit: internal outflow before output losses.
        def rule(m, t):
            return output[t] / self.output_efficiency <= capacity / self.e2p_out

        o_block.add(
            self.name + ".capacity_output_cons", pyo.Constraint(o_block.T, rule=rule)
        )

        # Binary choosing the active flow direction per time step.
        z_bi_flow = pyo.Var(o_block.T, domain=pyo.Binary)
        o_block.add(self.name + ".z_bi_flow", z_bi_flow)

        if isinstance(self.capacity, (int, float)):
            # Fixed capacity: use it directly in the big-M terms.

            def rule(m, t):
                return input[t] <= z_bi_flow[t] * (
                    capacity / (self.e2p_in * self.input_efficiency)
                )

            o_block.add(
                self.name + ".bi_flow_cons_1", pyo.Constraint(o_block.T, rule=rule)
            )

            def rule(m, t):
                return output[t] <= (1.0 - z_bi_flow[t]) * (
                    (capacity * self.output_efficiency) / self.e2p_out
                )

            o_block.add(
                self.name + ".bi_flow_cons_2", pyo.Constraint(o_block.T, rule=rule)
            )
        else:
            # Capacity is a variable: use its upper bound as big-M.

            def rule(m, t):
                return input[t] <= z_bi_flow[t] * (
                    capacity.ub / (self.e2p_in * self.input_efficiency)
                )

            o_block.add(
                self.name + ".bi_flow_cons_1", pyo.Constraint(o_block.T, rule=rule)
            )

            def rule(m, t):
                return output[t] <= (1.0 - z_bi_flow[t]) * (
                    (capacity.ub * self.output_efficiency) / self.e2p_out
                )

            o_block.add(
                self.name + ".bi_flow_cons_2", pyo.Constraint(o_block.T, rule=rule)
            )
+
+    def add_state_model(self, d_block, s_block):
+        capacity = d_block.component_dict[self.name + ".capacity"]
+        energy = s_block.component_dict[self.name + ".energy"]
+        input = s_block.component_dict[self.name + ".input_1"]
+        output = s_block.component_dict[self.name + ".output_1"]
+
+        def rule(m, t):
+            return energy[t] <= capacity
+
+        s_block.add(self.name + ".energy_ub", pyo.Constraint(s_block.T, rule=rule))
+
+        if hasattr(self, "first_soe"):
+            factor_encoded, var_name = self.first_soe["value"]
+            var = s_block.component_dict[self.name + "." + var_name]
+            if isinstance(factor_encoded, (int, float)):
+                factor = factor_encoded
+            else:
+                raise ValueError(f"Invalid factor {factor_encoded}!")
+            sense = self.first_soe["sense"]
+            if sense == "==":
+                expr = energy[s_block.T_prime.first()] == factor * var
+            elif sense == "<=":
+                expr = energy[s_block.T_prime.first()] <= factor * var
+            elif sense == ">=":
+                expr = energy[s_block.T_prime.first()] >= factor * var
+            else:
+                raise ValueError(f"Invalid sense {sense}!")
+            s_block.add(self.name + ".fix_first_energy", pyo.Constraint(expr=expr))
+
+        def rule(m, t):
+            return energy[t] == energy[s_block.T_prime[s_block.T_prime.ord(t) - 1]] * (
+                1.0 - self.self_discharging_loss * s_block.step_size(t)
+            ) + input[t] * self.input_efficiency * s_block.step_size(t) - output[
+                t
+            ] / self.output_efficiency * s_block.step_size(
+                t
+            )
+
+        s_block.add(self.name + ".state_equation", pyo.Constraint(s_block.T, rule=rule))
+
+        if hasattr(self, "final_soe"):
+            factor_encoded, var = self.final_soe["value"]
+            if isinstance(factor_encoded, (int, float)):
+                factor = factor_encoded
+            else:
+                raise ValueError(f"Invalid factor {factor_encoded}!")
+            if var == "first_soe":
+                var = energy[s_block.T_prime.first()]
+            else:
+                var = s_block.component_dict[self.name + "." + var]
+            if self.final_soe["sense"] == "==":
+                expr = energy[s_block.T_prime.last()] == factor * var
+            elif self.final_soe["sense"] == "<=":
+                expr = energy[s_block.T_prime.last()] <= factor * var
+            elif self.final_soe["sense"] == ">=":
+                expr = energy[s_block.T_prime.last()] >= factor * var
+            else:
+                sense = self.final_soe["sense"]
+                raise ValueError(f"Invalid sense {sense}!")
+            s_block.add(self.name + ".fix_final_energy", pyo.Constraint(expr=expr))
diff --git a/component/electricity.py b/component/electricity.py
new file mode 100644
index 0000000000000000000000000000000000000000..8c8c8482da54030314c210455e97bf6600111626
--- /dev/null
+++ b/component/electricity.py
@@ -0,0 +1,193 @@
+"""
+MIT License
+
+Copyright (c) 2023 RWTH Aachen University
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+
+from Model_Library.component.core import (
+    BaseBusBar,
+    BaseComponent,
+    BaseConsumption,
+    BaseGeneration,
+    BaseGrid,
+    BaseStorage,
+    ComponentCommodity,
+)
+from Model_Library.optimization_model import VariableKind
+
+import pyomo.environ as pyo
+
+
+class ElectricalBusBar(BaseBusBar):
+    def __init__(self, name, configuration):
+        super().__init__(name=name, commodity=ComponentCommodity.ELECTRICITY)
+
+
+class BiPowerElectronic(BaseComponent):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity_1=ComponentCommodity.ELECTRICITY,
+            commodity_2=ComponentCommodity.ELECTRICITY,
+            commodity_3=ComponentCommodity.ELECTRICITY,
+            commodity_4=ComponentCommodity.ELECTRICITY,
+            configuration=configuration,
+        )
+
+    def _load_model(self, configuration, model):
+        configuration["efficiency_a"] = model["efficiency_a"]
+        configuration["efficiency_b"] = model["efficiency_b"]
+        configuration["rated_power_side_a"] = model["rated_power_side_a"]
+        configuration["rated_power_side_b"] = model["rated_power_side_b"]
+
+    def _load_conversions(self, configuration):
+        efficiency_a = configuration["efficiency_a"]  # TODO move comment to docu 1 -> 2
+        efficiency_b = configuration["efficiency_b"]  # TODO move comment to docu 2 -> 1
+
+        if configuration["rated_power_side_a"] == "output":
+            configuration["indep_var_1"] = "output_2"
+            configuration["conversion_1"] = ("output_2", "input_1", 1.0 / efficiency_a)
+        else:
+            configuration["indep_var_1"] = "input_1"
+            configuration["conversion_1"] = ("input_1", "output_2", efficiency_a)
+
+        if configuration["rated_power_side_b"] == "output":
+            configuration["indep_var_2"] = "output_1"
+            configuration["conversion_2"] = ("output_1", "input_2", 1.0 / efficiency_b)
+        else:
+            configuration["indep_var_2"] = "input_2"
+            configuration["conversion_2"] = ("input_2", "output_1", efficiency_b)
+
+
+class PowerElectronic(BaseComponent):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity_1=ComponentCommodity.ELECTRICITY,
+            commodity_2=None,
+            commodity_3=ComponentCommodity.ELECTRICITY,
+            commodity_4=None,
+            configuration=configuration,
+        )
+
+    def _load_model(self, configuration, model):
+        configuration["efficiency"] = model["efficiency"]
+        configuration["rated_power_side"] = model["rated_power_side"]
+
+    def _load_conversions(self, configuration):
+        efficiency = configuration["efficiency"]
+
+        if configuration["rated_power_side"] == "output":
+            configuration["indep_var_1"] = "output_1"
+            configuration["conversion_1"] = ("output_1", "input_1", 1.0 / efficiency)
+        else:
+            configuration["indep_var_1"] = "input_1"
+            configuration["conversion_1"] = ("input_1", "output_1", efficiency)
+
+        configuration["indep_var_2"] = None
+        configuration["conversion_2"] = None
+
+
+class PVGenerator(BaseComponent):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity_1=None,
+            commodity_2=None,
+            commodity_3=ComponentCommodity.ELECTRICITY,
+            commodity_4=None,
+            configuration=configuration,
+        )
+
+        if "irradiance" in configuration:
+            self.irradiance = configuration["irradiance"]
+        if "temperature" in configuration:
+            self.temperature = configuration["temperature"]
+        if "power_factors" in configuration:
+            self.power_factors = configuration["power_factors"]
+
+    def _load_model(self, configuration, model):
+        self.noct = model["NOCT"]
+        self.temperature_coefficient = model["temperature_coefficient"]
+
+    def _setup_conversions(self, configuration):
+        self.operational_variables = [(self.name + ".output_1", VariableKind.INDEXED)]
+
+    def add_non_state_variables(self, o_block):
+        output = pyo.Var(o_block.T, bounds=(0, None))
+        o_block.add(self.name + ".output_1", output)
+
+    def add_non_state_model(self, d_block, o_block):
+        if hasattr(self, "power_factors"):
+            power_factor = self.power_factors.resample(o_block.dynamic).values
+        else:
+            irradiance = self.irradiance.resample(o_block.dynamic).values
+            temperature = self.temperature.resample(o_block.dynamic).values
+            cell_temp = temperature + (irradiance / 800.0) * (self.noct - 20.0)
+            power_factor = (irradiance / 1000.0) * (
+                1 - self.temperature_coefficient * (cell_temp - 25)
+            )
+            power_factor[power_factor > 1.0] = 1.0
+            power_factor[power_factor < 0.0] = 0.0
+
+        output = o_block.component_dict[self.name + ".output_1"]
+        capacity = d_block.component_dict[self.name + ".capacity"]
+
+        def rule(m, t):
+            return output[t] <= capacity * power_factor[t]
+
+        o_block.add(self.name + ".generation", pyo.Constraint(o_block.T, rule=rule))
+
+
+class ElectricalConsumption(BaseConsumption):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity=ComponentCommodity.ELECTRICITY,
+            configuration=configuration,
+        )
+
+
+class ElectricalGeneration(BaseGeneration):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity=ComponentCommodity.ELECTRICITY,
+            configuration=configuration,
+        )
+
+
+class ElectricalGrid(BaseGrid):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity=ComponentCommodity.ELECTRICITY,
+            configuration=configuration,
+        )
+
+
+class Battery(BaseStorage):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity=ComponentCommodity.ELECTRICITY,
+            configuration=configuration,
+        )
diff --git a/component/gas.py b/component/gas.py
new file mode 100644
index 0000000000000000000000000000000000000000..77a0cc4a72899cad33219c3f165ad5824acaf687
--- /dev/null
+++ b/component/gas.py
@@ -0,0 +1,65 @@
+"""
+MIT License
+
+Copyright (c) 2023 RWTH Aachen University
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+
+from Model_Library.component.core import BaseComponent, BaseGrid, ComponentCommodity
+
+
+class CHP(BaseComponent):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity_1=ComponentCommodity.GAS,
+            commodity_2=None,
+            commodity_3=ComponentCommodity.ELECTRICITY,
+            commodity_4=ComponentCommodity.HEAT,
+            configuration=configuration,
+        )
+
+    def _load_model(self, configuration, model):
+        configuration["electric_efficiency"] = model["electric_efficiency"]
+        configuration["thermal_efficiency"] = model["thermal_efficiency"]
+
+    def _load_conversions(self, configuration):
+        electric_efficiency = configuration["electric_efficiency"]
+        thermal_efficiency = configuration["thermal_efficiency"]
+
+        configuration["indep_var_1"] = "output_1"
+        configuration["indep_var_2"] = None
+        configuration["conversion_1"] = (
+            "output_1",
+            "input_1",
+            1.0 / electric_efficiency,
+        )
+        configuration["conversion_2"] = (
+            "output_1",
+            "output_2",
+            thermal_efficiency / electric_efficiency,
+        )
+
+
+class GasGrid(BaseGrid):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name, commodity=ComponentCommodity.GAS, configuration=configuration
+        )
diff --git a/component/heat.py b/component/heat.py
new file mode 100644
index 0000000000000000000000000000000000000000..66609a0fa4aace5d20ef9df979d9a11e6b456ef2
--- /dev/null
+++ b/component/heat.py
@@ -0,0 +1,162 @@
+"""
+MIT License
+
+Copyright (c) 2023 RWTH Aachen University
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+
+from Model_Library.component.core import (
+    BaseComponent,
+    BaseConsumption,
+    BaseGrid,
+    BaseStorage,
+    ComponentCommodity,
+)
+from Model_Library.optimization_model import VariableKind
+
+import pyomo.environ as pyo
+
+
+class ElectricBoiler(BaseComponent):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity_1=ComponentCommodity.ELECTRICITY,
+            commodity_2=None,
+            commodity_3=ComponentCommodity.HEAT,
+            commodity_4=None,
+            configuration=configuration,
+        )
+
+
+class GasBoiler(BaseComponent):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity_1=ComponentCommodity.GAS,
+            commodity_2=None,
+            commodity_3=ComponentCommodity.HEAT,
+            commodity_4=None,
+            configuration=configuration,
+        )
+
+
+class GasHeatPump(BaseComponent):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity_1=ComponentCommodity.GAS,
+            commodity_2=None,
+            commodity_3=ComponentCommodity.HEAT,
+            commodity_4=None,
+            configuration=configuration,
+        )
+
+
+class HeatExchanger(BaseComponent):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity_1=ComponentCommodity.HEAT,
+            commodity_2=None,
+            commodity_3=ComponentCommodity.HEAT,
+            commodity_4=None,
+            configuration=configuration,
+        )
+
+
+class HeatPump(BaseComponent):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity_1=ComponentCommodity.ELECTRICITY,
+            commodity_2=None,
+            commodity_3=ComponentCommodity.HEAT,
+            commodity_4=None,
+            configuration=configuration,
+        )
+
+    def _load_conversions(self, configuration):
+        efficiency = configuration["efficiency"]
+
+        temperature = configuration["temperature"].values
+        set_temperature = 40.0
+        cop = (set_temperature + 273.15) / (set_temperature - temperature) * efficiency
+
+        configuration["indep_var_1"] = "output_1"
+        configuration["indep_var_2"] = None
+        configuration["conversion_1"] = ("output_1", "input_1", 1.0 / cop)
+        configuration["conversion_2"] = None
+
+
+class SolarThermalCollector(BaseComponent):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity_1=None,
+            commodity_2=None,
+            commodity_3=ComponentCommodity.HEAT,
+            commodity_4=None,
+            configuration=configuration,
+        )
+
+        self.irradiance = configuration["irradiance"]
+
+    def _setup_conversions(self, configuration):
+        self.operational_variables = [(self.name + ".output_1", VariableKind.INDEXED)]
+
+    def add_non_state_variables(self, o_block):
+        output = pyo.Var(o_block.T, bounds=(0, None))
+        o_block.add(self.name + ".output_1", output)
+
+    def add_non_state_model(self, d_block, o_block):
+        irradiance = self.irradiance.resample(o_block.dynamic).values
+        power_factor = irradiance * 0.001  # <--- Yi: your code here
+        power_factor[power_factor > 1.0] = 1.0
+        power_factor[power_factor < 0.0] = 0.0
+
+        output = o_block.component_dict[self.name + ".output_1"]
+        capacity = d_block.component_dict[self.name + ".capacity"]
+
+        def rule(m, t):
+            return output[t] <= capacity * power_factor[t]
+
+        o_block.add(self.name + ".generation", pyo.Constraint(o_block.T, rule=rule))
+
+
+class HotWaterConsumption(BaseConsumption):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name, commodity=ComponentCommodity.HEAT, configuration=configuration
+        )
+
+
+class HeatGrid(BaseGrid):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name, commodity=ComponentCommodity.HEAT, configuration=configuration
+        )
+
+
+class HeatStorage(BaseStorage):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name, commodity=ComponentCommodity.HEAT, configuration=configuration
+        )
diff --git a/component/hydrogen.py b/component/hydrogen.py
new file mode 100644
index 0000000000000000000000000000000000000000..25315ab707a1dddc33b8224603283b53b340eed2
--- /dev/null
+++ b/component/hydrogen.py
@@ -0,0 +1,83 @@
+"""
+MIT License
+
+Copyright (c) 2023 RWTH Aachen University
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+
+from Model_Library.component.core import BaseComponent, BaseStorage, ComponentCommodity
+
+
+class Electrolyzer(BaseComponent):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity_1=ComponentCommodity.ELECTRICITY,
+            commodity_2=None,
+            commodity_3=ComponentCommodity.HYDROGEN,
+            commodity_4=None,
+            configuration=configuration,
+        )
+
+    def _load_conversions(self, configuration):
+        efficiency = configuration["efficiency"]
+
+        configuration["indep_var_1"] = "input_1"
+        configuration["indep_var_2"] = None
+        configuration["conversion_1"] = ("input_1", "output_1", efficiency)
+        configuration["conversion_2"] = None
+
+
+class FuelCell(BaseComponent):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity_1=ComponentCommodity.HYDROGEN,
+            commodity_2=None,
+            commodity_3=ComponentCommodity.ELECTRICITY,
+            commodity_4=ComponentCommodity.HEAT,
+            configuration=configuration,
+        )
+
+    def _load_conversions(self, configuration):
+        electric_efficiency = configuration["electric_efficiency"]
+        thermal_efficiency = configuration["thermal_efficiency"]
+
+        configuration["indep_var_1"] = "output_1"
+        configuration["indep_var_2"] = None
+        configuration["conversion_1"] = (
+            "output_1",
+            "input_1",
+            1.0 / electric_efficiency,
+        )
+        configuration["conversion_2"] = (
+            "output_1",
+            "output_2",
+            thermal_efficiency / electric_efficiency,
+        )
+
+
+class HydrogenStorage(BaseStorage):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity=ComponentCommodity.HYDROGEN,
+            configuration=configuration,
+        )
diff --git a/Component/model/heat_components/heat_consumption/HeatConsumption.py b/component/space_heat.py
similarity index 52%
rename from Component/model/heat_components/heat_consumption/HeatConsumption.py
rename to component/space_heat.py
index 4a372254f11efc4cf74b292dbf25ed30ef37726b..486bdbea4403e0ff1bf6d9b418e48fcbd6384f61 100644
--- a/Component/model/heat_components/heat_consumption/HeatConsumption.py
+++ b/component/space_heat.py
@@ -22,19 +22,41 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
 """
 
-from Model_Library.Component.model.AbstractComponent import ComponentCommodity
-from Model_Library.Component.model.BaseConsumption import BaseConsumption
-from Tooling.predictor.Predictor import Predictor
-from Tooling.dynamics.Dynamic import resample
+from Model_Library.component.core import (
+    BaseComponent,
+    BaseConsumption,
+    ComponentCommodity,
+)
+
+
+class ElectricRadiator(BaseComponent):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity_1=ComponentCommodity.ELECTRICITY,
+            commodity_2=None,
+            commodity_3=ComponentCommodity.SPACE_HEAT,
+            commodity_4=None,
+            configuration=configuration,
+        )
+
+
+class Radiator(BaseComponent):
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity_1=ComponentCommodity.HEAT,
+            commodity_2=None,
+            commodity_3=ComponentCommodity.SPACE_HEAT,
+            commodity_4=None,
+            configuration=configuration,
+        )
 
-import pyomo.environ as pyo
 
 class HeatConsumption(BaseConsumption):
-
-    def __init__(self, name, configuration, model_directory, profiles, dynamic):
-        super().__init__(name=name,
-                         type="HeatConsumption",
-                         commodity=ComponentCommodity.HEAT,
-                         configuration=configuration,
-                         profiles=profiles,
-                         dynamic=dynamic)
+    def __init__(self, name, configuration):
+        super().__init__(
+            name=name,
+            commodity=ComponentCommodity.SPACE_HEAT,
+            configuration=configuration,
+        )
diff --git a/dynamics.py b/dynamics.py
new file mode 100644
index 0000000000000000000000000000000000000000..c13ee80bb01874c6c722bee78ca100979d81957d
--- /dev/null
+++ b/dynamics.py
@@ -0,0 +1,1519 @@
+"""
+MIT License
+
+Copyright (c) 2023 RWTH Aachen University
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+
+import abc
+import numpy as np
+import pandas as pd
+from typing import Union
+
+
+# represents a continuous set of time steps
+class Dynamic(abc.ABC):
+    # returns the number of steps in this dynamic
+    @abc.abstractmethod
+    def number_of_steps(self) -> int:
+        pass
+
+    # returns the length that a ndarray needs in order to hold values for this dynamic
+    @abc.abstractmethod
+    def shape(self) -> int:
+        pass
+
+    # returns the length that a ndarray needs in order to hold the first state values for this dynamic
+    @abc.abstractmethod
+    def first_state_shape(self) -> int:
+        pass
+
+    # returns the index that a pandas Series or DataFrame needs in order to hold values for this dynamic
+    @abc.abstractmethod
+    def pandas_index(self) -> pd.Index:
+        pass
+
+    # returns the index that a pandas Series or DataFrame needs in order to hold first state values for this dynamic
+    @abc.abstractmethod
+    def first_state_pandas_index(self) -> pd.Index:
+        pass
+
+    # returns the length of the time step at the given position in hours
+    @abc.abstractmethod
+    def step_size(self, position: int) -> float:
+        pass
+
+    # returns the length of all time steps in seconds
+    @abc.abstractmethod
+    def step_lengths(self):  # type hinting a np-array of ints
+        pass
+
+    @abc.abstractmethod
+    def _all_indices(self):  # type hinting a np-array of ints
+        pass
+
+    def display(self) -> str:
+        numbers = self._all_indices()
+        rows = [[]]
+        row_ends = [0]
+        for number in numbers:
+            local_position = number - numbers[0]
+            row_index = 0
+            while row_index < len(rows) and row_ends[row_index] > local_position:
+                row_index += 1
+            if row_index == len(rows):
+                rows.append([])
+                row_ends.append(0)
+            rows[row_index].append(number)
+            row_ends[row_index] = local_position + len(str(number)) + 1
+        row_strings = []
+        for items in rows:
+            row_string = ""
+            string_end = 0
+            for number in items:
+                local_position = number - numbers[0]
+                row_string += " " * (local_position - string_end)
+                row_string += str(number)
+                string_end = local_position + len(str(number))
+            row_strings.append(row_string)
+        output = ""
+        for row_index in range(len(row_strings)):
+            output += row_strings[len(row_strings) - row_index - 1] + "\n"
+        string_end = 0
+        for number in numbers:
+            local_position = number - numbers[0]
+            output += " " * (local_position - string_end)
+            output += "|"
+            string_end = local_position + 1
+        output += "\n"
+        return output
+
+
+# a dynamic defined by an explicit list of time steps
+class TrivialDynamic(Dynamic):
+    # d_steps: length of each time step in seconds
+    def __init__(self, d_steps):  # d_steps: type hinting a np-array of ints
+        self.d_steps = d_steps
+
+    def number_of_steps(self) -> int:
+        return len(self.d_steps)
+
+    def shape(self) -> int:
+        return len(self.d_steps)
+
+    def first_state_shape(self) -> int:
+        return 1
+
+    def pandas_index(self) -> pd.Index:
+        return pd.Index(np.arange(0, len(self.d_steps), dtype=int), dtype=int)
+
+    def first_state_pandas_index(self) -> pd.Index:
+        return pd.Index([-1], dtype=int)
+
+    def step_size(self, position: int) -> float:
+        return self.d_steps[position] / 3600.0
+
+    def step_lengths(self):  # type hinting a np-array of ints
+        return self.d_steps
+
+    def _all_indices(self):  # type hinting a np-array of ints
+        return np.arange(0, len(self.d_steps) + 1, dtype=int)
+
+
# represents a continuous set of time steps within a dynamic tree
class TreeDynamic(Dynamic):
    """Abstract base class for dynamics organized in a tree.

    Every tree dynamic descends from a RootDynamic. Throughout this class,
    "indices" are expressed relative to the root dynamic, while "positions"
    are relative to the dynamic itself.
    """

    # returns the root dynamic of this dynamic
    @abc.abstractmethod
    def root(self) -> "RootDynamic":
        pass

    # return true if other is an ancestor of this dynamic (a dynamic counts
    # as its own ancestor)
    @abc.abstractmethod
    def has_ancestor(self, other: "TreeDynamic") -> bool:
        pass

    # return the positions between the given start and end indices
    # i_start and i_end are relative to the root dynamic
    @abc.abstractmethod
    def positions_between(
        self, i_start: int, i_end: int
    ):  # type hinting a np-array of ints
        pass

    # returns the index of the given position
    # position is relative to this dynamic
    @abc.abstractmethod
    def index_of(self, position: int) -> int:
        pass

    # returns the position of the given index
    # index is relative to the root dynamic
    @abc.abstractmethod
    def position_of(self, index: int) -> int:
        pass

    # returns the length of the time step at the given position
    # position is relative to this dynamic
    @abc.abstractmethod
    def step_length_p(self, position: int) -> int:
        pass

    # constructs a sub dynamic containing time steps starting at the time steps at the given positions with the last position representing the end of the last time step
    # positions are relative to this dynamic
    @abc.abstractmethod
    def sub_dynamic_p(
        self, positions
    ) -> "TreeDynamic":  # positions: type hinting a np-array of ints
        pass

    # constructs a sub dynamic containing time steps starting at the time steps at the given indices with the last index representing the end of the last time step
    # indices are relative to the root dynamic
    @abc.abstractmethod
    def sub_dynamic(
        self, indices
    ) -> "TreeDynamic":  # indices: type hinting a np-array of ints
        pass

    # construct a sub dynamic containing time steps between the given positions
    # p_start and p_end are relative to this dynamic
    @abc.abstractmethod
    def partial_dynamic_p(self, p_start: int, p_end: int) -> "TreeDynamic":
        pass

    # construct a sub dynamic containing time steps between the given indices
    # i_start and i_end are relative to the root dynamic
    @abc.abstractmethod
    def partial_dynamic(self, i_start: int, i_end: int) -> "TreeDynamic":
        pass

    def display_alignment(self, other: "TreeDynamic") -> str:
        """Render an ASCII diagram of how the step boundaries (root indices)
        of this dynamic and of ``other`` line up: rows of index labels on
        top, then one row of "|" ticks for each of the two dynamics."""
        p_self = 0
        p_other = 0
        numbers = []
        # merge both ascending boundary-index sequences, collapsing duplicates
        while p_self <= self.number_of_steps() and p_other <= other.number_of_steps():
            i_self = self.index_of(p_self)
            i_other = other.index_of(p_other)
            if i_self < i_other:
                numbers.append(i_self)
                p_self += 1
            elif i_self > i_other:
                numbers.append(i_other)
                p_other += 1
            else:
                numbers.append(i_self)
                p_self += 1
                p_other += 1
        # append whatever remains of the longer sequence
        while p_self <= self.number_of_steps():
            numbers.append(self.index_of(p_self))
            p_self += 1
        while p_other <= other.number_of_steps():
            numbers.append(other.index_of(p_other))
            p_other += 1
        # distribute the labels over rows so labels within one row do not
        # overlap: greedily place each label in the first row with room,
        # opening a new row when none has space
        rows = [[]]
        row_ends = [0]
        for number in numbers:
            local_position = number - numbers[0]
            row_index = 0
            while row_index < len(rows) and row_ends[row_index] > local_position:
                row_index += 1
            if row_index == len(rows):
                rows.append([])
                row_ends.append(0)
            rows[row_index].append(number)
            # reserve one extra column so neighboring labels stay separated
            row_ends[row_index] = local_position + len(str(number)) + 1
        # render each row, padding every label to its column
        row_strings = []
        for items in rows:
            row_string = ""
            self_string_end = 0
            for number in items:
                local_position = number - numbers[0]
                row_string += " " * (local_position - self_string_end)
                row_string += str(number)
                self_string_end = local_position + len(str(number))
            row_strings.append(row_string)
        output = ""
        # emit label rows in reverse so row 0 ends up adjacent to the tick rows
        for row_index in range(len(row_strings)):
            output += row_strings[len(row_strings) - row_index - 1] + "\n"
        # tick row marking this dynamic's boundaries
        self_string = " " * (self.index_of(0) - numbers[0])
        self_string_end = self.index_of(0) - numbers[0]
        for index in self._all_indices():
            local_position = index - numbers[0]
            self_string += " " * (local_position - self_string_end)
            self_string += "|"
            self_string_end = local_position + 1
        output += self_string + "\n"
        # tick row marking the other dynamic's boundaries
        other_string = " " * (other.index_of(0) - numbers[0])
        other_string_end = other.index_of(0) - numbers[0]
        for index in other._all_indices():
            local_position = index - numbers[0]
            other_string += " " * (local_position - other_string_end)
            other_string += "|"
            other_string_end = local_position + 1
        output += other_string + "\n"
        return output
+
+
# the root of a dynamic tree, defined by the length of each time step
class RootDynamic(TreeDynamic):
    """The root of a dynamic tree.

    For the root, positions and root-relative indices coincide, so most of
    the tree interface degenerates to identity operations.
    """

    def __init__(self, d_steps, dynamic_tree: "DynamicTree"):
        # d_steps: np-array of ints, length of each time step in seconds
        self.d_steps = d_steps
        self.dynamic_tree = dynamic_tree

    def number_of_steps(self) -> int:
        return len(self.d_steps)

    def shape(self) -> int:
        return len(self.d_steps)

    def first_state_shape(self) -> int:
        return 1

    def pandas_index(self) -> pd.Index:
        return pd.Index(np.arange(len(self.d_steps), dtype=int), dtype=int)

    def first_state_pandas_index(self) -> pd.Index:
        return pd.Index([-1], dtype=int)

    def step_size(self, position: int) -> float:
        # seconds -> hours
        return self.d_steps[position] / 3600.0

    def step_lengths(self):  # np-array of ints
        return self.d_steps

    def _all_indices(self):
        # one boundary more than there are steps
        return np.arange(len(self.d_steps) + 1, dtype=int)

    def root(self) -> "RootDynamic":
        # the root of the tree is this dynamic itself
        return self

    def has_ancestor(self, other: "TreeDynamic") -> bool:
        # the root has no ancestor besides itself
        return self == other

    def positions_between(self, i_start: int, i_end: int):  # np-array of ints
        # positions and indices coincide for the root
        return np.arange(i_start, i_end, dtype=int)

    def index_of(self, position: int) -> int:
        return position

    def position_of(self, index: int) -> int:
        return index

    def step_length_p(self, position: int) -> int:
        return self.d_steps[position]

    def sub_dynamic_p(self, positions) -> "TreeDynamic":  # positions: np-array of ints
        # for the root, positions are already root-relative indices
        return self.dynamic_tree.sub_dynamic(self, positions)

    def sub_dynamic(self, indices) -> "TreeDynamic":  # indices: np-array of ints
        return self.dynamic_tree.sub_dynamic(self, indices)

    def partial_dynamic_p(self, p_start: int, p_end: int) -> "TreeDynamic":
        return self.dynamic_tree.partial_dynamic(self, p_start, p_end)

    def partial_dynamic(self, i_start: int, i_end: int) -> "TreeDynamic":
        return self.dynamic_tree.partial_dynamic(self, i_start, i_end)
+
+
# a tree dynamic defined by taking certain time steps from a reference dynamic
class BackedDynamic(TreeDynamic):
    """A tree dynamic that selects time steps from a reference dynamic.

    The steps of this dynamic start at the given indices of the reference;
    the last index only marks the end of the final step.
    """

    # reference: the tree dynamic that backs this dynamic
    # indices: the indices of the time steps contained in this dynamic with the
    #   last index representing the end of the last time step
    # indices are relative to the reference dynamic
    def __init__(
        self,
        reference: Union["RootDynamic", "BackedDynamic"],
        indices,
        dynamic_tree: "DynamicTree",
    ):  # indices: np-array of ints
        self.reference = reference
        self.indices = indices
        self.dynamic_tree = dynamic_tree

    def number_of_steps(self) -> int:
        return len(self.indices) - 1

    def shape(self) -> int:
        return len(self.indices) - 1

    def first_state_shape(self) -> int:
        return 1

    def pandas_index(self) -> pd.Index:
        # a step is labeled by the index of its start; the last entry only
        # marks the end of the final step, so it is excluded
        return pd.Index(self.indices[:-1], dtype=int)

    def first_state_pandas_index(self) -> pd.Index:
        return pd.Index([-1], dtype=int)

    def step_size(self, position: int) -> float:
        """Return the length of the step at the given position in hours."""
        # Bugfix: valid step positions are 0 .. len(indices) - 2. The previous
        # bound (len(self.indices) <= position) let position == len(indices) - 1
        # through, making _step_length_p_unchecked read indices[position + 1]
        # out of range. Now consistent with step_length_p below.
        if position < 0 or len(self.indices) - 1 <= position:
            raise IndexError("The dynamic does not have a time step at this position!")
        return self._step_length_p_unchecked(position) / 3600.0

    def step_lengths(self):  # np-array of ints
        """Return the lengths of all steps in seconds."""
        return np.fromiter(
            (
                self._step_length_p_unchecked(position)
                for position in range(len(self.indices) - 1)
            ),
            int,
        )

    def _all_indices(self):  # np-array of ints
        return self.indices

    def root(self) -> "RootDynamic":
        return self.reference.root()

    def has_ancestor(self, other: "TreeDynamic") -> bool:
        return self == other or self.reference.has_ancestor(other)

    def positions_between(
        self, i_start: int, i_end: int
    ):  # np-array of ints
        return np.arange(self.position_of(i_start), self.position_of(i_end), dtype=int)

    def index_of(self, position: int) -> int:
        # boundary positions 0 .. len(indices) - 1 are all valid here
        if position < 0 or len(self.indices) <= position:
            raise IndexError("The dynamic does not have a index for this position!")
        return self.indices[position]

    def position_of(self, index: int) -> int:
        positions = np.where(self.indices == index)[0]
        if len(positions) == 0:
            raise IndexError("The dynamic does not have a position for this index!")
        return positions[0]

    def step_length_p(self, position: int) -> int:
        """Return the length of the step at the given position in seconds."""
        if position < 0 or len(self.indices) - 1 <= position:
            raise IndexError("The dynamic does not have a time step at this position!")
        return self._step_length_p_unchecked(position)

    def sub_dynamic_p(
        self, positions
    ) -> "TreeDynamic":  # positions: np-array of ints
        if np.any(np.logical_or(positions < 0, len(self.indices) <= positions)):
            raise IndexError("The dynamic does not have all requested indices!")
        return self.dynamic_tree.sub_dynamic(self, self.indices[positions])

    def sub_dynamic(
        self, indices
    ) -> "TreeDynamic":  # indices: np-array of ints
        # np.isin replaces the deprecated np.in1d
        if not np.all(np.isin(indices, self.indices)):
            raise IndexError(
                "The dynamic does not have all requested indices for the sub dynamic!"
            )
        return self.dynamic_tree.sub_dynamic(self, indices)

    def partial_dynamic_p(self, p_start: int, p_end: int) -> "TreeDynamic":
        if (
            p_start < 0
            or len(self.indices) <= p_start
            or p_end < 0
            or len(self.indices) <= p_end
        ):
            raise IndexError(
                "The dynamic does not have all requested positions for the sub dynamic!"
            )
        return self.dynamic_tree.partial_dynamic(self, p_start, p_end)

    def partial_dynamic(self, i_start: int, i_end: int) -> "TreeDynamic":
        start_positions = np.where(self.indices == i_start)[0]
        end_positions = np.where(self.indices == i_end)[0]
        if len(start_positions) == 0 or len(end_positions) == 0:
            raise IndexError(
                "The dynamic does not have all requested indices for the sub dynamic!"
            )
        return self.dynamic_tree.partial_dynamic(
            self, start_positions[0], end_positions[0]
        )

    def _step_length_p_unchecked(self, position):
        # sum the reference step lengths covered by the step at this position
        # (inner loop variable renamed: it previously shadowed "position")
        return sum(
            self.reference.step_length_p(reference_position)
            for reference_position in self.reference.positions_between(
                self.indices[position], self.indices[position + 1]
            )
        )
+
+
# a tree dynamic defined by taking a continuous interval of time steps from a reference dynamic
class PartialDynamic(TreeDynamic):
    """A tree dynamic that covers a contiguous interval of a reference dynamic."""

    # reference: the tree dynamic from which the interval is taken
    # start: the position in the reference dynamic of the first time step contained in this dynamic
    # end: the position in the reference dynamic of the end of the last time step contained in this dynamic
    # start/end are relative to the reference dynamic
    def __init__(
        self,
        reference: Union["RootDynamic", "BackedDynamic"],
        start: int,
        end: int,
        dynamic_tree: "DynamicTree",
    ):
        self.reference = reference
        self.start = start
        self.end = end
        self.dynamic_tree = dynamic_tree

    def number_of_steps(self) -> int:
        return self.end - self.start

    def shape(self) -> int:
        return self.end - self.start

    def first_state_shape(self) -> int:
        return 1

    def pandas_index(self) -> pd.Index:
        return pd.Index(self.reference._all_indices()[self.start : self.end], dtype=int)

    def first_state_pandas_index(self) -> pd.Index:
        return pd.Index([-1], dtype=int)

    def step_size(self, position: int) -> float:
        """Return the length of the step at the given position in hours."""
        # Bugfix: valid step positions are 0 .. end - start - 1. The previous
        # bound (self.end - self.start < position) accepted
        # position == end - start, silently returning the length of the
        # reference step just after this interval (the reference performs no
        # check of its own when it is a RootDynamic). Now consistent with
        # step_length_p below.
        if position < 0 or self.end - self.start <= position:
            raise IndexError("The dynamic does not have a time step at this position!")
        return self._step_length_p_unchecked(self.start + position) / 3600.0

    def step_lengths(self):  # np-array of ints
        return np.fromiter(
            (
                self._step_length_p_unchecked(reference_position)
                for reference_position in range(self.start, self.end)
            ),
            int,
        )

    def _all_indices(self):  # np-array of ints
        return self.reference._all_indices()[self.start : self.end + 1]

    def root(self) -> "RootDynamic":
        return self.reference.root()

    def has_ancestor(self, other: "TreeDynamic") -> bool:
        return self == other or self.reference.has_ancestor(other)

    def positions_between(
        self, i_start: int, i_end: int
    ):  # np-array of ints
        return np.arange(self.position_of(i_start), self.position_of(i_end), dtype=int)

    def index_of(self, position: int) -> int:
        # boundary positions 0 .. end - start are all valid here
        if position < 0 or self.end - self.start < position:
            raise IndexError("The dynamic does not have a index for this position!")
        return self.reference.index_of(self.start + position)

    def position_of(self, index: int) -> int:
        reference_position = self.reference.position_of(index)
        if reference_position < self.start or self.end < reference_position:
            raise IndexError("The dynamic does not have a position for this index!")
        return reference_position - self.start

    def step_length_p(self, position: int) -> int:
        """Return the length of the step at the given position in seconds."""
        if position < 0 or self.end - self.start <= position:
            raise IndexError("The dynamic does not have a time step at this position!")
        return self._step_length_p_unchecked(self.start + position)

    def sub_dynamic_p(
        self, positions
    ) -> "TreeDynamic":  # positions: np-array of ints
        if np.any(np.logical_or(positions < 0, self.end - self.start < positions)):
            raise IndexError(
                "The dynamic does not have all requested positions for the sub dynamic!"
            )
        return self.reference.sub_dynamic_p(positions + self.start)

    def sub_dynamic(
        self, indices
    ) -> "TreeDynamic":  # indices: np-array of ints
        # helper renamed from "filter" to avoid shadowing the builtin
        def is_outside(index):
            reference_position = self.reference.position_of(index)
            return reference_position < self.start or self.end < reference_position

        if any(is_outside(index) for index in indices):
            # Bugfix: message previously read "The does not have ..."
            raise IndexError(
                "The dynamic does not have all requested indices for the sub dynamic!"
            )
        return self.dynamic_tree.sub_dynamic(self, indices)

    def partial_dynamic_p(self, p_start: int, p_end: int) -> "TreeDynamic":
        if (
            p_start < 0
            or self.end - self.start < p_start
            or p_end < 0
            or self.end - self.start < p_end
        ):
            raise IndexError(
                "The dynamic does not have all requested positions for the sub dynamic!"
            )
        return self.dynamic_tree.partial_dynamic(self, p_start, p_end)

    def partial_dynamic(self, i_start: int, i_end: int) -> "TreeDynamic":
        # position_of raises if an index lies outside this interval
        p_start = self.position_of(i_start)
        p_end = self.position_of(i_end)
        return self.dynamic_tree.partial_dynamic(self, p_start, p_end)

    def _step_length_p_unchecked(self, reference_position) -> int:
        # delegate; reference_position is already reference-relative
        return self.reference.step_length_p(reference_position)
+
+
class DynamicTree:
    """Registry and factory for all dynamics derived from one root dynamic.

    Sub and partial dynamics are cached so that equal requests return the
    same object; assignments between dynamics are cached as well.
    """

    # d_steps: length of each time step in seconds
    def __init__(self, d_steps):  # d_steps: np-array of ints
        self.d_steps = d_steps
        self.root_dynamic = RootDynamic(self.d_steps, self)
        # per dynamic: (sub dynamics keyed by tuple of indices,
        #               partial dynamics keyed by (start, end))
        self.dynamics = {self.root_dynamic: (dict(), dict())}
        # sequential display symbol for each known dynamic
        self.symbol_table = {self.root_dynamic: 0}
        # cached Assignment objects keyed by (source dynamic, target dynamic)
        self.assignments = dict()

    def root(self) -> RootDynamic:
        """Return the root dynamic of the tree."""
        return self.root_dynamic

    # indices are relative to the root dynamic
    def sub_dynamic(
        self, dynamic: TreeDynamic, indices
    ) -> TreeDynamic:  # indices: np-array of ints
        dynamic_indices = dynamic._all_indices()
        # requesting exactly the indices of the dynamic yields the dynamic itself
        if len(indices) == len(dynamic_indices) and np.all(indices == dynamic_indices):
            return dynamic
        # partial dynamics carry no caches of their own; use their reference
        if isinstance(dynamic, PartialDynamic):
            dynamic = dynamic.reference
        # hoisted: tuple(indices) was previously converted twice, and the
        # cache was probed via a redundant ".keys()" lookup
        key = tuple(indices)
        sub_dynamics = self.dynamics[dynamic][0]
        if key in sub_dynamics:
            return sub_dynamics[key]
        sub_dynamic = BackedDynamic(dynamic, indices, self)
        sub_dynamics[key] = sub_dynamic
        self.dynamics[sub_dynamic] = (dict(), dict())
        self.symbol_table[sub_dynamic] = len(self.symbol_table)
        return sub_dynamic

    # start/end are relative to dynamic
    def partial_dynamic(
        self, dynamic: TreeDynamic, start: int, end: int
    ) -> TreeDynamic:
        # requesting the full range yields the dynamic itself
        if start == 0 and end == dynamic.number_of_steps():
            return dynamic
        # flatten nested partial dynamics onto their common reference
        if isinstance(dynamic, PartialDynamic):
            start += dynamic.start
            end += dynamic.start
            dynamic = dynamic.reference
        key = (start, end)
        partial_dynamics = self.dynamics[dynamic][1]
        if key in partial_dynamics:
            return partial_dynamics[key]
        partial_dynamic = PartialDynamic(dynamic, start, end, self)
        partial_dynamics[key] = partial_dynamic
        self.dynamics[partial_dynamic] = (dict(), dict())
        self.symbol_table[partial_dynamic] = len(self.symbol_table)
        return partial_dynamic

    def get_assignment(
        self, dynamic: TreeDynamic, target_dynamic: TreeDynamic
    ) -> "AssignmentWrapper":
        """Return a (cached) assignment that maps dynamic onto target_dynamic.

        Assignments are computed between the non-partial ancestors; partial
        ranges are passed to the AssignmentWrapper as start/end offsets.
        """
        if isinstance(dynamic, PartialDynamic):
            non_partial_dynamic = dynamic.reference
            source_start = dynamic.start
            source_end = dynamic.end
        else:
            non_partial_dynamic = dynamic
            source_start = 0
            source_end = dynamic.number_of_steps()
        if isinstance(target_dynamic, PartialDynamic):
            non_partial_target_dynamic = target_dynamic.reference
            target_start = target_dynamic.start
            target_end = target_dynamic.end
        else:
            non_partial_target_dynamic = target_dynamic
            target_start = 0
            target_end = target_dynamic.number_of_steps()
        key = (non_partial_dynamic, non_partial_target_dynamic)
        if key not in self.assignments:
            self.assignments[key] = compute_assignment(
                non_partial_dynamic, non_partial_target_dynamic
            )
        return AssignmentWrapper(
            self.assignments[key],
            source_start,
            source_end,
            target_start,
            target_end,
        )

    def display(self) -> str:
        """Render the cached dynamics as an indented symbol listing."""
        output = ""
        for dynamic, (sub_dynamics, partial_dynamics) in self.dynamics.items():
            # skip dynamics without derived dynamics
            if len(sub_dynamics) == 0 and len(partial_dynamics) == 0:
                continue
            output += f"{self.symbol_table[dynamic]}:\n"
            for indices, sub_dynamic in sub_dynamics.items():
                output += f"\t{indices}: {self.symbol_table[sub_dynamic]}\n"
            for (start, end), partial_dynamic in partial_dynamics.items():
                output += f"\t{start}-{end}: {self.symbol_table[partial_dynamic]}\n"
        return output
+
+
def compute_assignment(
    dynamic: Union[RootDynamic, BackedDynamic],
    target_dynamic: Union[RootDynamic, BackedDynamic],
) -> "Assignment":
    """Compute the assignment that maps values on dynamic onto target_dynamic.

    Dispatches on how the two dynamics are related within the tree.
    """
    if dynamic.root() != target_dynamic.root():
        raise ValueError("Both dynamics have to have the same root dynamic!")
    # identical dynamics: trivial assignment
    if dynamic == target_dynamic:
        return AssignmentSame()
    # target is derived from dynamic: aggregate source steps into target steps
    if isinstance(target_dynamic, BackedDynamic) and target_dynamic.has_ancestor(
        dynamic
    ):
        return compute_assignment_to_backed(dynamic, target_dynamic)
    # dynamic is derived from target: distribute source steps over target steps
    if isinstance(dynamic, BackedDynamic) and dynamic.has_ancestor(target_dynamic):
        return compute_assignment_from_backed(dynamic, target_dynamic)
    # otherwise the two only share the root
    return compute_assignment_common_reference(dynamic, target_dynamic)
+
+
# target_dynamic is BackedDynamic and has dynamic as an ancestor
def compute_assignment_to_backed(
    dynamic: Union[RootDynamic, BackedDynamic], target_dynamic: BackedDynamic
) -> "AssignmentToBacked":
    """Build the assignment that aggregates dynamic's steps into the coarser
    steps of target_dynamic (each target step is a weighted sum of the
    source steps it spans, weighted by relative step length)."""
    assignment = AssignmentToBacked(dynamic._all_indices(), target_dynamic.indices)
    boundaries = target_dynamic.indices
    for target_position in range(len(boundaries) - 1):
        covered = dynamic.positions_between(
            boundaries[target_position], boundaries[target_position + 1]
        )
        target_length = target_dynamic.step_length_p(target_position)
        terms = np.empty(len(covered), dtype=object)
        for offset in range(len(covered)):
            source_position = covered[offset]
            terms[offset] = (
                source_position,
                dynamic.step_length_p(source_position) / target_length,
            )
        assignment.add_expression(target_position, terms)
    assignment.compile()
    return assignment
+
+
# dynamic is BackedDynamic and has target_dynamic as an ancestor
def compute_assignment_from_backed(
    dynamic: BackedDynamic, target_dynamic: Union[RootDynamic, BackedDynamic]
) -> "AssignmentFromBacked":
    """Build the assignment that distributes each step of dynamic over the
    finer steps of target_dynamic that it spans."""
    assignment = AssignmentFromBacked(dynamic.indices, target_dynamic._all_indices())
    target_begin = target_dynamic.position_of(dynamic.indices[0])
    source_position = 0
    for next_source_index in dynamic.indices[1:]:
        target_end = target_dynamic.position_of(next_source_index)
        assignment.add_distribution(target_begin, target_end, source_position)
        # the end of this source step is the start of the next one
        target_begin = target_end
        source_position += 1
    assignment.compile()
    return assignment
+
+
# dynamic and target_dynamic are BackedDynamic and share the same root dynamic
def compute_assignment_common_reference(
    dynamic: BackedDynamic, target_dynamic: BackedDynamic
) -> "AssignmentCommon":
    """Compute the assignment between two backed dynamics related only
    through their common root.

    Sweeps over the target steps, expressing each as a weighted sum of the
    source steps that overlap it (weights are fractions of the target step
    length).
    """
    assignment = AssignmentCommon(dynamic.indices, target_dynamic.indices)
    # no overlap at all: the assignment stays empty
    if (
        dynamic.indices[-1] <= target_dynamic.indices[0]
        or target_dynamic.indices[-1] <= dynamic.indices[0]
    ):
        assignment.compile()
        return assignment
    target_i_start = target_dynamic.indices[0]
    # Determine the first overlapping source/target step pair and how much of
    # that source step lies before the first shared boundary.
    if target_i_start not in dynamic.indices:
        source_i_start = dynamic.indices[0]
        if source_i_start < target_i_start:
            # source starts first: walk backwards from the target start to the
            # enclosing source boundary, accumulating the consumed length
            root = dynamic.root()
            root_p_start = target_i_start  # because root is a RootDynamic, positions and indices are equivalent
            length = 0
            while (
                root_p_start not in dynamic.indices[:-1]
            ):  # because root is a RootDynamic, root_p_start is equivalent to root_i_start
                root_p_start -= 1
                length += root.step_length_p(root_p_start)
            source_position = dynamic.position_of(
                root_p_start
            )  # because root is a RootDynamic, positions and indices are equivalent
            target_position = 0
            remaining_length = dynamic.step_length_p(source_position) - length
        else:  # Here source_i_start > target_i_start because the case of source_i_start == target_i_start is handled in the else branch of target_i_start not in dynamic.indices
            # target starts first: walk forwards from the source start to the
            # first target boundary
            root = dynamic.root()
            root_p_start = source_i_start  # because root is a RootDynamic, positions and indices are equivalent
            length = 0
            source_position = 0
            while root_p_start not in target_dynamic.indices:
                length += root.step_length_p(root_p_start)
                root_p_start += 1
                if root_p_start in dynamic.indices[:-1]:
                    # crossed a source boundary: restart the consumed length
                    length = 0
                    source_position += 1
                elif (
                    root_p_start > dynamic.indices[-1]
                ):  # because root is a RootDynamic, positions and indices are equivalent
                    assignment.compile()
                    return assignment  # here, we discover that the entire dynamic does not cover one time_step of the target_dynamic
            target_position = target_dynamic.position_of(
                root_p_start
            )  # because root is a RootDynamic, positions and indices are equivalent
            remaining_length = dynamic.step_length_p(source_position) - length
    else:
        # both dynamics share the target's first boundary
        source_position = dynamic.position_of(target_i_start)
        target_position = 0
        remaining_length = dynamic.step_length_p(source_position)
    # sweep over the target steps, splitting source steps proportionally
    while target_position < len(target_dynamic.indices) - 1:
        target_length = target_dynamic.step_length_p(target_position)
        remaining_target_length = target_length
        expression = []
        while remaining_target_length > 0:
            if remaining_length == 0:
                # current source step is used up; advance to the next one
                source_position += 1
                if source_position >= len(dynamic.indices) - 1:
                    # source exhausted: remaining target steps stay unassigned
                    assignment.compile()
                    return assignment
                remaining_length = dynamic.step_length_p(source_position)
            if remaining_target_length <= remaining_length:
                # source step covers the rest of this target step
                expression.append(
                    (source_position, remaining_target_length / target_length)
                )
                remaining_length -= remaining_target_length
                remaining_target_length -= remaining_target_length
            else:
                # source step ends inside this target step
                expression.append((source_position, remaining_length / target_length))
                remaining_target_length -= remaining_length
                remaining_length -= remaining_length
        assignment.add_expression(target_position, expression)
        target_position += 1
    assignment.compile()
    return assignment
+
+
class Assignment(abc.ABC):
    """Maps values or variables defined on a source dynamic onto a target dynamic."""

    # provides a view indexed by target into the values indexed by source
    @abc.abstractmethod
    def resample(
        self,
        values,
        source_start: int,
        source_end: int,
        target_start: int,
        target_end: int,
    ):  # values: np-array; returns: np-array
        pass

    # generates expressions representing the resampling
    @abc.abstractmethod
    def resample_variable(
        self,
        variable,
        source_start: int,
        source_end: int,
        target_start: int,
        target_end: int,
    ):  # variable: np-array of unindexed pyo Var or pyo Expression
        pass
+
+
class AssignmentSame(Assignment):
    """Trivial assignment between a dynamic and itself: resampling is a slice
    (for values) or the identity (for variables)."""

    def __init__(self):
        pass

    def resample(
        self,
        values,
        source_start: int,
        source_end: int,
        target_start: int,
        target_end: int,
    ):  # values: np-array; returns: np-array
        # the source range must fully contain the target range
        if target_start < source_start or source_end < target_end:
            raise IndexError("Source values do not cover all target time steps!")
        offset = source_start
        return values[:, target_start - offset : target_end - offset]

    def resample_variable(
        self,
        variable,
        source_start: int,
        source_end: int,
        target_start: int,
        target_end: int,
    ):  # variable: np-array of unindexed pyo Var or pyo Expression
        # variables can only be forwarded when both ranges match exactly
        if (source_start, source_end) != (target_start, target_end):
            raise IndexError(
                "Source variables do not cover all target time steps exactly!"
            )
        return variable
+
+
+class AssignmentToBacked(Assignment):
+    def __init__(
+        self, indices, target_indices
+    ):  # indices: type hinting a np-array of ints, target_indices: type hinting a np-array of ints
+        self.indices = indices
+        self.target_indices = target_indices
+        self.expressions = np.empty(len(target_indices) - 1, dtype=object)
+
+    def add_expression(
+        self, target_position: int, expression
+    ):  # expression: type hinting a np-array of (int, float)
+        self.expressions[target_position] = expression
+
+    def compile(self):
+        self.first_complete_expression = np.empty(
+            len(self.indices), dtype=int
+        )  # self.first_complete_expression[source_position] is the index of the first complete expression that the source dynamic covers, if it would start with source_position
+        self.last_complete_expression = np.empty(
+            len(self.indices), dtype=int
+        )  # self.last_complete_expression[source_position] is 1 plus the index of the last complete expression that the source dynamic covers, if it would end with source_position
+        next_position_to_assign_first = 0
+        next_position_to_assign_last = 0
+        for i, expression in enumerate(self.expressions):
+            self.first_complete_expression[
+                next_position_to_assign_first : expression[0][0] + 1
+            ] = i
+            next_position_to_assign_first = expression[0][0] + 1
+            self.last_complete_expression[
+                next_position_to_assign_last : expression[-1][0] + 1
+            ] = i
+            next_position_to_assign_last = expression[-1][0] + 1
+        self.first_complete_expression[next_position_to_assign_first:] = len(
+            self.expressions
+        )
+        self.last_complete_expression[next_position_to_assign_last:] = len(
+            self.expressions
+        )
+
+    def resample(
+        self,
+        values,
+        source_start: int,
+        source_end: int,
+        target_start: int,
+        target_end: int,
+    ):  # values: type hinting a np-array, type hinting a np-array
+        # Resample the rows of `values` (one column per source step in
+        # [source_start, source_end)) onto the target steps in
+        # [target_start, target_end).  Raises IndexError when the source
+        # range does not fully enclose the target range.
+        source_i_start = self.indices[source_start]
+        source_i_end = self.indices[source_end]
+        target_i_start = self.target_indices[target_start]
+        target_i_end = self.target_indices[target_end]
+        if target_i_start < source_i_start:
+            raise IndexError("Source values do not cover all target time steps!")
+        if source_i_end < target_i_end:
+            raise IndexError("Source values do not cover all target time steps!")
+        target_values = np.empty(
+            (values.shape[0], target_end - target_start), dtype=values.dtype
+        )
+        # Each target column is the factor-weighted sum of its source columns.
+        for local_target_position, expression in enumerate(
+            self.expressions[target_start:target_end]
+        ):
+            acc = 0.0
+            for source_position, factor in expression:
+                acc += factor * values[:, source_position - source_start]
+            target_values[:, local_target_position] = acc
+        return target_values
+
+    def resample_variable(
+        self,
+        variable,
+        source_start: int,
+        source_end: int,
+        target_start: int,
+        target_end: int,
+    ):  # variable: type hinting a np-array of unindexed pyo Var or pyo Expression
+        # Like resample, but builds symbolic linear expressions from the
+        # entries of `variable`.  Unlike resample, this requires the source
+        # and target ranges to coincide exactly at both endpoints.
+        source_i_start = self.indices[source_start]
+        source_i_end = self.indices[source_end]
+        target_i_start = self.target_indices[target_start]
+        target_i_end = self.target_indices[target_end]
+        if source_i_start != target_i_start:
+            raise IndexError(
+                "Source variables do not cover all target time steps exactly!"
+            )
+        if source_i_end != target_i_end:
+            raise IndexError(
+                "Source variables do not cover all target time steps exactly!"
+            )
+        # Object array: entries are the accumulated (symbolic) expressions.
+        target_variable = np.empty(target_end - target_start, dtype=object)
+        for local_target_position, expression in enumerate(
+            self.expressions[target_start:target_end]
+        ):
+            acc = 0.0
+            for source_position, factor in expression:
+                acc += factor * variable[source_position - source_start]
+            target_variable[local_target_position] = acc
+        return target_variable
+
+
+class AssignmentFromBacked(Assignment):
+    # Assignment used when resampling "up the tree" (see test_resampling):
+    # each source step is distributed unchanged over a contiguous range of
+    # target steps, so no weighting is needed.
+    def __init__(
+        self, indices, target_indices
+    ):  # indices: type hinting a np-array of ints, target_indices: type hinting a np-array of ints
+        self.indices = indices
+        self.target_indices = target_indices
+        # distributions[source_position] = (first_target_position, end_target_position)
+        self.distributions = np.empty(len(indices) - 1, dtype=object)
+
+    def add_distribution(
+        self, first_target_position: int, end_target_position: int, source_position: int
+    ):
+        # Record that source step `source_position` covers the half-open
+        # target position range [first_target_position, end_target_position).
+        self.distributions[source_position] = (
+            first_target_position,
+            end_target_position,
+        )
+
+    def compile(self):
+        # Invert the distributions: distribution_positions[target_position]
+        # is the source step covering that target step (-1 where uncovered).
+        self.distribution_positions = np.full(len(self.target_indices), -1, dtype=int)
+        self.source_to_target = np.empty(len(self.indices), dtype=int)
+        for source_position, (first_target_position, end_target_position) in enumerate(
+            self.distributions
+        ):
+            self.distribution_positions[
+                first_target_position:end_target_position
+            ] = source_position
+            self.source_to_target[source_position] = first_target_position
+        # The final boundary maps to the end of the last distribution.
+        self.source_to_target[-1] = self.distributions[-1][1]
+
+    def resample(
+        self,
+        values,
+        source_start: int,
+        source_end: int,
+        target_start: int,
+        target_end: int,
+    ):  # values: type hinting a np-array, type hinting a np-array
+        # Raises IndexError when the source range does not enclose the
+        # target range.
+        source_i_start = self.indices[source_start]
+        source_i_end = self.indices[source_end]
+        target_i_start = self.target_indices[target_start]
+        target_i_end = self.target_indices[target_end]
+        if target_i_start < source_i_start:
+            raise IndexError("Source values do not cover all target time steps!")
+        if source_i_end < target_i_end:
+            raise IndexError("Source values do not cover all target time steps!")
+        # Integer-array (fancy) indexing: every target step takes the value
+        # of its covering source column.
+        return values[
+            :, self.distribution_positions[target_start:target_end] - source_start
+        ]
+
+    def resample_variable(
+        self,
+        variable,
+        source_start: int,
+        source_end: int,
+        target_start: int,
+        target_end: int,
+    ):  # variable: type hinting a np-array of unindexed pyo Var or pyo Expression
+        # Requires the source and target ranges to coincide exactly at both
+        # endpoints; each target entry references its covering source entry.
+        source_i_start = self.indices[source_start]
+        source_i_end = self.indices[source_end]
+        target_i_start = self.target_indices[target_start]
+        target_i_end = self.target_indices[target_end]
+        if source_i_start != target_i_start:
+            raise IndexError(
+                "Source variables do not cover all target time steps exactly!"
+            )
+        if source_i_end != target_i_end:
+            raise IndexError(
+                "Source variables do not cover all target time steps exactly!"
+            )
+        target_variable = np.empty(target_end - target_start, dtype=object)
+        for local_target_position, source_position in enumerate(
+            self.distribution_positions[target_start:target_end]
+        ):
+            target_variable[local_target_position] = variable[
+                source_position - source_start
+            ]
+        return target_variable
+
+
+class AssignmentCommon(Assignment):
+    # Assignment presumably used when source and target dynamics only share
+    # a common reference dynamic (see "assignment common reference" in
+    # test_resampling) — TODO confirm.  Differs from the weighted-expression
+    # sibling class above only in that `expressions` entries may stay None,
+    # which compile() skips.
+    def __init__(
+        self, indices, target_indices
+    ):  # indices: type hinting a np-array of ints, target_indices: type hinting a np-array of ints
+        self.indices = indices
+        self.target_indices = target_indices
+        self.expressions = np.empty(len(target_indices) - 1, dtype=object)
+
+    def add_expression(
+        self, target_position: int, expression
+    ):  # expression: type hinting a np-array of (int, float)
+        # Each entry of `expression` is (source_position, factor).
+        self.expressions[target_position] = expression
+
+    def compile(self):
+        # Same coverage tables as the sibling class, but None entries are
+        # skipped and the sentinel is last_existing_expression + 1 instead of
+        # len(self.expressions).
+        self.first_complete_expression = np.empty(
+            len(self.indices), dtype=int
+        )  # self.first_complete_expression[source_position] is the index of the first complete expression that the source dynamic covers, if it would start with source_position
+        self.last_complete_expression = np.empty(
+            len(self.indices), dtype=int
+        )  # self.last_complete_expression[source_position] is 1 plus the index of the last complete expression that the source dynamic covers, if it would end with source_position
+        next_position_to_assign_first = 0
+        next_position_to_assign_last = 0
+        last_existing_expression = 0
+        for i, expression in enumerate(self.expressions):
+            if expression is not None:
+                self.first_complete_expression[
+                    next_position_to_assign_first : expression[0][0] + 1
+                ] = i
+                next_position_to_assign_first = expression[0][0] + 1
+                self.last_complete_expression[
+                    next_position_to_assign_last : expression[-1][0] + 1
+                ] = i
+                next_position_to_assign_last = expression[-1][0] + 1
+                last_existing_expression = i
+        self.first_complete_expression[next_position_to_assign_first:] = (
+            last_existing_expression + 1
+        )
+        self.last_complete_expression[next_position_to_assign_last:] = (
+            last_existing_expression + 1
+        )
+
+    def resample(
+        self,
+        values,
+        source_start: int,
+        source_end: int,
+        target_start: int,
+        target_end: int,
+    ):  # values: type hinting a np-array, type hinting a np-array
+        # NOTE(review): unlike compile(), this loop has no None check — a
+        # None entry inside [target_start, target_end) would raise TypeError
+        # when iterated.  Verify callers never request uncovered targets.
+        source_i_start = self.indices[source_start]
+        source_i_end = self.indices[source_end]
+        target_i_start = self.target_indices[target_start]
+        target_i_end = self.target_indices[target_end]
+        if target_i_start < source_i_start:
+            raise IndexError("Source values do not cover all target time steps!")
+        if source_i_end < target_i_end:
+            raise IndexError("Source values do not cover all target time steps!")
+        target_values = np.empty(
+            (values.shape[0], target_end - target_start), dtype=values.dtype
+        )
+        # Each target column is the factor-weighted sum of its source columns.
+        for local_target_position, expression in enumerate(
+            self.expressions[target_start:target_end]
+        ):
+            acc = 0.0
+            for source_position, factor in expression:
+                acc += factor * values[:, source_position - source_start]
+            target_values[:, local_target_position] = acc
+        return target_values
+
+    def resample_variable(
+        self,
+        variable,
+        source_start: int,
+        source_end: int,
+        target_start: int,
+        target_end: int,
+    ):  # variable: type hinting a np-array of unindexed pyo Var or pyo Expression
+        # Symbolic variant; requires exact endpoint agreement between source
+        # and target ranges.  Same None-entry caveat as resample().
+        source_i_start = self.indices[source_start]
+        source_i_end = self.indices[source_end]
+        target_i_start = self.target_indices[target_start]
+        target_i_end = self.target_indices[target_end]
+        if source_i_start != target_i_start:
+            raise IndexError(
+                "Source variables do not cover all target time steps exactly!"
+            )
+        if source_i_end != target_i_end:
+            raise IndexError(
+                "Source variables do not cover all target time steps exactly!"
+            )
+        target_variable = np.empty(target_end - target_start, dtype=object)
+        for local_target_position, expression in enumerate(
+            self.expressions[target_start:target_end]
+        ):
+            acc = 0.0
+            for source_position, factor in expression:
+                acc += factor * variable[source_position - source_start]
+            target_variable[local_target_position] = acc
+        return target_variable
+
+
+class AssignmentWrapper:
+    # Binds an Assignment to fixed source/target sub-ranges so callers can
+    # resample without repeating the four range arguments.
+    def __init__(
+        self,
+        assignment: Assignment,
+        source_start: int,
+        source_end: int,
+        target_start: int,
+        target_end: int,
+    ):
+        self.assignment = assignment
+        self.source_start = source_start
+        self.source_end = source_end
+        self.target_start = target_start
+        self.target_end = target_end
+
+    def resample(
+        self, values
+    ):  # values: type hinting a np-array, type hinting a np-array
+        # Delegate to the wrapped assignment with the stored ranges.
+        return self.assignment.resample(
+            values,
+            self.source_start,
+            self.source_end,
+            self.target_start,
+            self.target_end,
+        )
+
+    def resample_variable(
+        self, variable
+    ):  # variable: type hinting a np-array of unindexed pyo Var or pyo Expression
+        # Symbolic counterpart of resample(), same stored ranges.
+        return self.assignment.resample_variable(
+            variable,
+            self.source_start,
+            self.source_end,
+            self.target_start,
+            self.target_end,
+        )
+
+
+def test_single_resampling(
+    dynamic: TreeDynamic,
+    target_dynamic: TreeDynamic,
+    ancestor_dynamic: TreeDynamic,
+    ancestor_values,
+    f,
+):  # ancestor_values: type hinting a np-array, f: type hinting a file
+    # Resample a ramp signal 1..N from `dynamic` to `target_dynamic` via the
+    # assignment machinery, then recompute the expected result independently
+    # through the shared `ancestor_dynamic`, and write a marker to `f`:
+    # "fine" (match), "math" (mismatch), "olap" (expected coverage error).
+    # `ancestor_values` is a caller-provided NaN scratch buffer, reset on exit.
+    values = np.arange(1, dynamic.number_of_steps() + 1, dtype=float)
+    i_start = dynamic.index_of(0)
+    i_end = dynamic.index_of(dynamic.number_of_steps())
+    target_i_start = target_dynamic.index_of(0)
+    target_i_end = target_dynamic.index_of(target_dynamic.number_of_steps())
+    resample_possible = i_start <= target_i_start and target_i_end <= i_end
+    try:
+        result = dynamic.dynamic_tree.get_assignment(dynamic, target_dynamic).resample(
+            np.expand_dims(values, axis=0)
+        )[0]
+    except Exception as error:
+        # NOTE(review): if an exception other than the coverage error fires
+        # while resample_possible is True, `result` stays unbound and the
+        # np.isclose comparison below raises NameError.
+        if str(error) == "Source values do not cover all target time steps!":
+            f.write("olap # ")
+        else:
+            f.write(str(error) + " # ")
+    if not resample_possible:
+        return
+    target_values = np.full(target_dynamic.number_of_steps(), np.nan, dtype=float)
+    # resample from source to ancestor
+    first_ancestor_position = ancestor_dynamic.position_of(dynamic.index_of(0))
+    for source_position in range(0, dynamic.number_of_steps()):
+        next_source_index = dynamic.index_of(source_position + 1)
+        next_ancestor_position = ancestor_dynamic.position_of(next_source_index)
+        for ancestor_position in range(first_ancestor_position, next_ancestor_position):
+            ancestor_values[ancestor_position] = values[source_position]
+        first_ancestor_position = next_ancestor_position
+    # resample from ancestor to target
+    for target_position, target_index in enumerate(target_dynamic._all_indices()[:-1]):
+        ancestor_positions = ancestor_dynamic.positions_between(
+            target_index, target_dynamic.index_of(target_position + 1)
+        )
+        if len(ancestor_positions) == 1:
+            target_values[target_position] = ancestor_values[ancestor_positions[0]]
+        else:
+            # Average the covered ancestor steps, weighted by each step's
+            # share of the target step length.
+            acc = 0.0
+            target_length = target_dynamic.step_length_p(target_position)
+            for ancestor_position in ancestor_positions:
+                acc += (
+                    ancestor_dynamic.step_length_p(ancestor_position)
+                    / target_length
+                    * ancestor_values[ancestor_position]
+                )
+            target_values[target_position] = acc
+    # NOTE(review): resample_possible is always True here (early return
+    # above), so this guard is redundant.
+    if resample_possible:
+        if all(np.isclose(target_values, result)):
+            f.write("fine # ")
+        else:
+            f.write("math # ")
+    # Reset the shared scratch buffer for the next call.
+    ancestor_values[:] = np.nan
+
+
+def test_resampling():
+    # Exercise the resampling machinery end-to-end, writing alignment
+    # displays and per-pair markers to "resampling.txt".
+    import random
+
+    # Deterministic run so the output file is reproducible.
+    random.seed(0)
+    # Phase 1: grow a random family of sub-/partial dynamics of a 100-step
+    # root, then cross-test resampling between every ordered pair.
+    dynamic_tree = DynamicTree(np.ones(100))
+    root = dynamic_tree.root()
+    values = np.full(root.number_of_steps(), np.nan, dtype=float)
+    dynamics = []
+    dynamics.append(root)
+    for i in range(100):
+        dynamic_number = random.randint(0, len(dynamics) - 1)
+        dynamic = dynamics[dynamic_number]
+        if random.random() < 0.75:
+            # 75%: pick a random subset of indices -> sub dynamic.
+            # NOTE(review): the inner `for i in range(...)` loops below shadow
+            # the outer counter `i`; harmless here, but confusing.
+            original_indices = list(dynamic._all_indices())
+            number = random.randint(2, len(original_indices))
+            indices = []
+            for i in range(number):
+                choice = random.choice(original_indices)
+                original_indices.remove(choice)
+                indices.append(choice)
+            indices.sort()
+            sub_dynamic = dynamic.sub_dynamic(np.array(indices))
+            if sub_dynamic not in dynamics:
+                dynamics.append(sub_dynamic)
+        else:
+            # 25%: pick two random positions -> partial dynamic.
+            original_positions = list(range(dynamic.number_of_steps() + 1))
+            positions = []
+            for i in range(2):
+                choice = random.choice(original_positions)
+                original_positions.remove(choice)
+                positions.append(choice)
+            positions.sort()
+            p_start = positions[0]
+            p_end = positions[1]
+            partial_dynamic = dynamic.partial_dynamic_p(p_start, p_end)
+            if partial_dynamic not in dynamics:
+                dynamics.append(partial_dynamic)
+    f = open("resampling.txt", "w")
+    for i, dynamic_1 in enumerate(dynamics):
+        print(f"{i}", end="")
+        for j, dynamic_2 in enumerate(dynamics):
+            print(f" -> {j}", end="")
+            f.write(dynamic_1.display_alignment(dynamic_2))
+            test_single_resampling(dynamic_1, dynamic_2, root, values, f)
+            f.write("\n")
+        print()
+
+    # NOTE(review): reopening "resampling.txt" in "w" mode truncates
+    # everything written by phase 1, and neither handle is ever closed —
+    # consider a single `with open(...)` block.
+    f = open("resampling.txt", "w")
+
+    # from root
+    dynamic_tree = DynamicTree(np.ones(2, dtype=int))
+    dynamic = dynamic_tree.root()
+    for indices in [[0, 1], [0, 2], [1, 2], [0, 1, 2]]:
+        sub = dynamic.sub_dynamic(np.array(indices))
+        f.write(dynamic.display_alignment(sub))
+        if dynamic == sub:
+            f.write("Same!\n")
+    for start, end in [(0, 1), (0, 2), (1, 2)]:
+        partial = dynamic.partial_dynamic(start, end)
+        f.write(dynamic.display_alignment(partial))
+        if dynamic == partial:
+            f.write("Same!\n")
+    for positions in [[0, 1], [0, 2], [1, 2], [0, 1, 2]]:
+        sub = dynamic.sub_dynamic_p(np.array(positions))
+        f.write(dynamic.display_alignment(sub))
+        if dynamic == sub:
+            f.write("Same!\n")
+    for start, end in [(0, 1), (0, 2), (1, 2)]:
+        partial = dynamic.partial_dynamic_p(start, end)
+        f.write(dynamic.display_alignment(partial))
+        if dynamic == partial:
+            f.write("Same!\n")
+    f.write(dynamic_tree.display())
+
+    # from sub
+    dynamic_tree = DynamicTree(np.ones(4, dtype=int))
+    root = dynamic_tree.root()
+    dynamic = root.sub_dynamic(np.array([1, 2, 3]))
+    for indices in [[1, 2], [1, 3], [2, 3], [1, 2, 3]]:
+        sub = dynamic.sub_dynamic(np.array(indices))
+        f.write(dynamic.display_alignment(sub))
+        if dynamic == sub:
+            f.write("Same!\n")
+    for start, end in [(1, 2), (1, 3), (2, 3)]:
+        partial = dynamic.partial_dynamic(start, end)
+        f.write(dynamic.display_alignment(partial))
+        if dynamic == partial:
+            f.write("Same!\n")
+    for positions in [[0, 1], [0, 2], [1, 2], [0, 1, 2]]:
+        sub = dynamic.sub_dynamic_p(np.array(positions))
+        f.write(dynamic.display_alignment(sub))
+        if dynamic == sub:
+            f.write("Same!\n")
+    for start, end in [(0, 1), (0, 2), (1, 2)]:
+        partial = dynamic.partial_dynamic_p(start, end)
+        f.write(dynamic.display_alignment(partial))
+        if dynamic == partial:
+            f.write("Same!\n")
+    f.write(dynamic_tree.display())
+
+    # from partial
+    dynamic_tree = DynamicTree(np.ones(4, dtype=int))
+    root = dynamic_tree.root()
+    dynamic = root.partial_dynamic(1, 3)
+    for indices in [[1, 2], [1, 3], [2, 3], [1, 2, 3]]:
+        sub = dynamic.sub_dynamic(np.array(indices))
+        f.write(dynamic.display_alignment(sub))
+        if dynamic == sub:
+            f.write("Same!\n")
+    for start, end in [(1, 2), (1, 3), (2, 3)]:
+        partial = dynamic.partial_dynamic(start, end)
+        f.write(dynamic.display_alignment(partial))
+        if dynamic == partial:
+            f.write("Same!\n")
+    for positions in [[0, 1], [0, 2], [1, 2], [0, 1, 2]]:
+        sub = dynamic.sub_dynamic_p(np.array(positions))
+        f.write(dynamic.display_alignment(sub))
+        if dynamic == sub:
+            f.write("Same!\n")
+    for start, end in [(0, 1), (0, 2), (1, 2)]:
+        partial = dynamic.partial_dynamic_p(start, end)
+        f.write(dynamic.display_alignment(partial))
+        if dynamic == partial:
+            f.write("Same!\n")
+    f.write(dynamic_tree.display())
+
+    # All (start, end) pairs with 0 <= start < end <= size, keyed by size.
+    partial_intervall_map = dict()
+    for size in range(1, 4):
+        partial_intervalls = []
+        for start in range(size):
+            for end in range(start + 1, size + 1):
+                partial_intervalls.append((start, end))
+        partial_intervall_map[size] = partial_intervalls
+
+    # All position subsets of {0..size} with at least 2 elements, enumerated
+    # via the bits of i, keyed by size.
+    positions_map = dict()
+    for size in range(1, 4):
+        positions = []
+        for i in range(0, 2 ** (size + 1)):
+            positions_candidate = [j for j in range(size + 1) if (i >> j) % 2 == 1]
+            if len(positions_candidate) >= 2:
+                positions.append(np.array(positions_candidate))
+        positions_map[size] = positions
+
+    # assignment same
+    dynamic_tree = DynamicTree(np.arange(1, 6, dtype=int))
+    root = dynamic_tree.root()
+    dynamic = root.sub_dynamic(np.arange(1, 5, dtype=int))
+    values = np.full(dynamic.number_of_steps(), np.nan, dtype=float)
+    f.write(dynamic.display_alignment(dynamic))
+    for start_1, end_1 in partial_intervall_map[3]:
+        for start_2, end_2 in partial_intervall_map[3]:
+            partial_1 = dynamic.partial_dynamic_p(start_1, end_1)
+            partial_2 = dynamic.partial_dynamic_p(start_2, end_2)
+            test_single_resampling(partial_1, partial_2, dynamic, values, f)
+        f.write("\n")
+
+    # assignment to_backed (down the tree)
+    dynamic_tree = DynamicTree(np.arange(1, 6, dtype=int))
+    root = dynamic_tree.root()
+    dynamic = root.sub_dynamic(np.arange(1, 5, dtype=int))
+    values = np.full(dynamic.number_of_steps(), np.nan, dtype=float)
+    for positions in positions_map[3]:
+        # Skip the full set: it reproduces `dynamic` itself.
+        if len(positions) == 4:
+            continue
+        backed_dynamic = dynamic.sub_dynamic_p(positions)
+        f.write(dynamic.display_alignment(backed_dynamic))
+        for start_1, end_1 in partial_intervall_map[3]:
+            for start_2, end_2 in partial_intervall_map[
+                backed_dynamic.number_of_steps()
+            ]:
+                partial_1 = dynamic.partial_dynamic_p(start_1, end_1)
+                partial_2 = backed_dynamic.partial_dynamic_p(start_2, end_2)
+                test_single_resampling(partial_1, partial_2, dynamic, values, f)
+            f.write("\n")
+
+    # assignment from_backed (up the tree)
+    dynamic_tree = DynamicTree(np.arange(1, 6, dtype=int))
+    root = dynamic_tree.root()
+    dynamic = root.sub_dynamic(np.arange(1, 5, dtype=int))
+    values = np.full(dynamic.number_of_steps(), np.nan, dtype=float)
+    for positions in positions_map[3]:
+        if len(positions) == 4:
+            continue
+        backed_dynamic = dynamic.sub_dynamic_p(positions)
+        f.write(backed_dynamic.display_alignment(dynamic))
+        for start_1, end_1 in partial_intervall_map[backed_dynamic.number_of_steps()]:
+            for start_2, end_2 in partial_intervall_map[3]:
+                partial_1 = backed_dynamic.partial_dynamic_p(start_1, end_1)
+                partial_2 = dynamic.partial_dynamic_p(start_2, end_2)
+                test_single_resampling(partial_1, partial_2, dynamic, values, f)
+            f.write("\n")
+
+    # assignment common reference
+    # Build four blocks of sibling sub-dynamics (dropping 1, 2, 3, or 4
+    # positions from a common parent) and cross-test every sibling pair.
+    dynamic_tree = DynamicTree(np.arange(1, 10, dtype=int))
+    root = dynamic_tree.root()
+    blocks = []
+    dynamic = root.sub_dynamic(np.arange(1, 6, dtype=int))
+    values = np.full(dynamic.number_of_steps(), np.nan, dtype=float)
+    sub_dynamics = []
+    for i in range(5):
+        sub_dynamics.append(
+            dynamic.sub_dynamic_p(np.fromiter((j for j in range(5) if j != i), int))
+        )
+    blocks.append(((dynamic, values), sub_dynamics))
+    dynamic = root.sub_dynamic(np.arange(1, 7, dtype=int))
+    values = np.full(dynamic.number_of_steps(), np.nan, dtype=float)
+    sub_dynamics = []
+    for i in range(5):
+        for j in range(i + 1, 6):
+            sub_dynamics.append(
+                dynamic.sub_dynamic_p(
+                    np.fromiter((k for k in range(6) if k not in [i, j]), int)
+                )
+            )
+    blocks.append(((dynamic, values), sub_dynamics))
+    dynamic = root.sub_dynamic(np.arange(1, 8, dtype=int))
+    values = np.full(dynamic.number_of_steps(), np.nan, dtype=float)
+    sub_dynamics = []
+    for i in range(5):
+        for j in range(i + 1, 6):
+            for k in range(j + 1, 7):
+                sub_dynamics.append(
+                    dynamic.sub_dynamic_p(
+                        np.fromiter((l for l in range(7) if l not in [i, j, k]), int)
+                    )
+                )
+    blocks.append(((dynamic, values), sub_dynamics))
+    dynamic = root.sub_dynamic(np.arange(1, 9, dtype=int))
+    values = np.full(dynamic.number_of_steps(), np.nan, dtype=float)
+    sub_dynamics = []
+    for i in range(5):
+        for j in range(i + 1, 6):
+            for k in range(j + 1, 7):
+                for l in range(k + 1, 8):
+                    sub_dynamics.append(
+                        dynamic.sub_dynamic_p(
+                            np.fromiter(
+                                (m for m in range(8) if m not in [i, j, k, l]), int
+                            )
+                        )
+                    )
+    blocks.append(((dynamic, values), sub_dynamics))
+    for i, block in enumerate(blocks):
+        print(i)
+        dynamic, values = block[0]
+        dynamics = block[1]
+        indices = dynamic._all_indices()
+        for j, dynamic_1 in enumerate(dynamics):
+            print(f"{j}", end="")
+            for k, dynamic_2 in enumerate(dynamics):
+                print(f" -> {k}", end="")
+                if dynamic_1 == dynamic_2:
+                    continue
+                # Skip pairs where some parent index is missing from both
+                # siblings (no common coverage of the parent step).
+                if any(
+                    k not in dynamic_1._all_indices()
+                    and k not in dynamic_2._all_indices()
+                    for k in indices
+                ):
+                    continue
+                f.write(dynamic_1.display_alignment(dynamic_2))
+                for start_1, end_1 in partial_intervall_map[
+                    dynamic_1.number_of_steps()
+                ]:
+                    for start_2, end_2 in partial_intervall_map[
+                        dynamic_2.number_of_steps()
+                    ]:
+                        partial_1 = dynamic_1.partial_dynamic_p(start_1, end_1)
+                        partial_2 = dynamic_2.partial_dynamic_p(start_2, end_2)
+                        test_single_resampling(partial_1, partial_2, dynamic, values, f)
+                    f.write("\n")
+            print()
diff --git a/flexibility.py b/flexibility.py
deleted file mode 100644
index dd3ec99f5dcd3f91c3e0fc23d7a31cfb4e129d81..0000000000000000000000000000000000000000
--- a/flexibility.py
+++ /dev/null
@@ -1,1019 +0,0 @@
-"""
-MIT License
-
-Copyright (c) 2023 RWTH Aachen University
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-"""
-
-import pandas as pd
-import math
-import pyomo.environ as pyo
-from Model_Library.OptimizationModel import EntityResult, OptimizationModel
-from Model_Library.Component.model.AbstractComponent import ComponentKind, ComponentCommodity
-from Tooling.dynamics.Dynamic import resample
-
-def flexibility_activation(community, key, strategy):
-    # prepare data structures
-    graphs = dict()
-    for ps_name, ps in community.prosumers.items():
-        graphs[ps_name] = ps.build_graph()
-    for ca_name, ca in community.community_assets.items():
-        graphs[ca_name] = ca.build_graph()
-    rescheduling_models = dict()
-    flex_dict = dict()
-    c_f_dict = dict()
-    flex_dynamic_dict = dict()
-    validation_models = dict()
-    validated_results = dict()
-    for ps_name, ps in community.prosumers.items():
-        validated_results[ps_name] = ps.get_empty_entity_result()
-    for ca_name, ca in community.community_assets.items():
-        validated_results[ca_name] = ca.get_empty_entity_result()
-    community.result[key] = EntityResult(community.dynamic, [('agg_balance', 'T'), ('agg_export', 'T'), ('agg_import', 'T'), ('internal_exchange', 'T')])
-
-    i_start = community.dynamic.index_of(0)
-    i_end = community.dynamic.index_of(community.dynamic.number_of_steps())
-    n_overlap_default = 0  # deafult: 0; nr of timesteps the starting point of each interval is shifted into the prior interval
-    n_interval_default = 4*24  # default: 4*24; number of time steps in one interval of the RH
-
-    n_interval = n_interval_default
-
-    n_overlap = n_overlap_default
-
-    i_step_start = i_start
-
-    valid = True
-
-    changes = 0
-
-    while i_step_start < i_end:
-        print('-------------------' + str(i_step_start) + '-----------------')
-
-        # PREPARE INTERVAL
-        if i_step_start + n_interval - n_overlap < i_end and i_step_start + n_interval <= i_end:
-            # interval does not reach overall end
-            n_horizon_rh = n_interval  # number of time steps in this RH-interval
-            n_fix = max(n_horizon_rh - n_overlap, 1)  # number of fixed time steps of this RH-interval
-        elif i_step_start + n_interval - n_overlap < i_end and i_step_start + n_interval > i_end:
-            # interval reaches end of horizon, but just without overlap
-            n_horizon_rh = i_end - i_step_start  # interval length is adapted
-            n_fix = max(n_interval - n_overlap, 1)  # but not the number of fixed time steps
-        else:
-            # fixed time steps hit overall time horizon --> adapt also the fixed time steps
-            n_horizon_rh = i_end - i_step_start
-            n_fix = n_horizon_rh
-
-        # extract relevant timesteps values from overall time series
-        i_step_end = i_step_start + n_horizon_rh
-        i_fix_end = i_step_start + n_fix
-
-        # reschedule
-        for ps_name, ps in community.prosumers.items():
-            component_sizes = {component.name: ps._result[ps._last_result_key][(component.name, 'capacity')] for component in ps.get_components()}
-            grid_slack_values = {component.name: (ps._result[ps._last_result_key][(component.name, 'input_1')], ps._result[ps._last_result_key][(component.name, 'output_1')]) for component in ps.get_components(kind=ComponentKind.GRID, commodity=ComponentCommodity.ELECTRICITY)}
-            storage_slack_values = {component.name: ps._result[ps._last_result_key][(component.name, 'energy')] for component in ps.get_components(kind=ComponentKind.STORAGE)}
-            configuration = {
-                'fix_sizing': {
-                    'values': component_sizes
-                },
-                'predict': {},
-                'consumption_slack': {
-                    'commodity': [ComponentCommodity.COLD, ComponentCommodity.HEAT],
-                    'strategy_factor': 100
-                },
-                'grid_slack': {
-                    'commodity': ComponentCommodity.ELECTRICITY,
-                    'strategy_factor': 100,
-                    'values': grid_slack_values
-                },
-                'storage_slack': {
-                    'commodity': ComponentCommodity.ALL,
-                    'values': storage_slack_values,
-                    'strategy_factor': 1
-                },
-                'storage_boundaries': {
-                    'commodity': ComponentCommodity.ALL,
-                    'min': 0,
-                    'max': 1
-                }
-            }
-            if i_step_start != i_start:
-                initial_time_step = ps._dynamic.index_of(ps._dynamic.position_of(i_step_start) - 1)
-                storage_connect_values = {component.name: validated_results[ps_name][(component.name, 'energy')][initial_time_step] for component in ps.get_components(kind=ComponentKind.STORAGE)}
-                configuration['storage_connect'] = {
-                    'commodity': ComponentCommodity.ALL,
-                    'values': storage_connect_values
-                }
-            rescheduling_model = ps.optimize(configuration, i_step_start, i_step_end)
-            if rescheduling_model is None:
-                valid = False
-                break
-            else:
-                rescheduling_models[ps_name] = rescheduling_model
-                valid = True
-
-        if not valid:
-            changes += 1
-            result, n_interval = reduce_invervall(community, n_interval, i_step_start)
-            if not result:
-                print('Unteres Limit erreicht')
-                break
-            continue
-
-        for ca_name, ca in community.community_assets.items():
-            component_sizes = {component.name: ca._result[ca._last_result_key][(component.name, 'capacity')] for component in ca.get_components()}
-            grid_slack_values = {component.name: (ca._result[ca._last_result_key][(component.name, 'input_1')], ca._result[ca._last_result_key][(component.name, 'output_1')]) for component in ca.get_components(kind=ComponentKind.GRID, commodity=ComponentCommodity.ELECTRICITY)}
-            storage_slack_values = {component.name: ca._result[ca._last_result_key][(component.name, 'energy')] for component in ca.get_components(kind=ComponentKind.STORAGE)}
-            configuration = {
-                'fix_sizing': {
-                    'values': component_sizes
-                },
-                'predict': {},
-                'consumption_slack': {
-                    'commodity': [ComponentCommodity.COLD, ComponentCommodity.HEAT],
-                    'strategy_factor': 100
-                },
-                'grid_slack': {
-                    'commodity': ComponentCommodity.ELECTRICITY,
-                    'strategy_factor': 100,
-                    'values': grid_slack_values
-                },
-                'storage_slack': {
-                    'commodity': ComponentCommodity.ALL,
-                    'values': storage_slack_values,
-                    'strategy_factor': 1
-                },
-                'storage_boundaries': {
-                    'commodity': ComponentCommodity.ALL,
-                    'min': 0,
-                    'max': 1
-                }
-            }
-            if i_step_start != i_start:
-                initial_time_step = ca._dynamic.index_of(ca._dynamic.position_of(i_step_start) - 1)
-                storage_connect_values = {component.name: validated_results[ca_name][(component.name, 'energy')][initial_time_step] for component in ca.get_components(kind=ComponentKind.STORAGE)}
-                configuration['storage_connect'] = {
-                    'commodity': ComponentCommodity.ALL,
-                    'values': storage_connect_values
-                }
-            rescheduling_model = ca.optimize(configuration, i_step_start, i_step_end)
-            if rescheduling_model is None:
-                valid = False
-                break
-            else:
-                rescheduling_models[ca_name] = rescheduling_model
-                valid = True
-
-        if not valid:
-            changes += 1
-            result, n_interval = reduce_invervall(community, n_interval, i_step_start)
-            if not result:
-                print('Unteres Limit erreicht')
-                break
-            continue
-
-        # calculate flexibility
-        for ps_name, ps in community.prosumers.items():
-            flex, c_f, flex_dynamic = calculate_flex(ps, graphs[ps_name], rescheduling_models[ps_name], i_step_start, i_step_end)
-            flex_dict[ps_name] = flex
-            c_f_dict[ps_name] = c_f
-            flex_dynamic_dict[ps_name] = flex_dynamic
-
-        for ca_name, ca in community.community_assets.items():
-            flex, c_f, flex_dynamic = calculate_flex(ca, graphs[ca_name], rescheduling_models[ca_name], i_step_start, i_step_end)
-            flex_dict[ca_name] = flex
-            c_f_dict[ca_name] = c_f
-            flex_dynamic_dict[ca_name] = flex_dynamic
-
-        # aggregate rescheduled balance
-        rescheduled_balance = aggregate_rescheduled_balance(community, rescheduling_models, i_step_start, i_step_end)
-
-        # activate flexibility
-        activation_model = activate_flex(community, i_step_start, i_step_end, rescheduled_balance, strategy, flex_dict, c_f_dict, flex_dynamic_dict, rescheduling_models)
-
-        if activation_model is None:
-            changes += 1
-            result, n_interval = reduce_invervall(community, n_interval, i_step_start)
-            if not result:
-                print('Unteres Limit erreicht')
-                break
-            continue
-
-        # validate
-        valid = validate(community, i_start, i_step_start, i_fix_end, activation_model, validation_models, validated_results)
-        if not valid:
-            changes += 1
-            result, n_interval = reduce_invervall(community, n_interval, i_step_start)
-            if not result:
-                print('Unteres Limit erreicht')
-                break
-            continue
-                
-        # fix results
-        for ps_name in community.prosumers:
-            validated_results[ps_name].extract_partial_results_from_model(validation_models[ps_name], i_step_start, i_fix_end)
-        for ca_name in community.community_assets:
-            validated_results[ca_name].extract_partial_results_from_model(validation_models[ca_name], i_step_start, i_fix_end)
-
-        # aggregate exports and imports
-        aggregate_fixed_exports_imports(community, key, validated_results, i_step_start, i_fix_end)
-
-        # prepare next interval
-        i_step_start = int(i_step_start + max(n_interval - n_overlap, 1))
-        n_interval = n_interval_default
-        n_overlap = n_overlap_default
-    
-    for ps_name, ps in community.prosumers.items():
-        ps._result[key] = validated_results[ps_name]
-        ps._last_result_key = key
-
-    for ca_name, ca in community.community_assets.items():
-        ca._result[key] = validated_results[ca_name]
-        ca._last_result_key = key
-
-def reduce_invervall(community, n_interval, i_step_start):
-    #TODO assumes that n_overlap and n_overlap_default is 0
-    n_interval_new = int(math.floor(n_interval / 2))
-    if n_interval_new <= 0:
-        return False, None
-    while any(not prosumer._dynamic.has_index(i_step_start + n_interval_new) for prosumer in community.prosumers.values()) or any(not community_asset._dynamic.has_index(i_step_start + n_interval_new) for community_asset in community.community_assets.values()):
-        if n_interval_new <= 0:
-            return False, None
-        n_interval_new -= 1
-    return True, n_interval_new
-    
-def aggregate_rescheduled_balance(community, rescheduling_models, i_start, i_end):
-    dynamic = community.dynamic.partial_dynamic(i_start, i_end)
-    rescheduled_balance = pd.Series(data=0.0, index=dynamic.time_steps())
-
-    for ps_name, ps in community.prosumers.items():
-        rescheduled_data = ps.get_export_import(rescheduling_models[ps_name], dynamic)
-        rescheduled_balance += sum(rescheduled_export - rescheduled_import for rescheduled_export, rescheduled_import in rescheduled_data.values())
-    for ca_name, ca in community.community_assets.items():
-        rescheduled_data = ca.get_export_import(rescheduling_models[ca_name], dynamic)
-        rescheduled_balance += sum(rescheduled_export - rescheduled_import for rescheduled_export, rescheduled_import in rescheduled_data.values())
-    
-    return rescheduled_balance
-
-def calculate_flex(actor, graph, rescheduling_model, i_start, i_end):
-    """
-    The calc flex method does the following things:
-        1) building a graph of the prosumer to search it. The graph is a dictionary that has a structure which
-        connects every component (represented by its name as the key in a dictionary) with its neighbours
-        2) apply depth first search to the graph and find the endings of it. from the ending it backpropagates to
-        the grid. On the way it samples flexibilities while checking efficiency impacts and constraints of all
-        components on its way
-        3) measures the time in hours until every flexibility offer can be offered. This could be used as an
-        incentive of how convenient it is in this moment for the prosumer to actually offer this flexibility
-
-    Parameters
-    -------
-    time_steps: time steps of current RH-interval
-    """
-
-    # building the graph of the prosumer in a way that every component knows to which neighbours
-    # it is connected with input/output flows
-    dynamic = actor._dynamic.partial_dynamic(i_start, i_end)
-
-    # get the grid name
-    try:
-        grid_name = [comp for comp in actor._components if actor._components[comp].type == 'ElectricalGrid'][0]
-    except:
-        print('The grid for starting the dfs search was not found')
-        return
-
-    # do the dfs search first with the incoming flows and then with the outgoing flows
-    visited = set()
-
-    # make the DFS first for the electricity side, afterwards for heat/demand side
-    try:
-        elec_flex_from_inv, c_f_inv = dfs(actor, graph, rescheduling_model, visited, grid_name, dynamic, [], 'electricity')
-    except TypeError:
-        print('There is no flexibility from Storage or PV')
-
-    visited.remove(grid_name)
-
-    columns_list = ['e_dch_dmd', 'e_cha_dmd', 'e_dch_inv', 'e_cha_inv',
-                    'flex_pos_dmd', 'flex_pos_inc_dmd', 'flex_pos_dec_dmd',
-                    'flex_neg_dmd', 'flex_neg_inc_dmd', 'flex_neg_dec_dmd',
-                    'flex_pos_inv', 'flex_pos_inc_inv', 'flex_pos_dec_inv',
-                    'flex_neg_inv', 'flex_neg_inc_inv', 'flex_neg_dec_inv']
-
-    flex = pd.DataFrame(data=0.0, columns=columns_list, index=dynamic.time_steps())
-
-    # add flexibility from the inverter side (PV and battery)
-    flex['e_dch_inv'] = elec_flex_from_inv['e_dch']
-    flex['e_cha_inv'] = elec_flex_from_inv['e_cha']
-    flex['flex_pos_inv'] = elec_flex_from_inv['flex_pos']
-    flex['flex_pos_inc_inv'] = elec_flex_from_inv['flex_pos_inc']
-    flex['flex_pos_dec_inv'] = elec_flex_from_inv['flex_pos_dec']
-    flex['flex_neg_inv'] = elec_flex_from_inv['flex_neg']
-    flex['flex_neg_inc_inv'] = elec_flex_from_inv['flex_neg_inc']
-    flex['flex_neg_dec_inv'] = elec_flex_from_inv['flex_neg_dec']
-
-    # reset the grid values so that the additional flexibility is seen seperately
-    grid_comp = actor._components[grid_name]
-    for col in grid_comp.flex.columns:
-        grid_comp.flex[col].values[:] = 0.0
-
-    try:
-        elec_flex_from_dmd, c_f_dmd = dfs(actor, graph, rescheduling_model, visited, grid_name, dynamic, [], 'heat')
-        flex_dmd = 1
-    except TypeError:
-        flex_dmd = 0
-        print('There is no flexibility from DMD or Heat')
-
-    # combine correction factors in one dict
-    if not flex_dmd:
-        c_f_dmd = dict()
-        c_f_dmd['c_f_dch'] = pd.Series(data=0, index=dynamic.time_steps())
-        c_f_dmd['c_static_dch'] = pd.Series(data=0, index=dynamic.time_steps())
-        c_f_dmd['c_f_cha'] = pd.Series(data=0, index=dynamic.time_steps())
-        c_f_dmd['c_static_cha'] = pd.Series(data=0, index=dynamic.time_steps())
-    c_f_dict = {'elec': c_f_inv, 'heat': c_f_dmd}
-
-    # add (if exists) the flexibility from demand side (heat comm_batt + heat pump)
-    if flex_dmd:
-        flex['e_dch_dmd'] = elec_flex_from_dmd['e_dch']
-        flex['e_cha_dmd'] = elec_flex_from_dmd['e_cha']
-        flex['flex_pos_dmd'] = elec_flex_from_dmd['flex_pos']
-        flex['flex_pos_inc_dmd'] = elec_flex_from_dmd['flex_pos_inc']
-        flex['flex_pos_dec_dmd'] = elec_flex_from_dmd['flex_pos_dec']
-        flex['flex_neg_dmd'] = elec_flex_from_dmd['flex_neg']
-        flex['flex_neg_inc_dmd'] = elec_flex_from_dmd['flex_neg_inc']
-        flex['flex_neg_dec_dmd'] = elec_flex_from_dmd['flex_neg_dec']
-    
-    return flex, c_f_dict, dynamic
-
-def dfs(actor, graph, results, visited, node, dynamic, act_path, type):
-    """
-    This function starts a DFS search beginning with the grid node and searches for flexible components.
-    Every time it finds a flexible component following steps are done:
-        1) Calculate the maximal possible flexibilities from this component
-        2) Backpropagate through the graph back to the grid
-            - to check if constraints of the components in between are regarded
-            - to calculate the efficiency losses of the component to the grid
-        3) Finally, the usable flexibility of this component is added to the total amount of flexibility of the prosumer
-    Parameters
-    ----------
-    results: rescheduled results of this prosumer
-    visited: list of visited nodes
-    node: currently visited node
-    time_steps: relevant time steps of this RH-interval
-    init_results: initial results. Needed for DF calculation.
-    act_path: the path that is currently supervised. Used for backpropagation
-    type: can either be heat or electricity. This changes the vertexes beeing locked at.
-            Reason: The demand available flexibility on demand side should not interfere with the electricity side.
-            We differnciate between comm_batt from heat comm_batt and comm_batt from electric storages.
-    """
-    if node not in visited:
-
-        # get current node component
-        node_key = [comp for comp in actor._components if comp == node][0]
-        node_comp = actor._components[node_key]
-
-        # initialize flexibility df in first iteration
-        columns_list = ['e_dch', 'flex_pos', 'flex_pos_inc', 'flex_pos_dec',
-                        'e_cha', 'flex_neg', 'flex_neg_inc', 'flex_neg_dec']
-
-        node_comp.temp_flex = pd.DataFrame(data=0.0, columns=columns_list, index=dynamic.time_steps())
-
-        # flexible components are not added to the visited comps because they can be accessed through various paths
-        if not node_comp.flexible:
-            visited.add(node)
-
-        act_path.append(node)
-        c_static_df = pd.DataFrame(data=0.0, columns=['c_static_dch', 'c_static_cha'], index=dynamic.time_steps())
-        c_f_df = pd.DataFrame(data=1.0, columns=['c_f_dch', 'c_f_cha'], index=dynamic.time_steps())
-        c_f_df = pd.concat([c_f_df, c_static_df], axis=1)
-
-        #just go further down the tree if the component is not flexible
-        if not node_comp.flexible:
-            if type == 'electricity':
-                for neighbour in graph[node]['neigh_in']:
-                    if neighbour not in visited and neighbour in graph.keys():
-                        flex_pre_comp, c_f_pre_comp = dfs(actor, graph, results, visited, neighbour, dynamic, act_path, type)
-                        node_comp.temp_flex += flex_pre_comp  # adding the flexibility from the previous object to this component
-                        # choose the worst (DCH --> highest CHA--> lowest) correction factor of all neighbours so
-                        new_c_f = c_f_pre_comp['c_f_dch']
-                        c_f_df['c_f_dch'] = new_c_f.combine(c_f_df['c_f_dch'], max)
-
-                        new_c_f = c_f_pre_comp['c_static_dch']
-                        c_f_df['c_static_dch'] = new_c_f.combine(c_f_df['c_static_dch'], max)  # maybe change so that just one factor is used
-
-                        new_c_f = c_f_pre_comp['c_f_cha']
-                        c_f_df['c_f_cha'] = new_c_f.combine(c_f_df['c_f_cha'], min)
-
-                        new_c_f = c_f_pre_comp['c_static_cha']
-                        c_f_df['c_static_cha'] = new_c_f.combine(c_f_df['c_static_cha'], max)
-
-            elif type == 'heat':
-                for neighbour in graph[node]['neigh_out']:
-                    if neighbour not in visited and neighbour in graph.keys():
-                        flex_pre_comp, c_f_pre_comp = dfs(actor, graph, results, visited, neighbour, dynamic, act_path, type)
-                        node_comp.temp_flex += flex_pre_comp  # adding the flexibility from the previous object to this component
-                        # choose the worst (DCH --> highest CHA--> lowest) correction factor of all neighbours so
-                        new_c_f = c_f_pre_comp['c_f_dch']
-                        c_f_df['c_f_dch'] = new_c_f.combine(c_f_df['c_f_dch'], max)
-
-                        new_c_f = c_f_pre_comp['c_static_dch']
-                        c_f_df['c_static_dch'] = new_c_f.combine(c_f_df['c_static_dch'],max)  # maybe change so that just one factor is used
-
-                        new_c_f = c_f_pre_comp['c_f_cha']
-                        c_f_df['c_f_cha'] = new_c_f.combine(c_f_df['c_f_cha'], min) # max here because COP of HP is larger than 1
-
-                        new_c_f = c_f_pre_comp['c_static_cha']
-                        c_f_df['c_static_cha'] = new_c_f.combine(c_f_df['c_static_cha'], max)
-
-        # adjust the correction factors according to components efficiencies
-        c_f_df = node_comp.calc_correction_factors(actor._dynamic.time_steps(), c_f_df, results)
-
-        # when there are no neighbours anymore to check, calculate the flexibility of this comp itself
-        if node_comp.flexible:
-            # calculate the MAXIMAL THEORETICAL FLEXIBILITY in the component class
-            node_comp.calc_flex_comp(results, dynamic, actor._result)
-
-        # check limits of non flexible comp because flexible comps intrinsically respect their
-        # limits from flexibility calculation
-        else:
-            input_flows, output_flows = get_planned_flows(results, node_comp, dynamic)
-            # Transform the flexibilities to the input side of the inflexible component
-            node_comp.adjust_flex_with_efficiency(results, dynamic)
-            # Check, if the maximal available flexibility could hit the power limits of this component
-            node_comp.check_limits(input_flows, output_flows, results, dynamic)
-
-        # remove node from list if it has no more neighbours
-        act_path.remove(node)
-
-        return node_comp.temp_flex, c_f_df
-
-def get_planned_flows(rsl, component, dynamic):
-    """
-    get initially planned power flows in and out of a prosumers component
-
-    Parameters
-    ----------
-    rsl: prosumers initial result
-    component: component
-    time_steps: time steps of this RH-interval
-
-    Returns
-    -------
-    in_flow: scheduled power flow into component
-    out_flow: scheduled power flow out of component
-    """
-    input_commodity_1, input_commodity_2, output_commoditey_1, output_commoditey_2 = component.get_input_output_commodities()
-
-    in_flow = pd.Series(data=0, index=dynamic.time_steps())
-
-    if input_commodity_1 == ComponentCommodity.ELECTRICITY:
-        in_flow += rsl[(component.name, 'input_1')]
-
-    if input_commodity_2 == ComponentCommodity.ELECTRICITY:
-        in_flow += rsl[(component.name, 'input_2')]
-
-    out_flow = pd.Series(data=0, index=dynamic.time_steps())
-
-    if output_commoditey_1 == ComponentCommodity.ELECTRICITY:
-        out_flow += rsl[(component.name, 'output_1')]
-
-    if output_commoditey_2 == ComponentCommodity.ELECTRICITY:
-        out_flow += rsl[(component.name, 'output_2')]
-
-    return in_flow, out_flow
-
-def activate_flex(community, i_start, i_end, rescheduled_balance, strategy, flex_dict, c_f_dict, flex_dynamic_dict, rescheduling_models):
-    dynamic = community.dynamic.partial_dynamic(i_start, i_end) # TODO alles ab hier überarbeiten, wenn ich variableninterpolation implementiert habe
-    model = build_activation_model(community, dynamic, rescheduled_balance, strategy, flex_dict, c_f_dict, flex_dynamic_dict, rescheduling_models)
-
-    options = dict()
-    options['MIPGap'] = 0.02
-    options['Presolve'] = 2
-    options['TimeLimit'] = 200
-
-    model.solve(options, True)
-
-    if model.is_ok():
-        return model
-    else:
-        return None
-    
-def build_activation_model(community, dynamic, rescheduled_balance, strategy, flex_dict, c_f_dict, flex_dynamic_dict, rescheduling_models):
-    model = OptimizationModel(community.name, dynamic)
-
-    for ps_name, ps in community.prosumers.items():
-        add_base_variables(ps_name, ps, model, flex_dict[ps_name], flex_dynamic_dict[ps_name])
-        add_base_cons(ps_name, ps, model, flex_dict[ps_name], c_f_dict[ps_name], flex_dynamic_dict[ps_name])
-
-    for ca_name, ca in community.community_assets.items():
-        add_base_variables(ca_name, ca, model, flex_dict[ca_name], flex_dynamic_dict[ps_name])
-        add_base_cons(ca_name, ca, model, flex_dict[ca_name], c_f_dict[ca_name], flex_dynamic_dict[ps_name])
-
-    add_strategy(community, model, rescheduled_balance, strategy, rescheduling_models)
-
-    return model
-
-def add_base_variables(name, actor, model, flex, flex_dynamic):
-    flex_min_inv = resample(flex['flex_neg_inv'], flex_dynamic, model.dynamic)
-    flex_max_inv = resample(flex['flex_pos_inv'], flex_dynamic, model.dynamic)
-
-    flex_min_dmd = resample(flex['flex_neg_dmd'], flex_dynamic, model.dynamic)
-    flex_max_dmd = resample(flex['flex_pos_dmd'], flex_dynamic, model.dynamic)
-
-    model.add(('flex_' + name,), pyo.Var(model.T, bounds=(None, None)))
-
-    model.add(('flex_pos_inv_' + name,), pyo.Var(model.T, bounds=(0, None)))
-    model.add(('flex_neg_inv_' + name,), pyo.Var(model.T, bounds=(0, None)))
-
-    model.add(('flex_pos_dmd_' + name,), pyo.Var(model.T, bounds=(0, None)))
-    model.add(('flex_neg_dmd_' + name,), pyo.Var(model.T, bounds=(0, None)))
-
-    def rule(m, t):
-        return model.component_dict[('flex_pos_inv_' + name,)][t] <= 0.8 * flex_max_inv[t]
-    model.add(('flex_pos_inv_' + name + '_ub',), pyo.Constraint(model.T, rule = rule))
-
-    def rule(m, t):
-        return model.component_dict[('flex_neg_inv_' + name,)][t] <= 0.8 * flex_min_inv[t]
-    model.add(('flex_neg_inv_' + name + '_ub',), pyo.Constraint(model.T, rule = rule))
-
-    def rule(m, t):
-        return model.component_dict[('flex_pos_dmd_' + name,)][t] <= 0.8 * flex_max_dmd[t]
-    model.add(('flex_pos_dmd_' + name + '_ub',), pyo.Constraint(model.T, rule = rule))
-
-    def rule(m, t):
-        return model.component_dict[('flex_neg_dmd_' + name,)][t] <= 0.8 * flex_min_dmd[t]
-    model.add(('flex_neg_dmd_' + name + '_ub',), pyo.Constraint(model.T, rule = rule))
-
-    # there are just these two binary variables for the whole prosumer
-    # because we can not use negative flex from dmd and positive flex from elec. So they will be the same
-    model.add(('z_flex_pos_inv_' + name,), pyo.Var(model.T, domain=pyo.Binary))
-    model.add(('z_flex_neg_inv_' + name,), pyo.Var(model.T, domain=pyo.Binary))
-    model.add(('z_flex_pos_dmd_' + name,), pyo.Var(model.T, domain=pyo.Binary))
-    model.add(('z_flex_neg_dmd_' + name,), pyo.Var(model.T, domain=pyo.Binary))
-
-def add_base_cons(name, actor, model, flex, c_f_unsampled, flex_dynamic):
-    e_dch_inv = resample(flex['e_dch_inv'], flex_dynamic, model.dynamic)
-    e_cha_inv = resample(flex['e_cha_inv'], flex_dynamic, model.dynamic)
-
-    e_dch_dmd = resample(flex['e_dch_dmd'], flex_dynamic, model.dynamic)
-    e_cha_dmd = resample(flex['e_cha_dmd'], flex_dynamic, model.dynamic)
-
-    c_f = dict()
-    c_f['elec'] = dict()
-    c_f['elec']['c_f_dch'] = resample(c_f_unsampled['elec']['c_f_dch'], flex_dynamic, model.dynamic)
-    c_f['elec']['c_static_dch'] = resample(c_f_unsampled['elec']['c_static_dch'], flex_dynamic, model.dynamic)
-    c_f['elec']['c_f_cha'] = resample(c_f_unsampled['elec']['c_f_cha'], flex_dynamic, model.dynamic)
-    c_f['elec']['c_static_cha'] = resample(c_f_unsampled['elec']['c_static_cha'], flex_dynamic, model.dynamic)
-    c_f['heat'] = dict()
-    c_f['heat']['c_f_dch'] = resample(c_f_unsampled['heat']['c_f_dch'], flex_dynamic, model.dynamic)
-    c_f['heat']['c_static_dch'] = resample(c_f_unsampled['heat']['c_static_dch'], flex_dynamic, model.dynamic)
-    c_f['heat']['c_f_cha'] = resample(c_f_unsampled['heat']['c_f_cha'], flex_dynamic, model.dynamic)
-    c_f['heat']['c_static_cha'] = resample(c_f_unsampled['heat']['c_static_cha'], flex_dynamic, model.dynamic)
-
-    bigM = 50
-
-    def rule(m, t):
-        return model.component_dict[('flex_' + name,)][t] == model.component_dict[('flex_pos_inv_' + name,)][t] - model.component_dict[('flex_neg_inv_' + name,)][t] + model.component_dict[('flex_pos_dmd_' + name,)][t] - model.component_dict[('flex_neg_dmd_' + name,)][t]
-    model.add(('flex_' + name + '_sum',), pyo.Constraint(model.T, rule = rule))
-
-    # if z_flex_sign == 1 --> flex_pos < bigM; flex_neg = 0
-    def rule(m, t):
-        return model.component_dict[('flex_pos_inv_' + name,)][t] <= model.component_dict[('z_flex_pos_inv_' + name,)][t] * bigM
-    model.add(('z_flex_pos_inv_' + name + '_1',), pyo.Constraint(model.T, rule = rule))
-
-    def rule(m, t):
-        return model.component_dict[('flex_pos_inv_' + name,)][t] >= model.component_dict[('z_flex_pos_inv_' + name,)][t] * 0.001
-    model.add(('z_flex_pos_inv_' + name + '_2',), pyo.Constraint(model.T, rule = rule))
-
-
-    def rule(m, t):
-        return model.component_dict[('flex_neg_inv_' + name,)][t] <= model.component_dict[('z_flex_neg_inv_' + name,)][t] * bigM
-    model.add(('z_flex_neg_inv_' + name + '_1',), pyo.Constraint(model.T, rule = rule))
-
-    def rule(m, t):
-        return model.component_dict[('flex_neg_inv_' + name,)][t] >= model.component_dict[('z_flex_neg_inv_' + name,)][t] * 0.001
-    model.add(('z_flex_neg_inv_' + name + '_2',), pyo.Constraint(model.T, rule = rule))
-
-
-    def rule(m, t):
-        return model.component_dict[('flex_pos_dmd_' + name,)][t] <= model.component_dict[('z_flex_pos_dmd_' + name,)][t] * bigM
-    model.add(('z_flex_pos_dmd_' + name + '_1',), pyo.Constraint(model.T, rule = rule))
-
-    def rule(m, t):
-        return model.component_dict[('flex_pos_dmd_' + name,)][t] >= model.component_dict[('z_flex_pos_dmd_' + name,)][t] * 0.001
-    model.add(('z_flex_pos_dmd_' + name + '_2',), pyo.Constraint(model.T, rule = rule))
-
-
-    def rule(m, t):
-        return model.component_dict[('flex_neg_dmd_' + name,)][t] <= model.component_dict[('z_flex_neg_dmd_' + name,)][t] * bigM
-    model.add(('z_flex_neg_dmd_' + name + '_1',), pyo.Constraint(model.T, rule = rule))
-
-    def rule(m, t):
-        return model.component_dict[('flex_neg_dmd_' + name,)][t] >= model.component_dict[('z_flex_neg_dmd_' + name,)][t] * 0.001
-    model.add(('z_flex_neg_dmd_' + name + '_2',), pyo.Constraint(model.T, rule = rule))
-
-    # make sure that just one flexibility is used at a time
-    def rule(m, t):
-        return model.component_dict[('z_flex_pos_inv_' + name,)][t] + model.component_dict[('z_flex_neg_inv_' + name,)][t] <= 1
-    model.add(('z_flex_pos_inv_' + name + '_3',), pyo.Constraint(model.T, rule = rule))
-
-    def rule(m, t):
-        return model.component_dict[('z_flex_pos_dmd_' + name,)][t] + model.component_dict[('z_flex_neg_dmd_' + name,)][t] <= 1
-    model.add(('z_flex_pos_dmd_' + name + '_3',), pyo.Constraint(model.T, rule = rule))
-
-    def rule(m, t):
-        return model.component_dict[('z_flex_pos_inv_' + name,)][t] + model.component_dict[('z_flex_neg_dmd_' + name,)][t] <= 1
-    model.add(('z_flex_pos_inv_' + name + '_4',), pyo.Constraint(model.T, rule = rule))
-
-    def rule(m, t):
-        return model.component_dict[('z_flex_pos_dmd_' + name,)][t] + model.component_dict[('z_flex_neg_inv_' + name,)][t] <= 1
-    model.add(('z_flex_pos_dmd_' + name + '_4',), pyo.Constraint(model.T, rule = rule))
-
-    # upper energy boundary for energy flexibility
-    def rule(m, t):
-        return pyo.quicksum(
-            model.component_dict[('flex_pos_inv_' + name,)][t_old] * c_f['elec']['c_f_dch'][t_old] +
-            model.component_dict[('z_flex_pos_inv_' + name,)][t_old] * c_f['elec']['c_static_dch'][t_old] * model.step_size(t_old)
-            for t_old in range(model.T.first(), model.T.first() + model.T.ord(t))) - pyo.quicksum(
-            model.component_dict[('flex_neg_inv_' + name,)][t_old] * c_f['elec']['c_f_cha'][t_old] -
-            model.component_dict[('z_flex_neg_inv_' + name,)][t_old] * c_f['elec']['c_static_cha'][t_old] * model.step_size(t_old)
-            for t_old in range(model.T.first(), model.T.first() + model.T.ord(t))) <= 1 * e_dch_inv[t]
-    model.add(('flex_inv_' + name + '_ub',), pyo.Constraint(model.T, rule = rule))
-
-    # lower energy boundary for energy flexibility
-    def rule(m, t):
-        return pyo.quicksum(
-            model.component_dict[('flex_pos_inv_' + name,)][t_old] * c_f['elec']['c_f_dch'][t_old] +
-            model.component_dict[('z_flex_pos_inv_' + name,)][t_old] * c_f['elec']['c_static_dch'][t_old] * model.step_size(t_old)
-            for t_old in range(model.T.first(), model.T.first() + model.T.ord(t))) - pyo.quicksum(
-            model.component_dict[('flex_neg_inv_' + name,)][t_old] * c_f['elec']['c_f_cha'][t_old] -
-            model.component_dict[('z_flex_neg_inv_' + name,)][t_old] * c_f['elec']['c_static_cha'][t_old] * model.step_size(t_old)
-            for t_old in range(model.T.first(), model.T.first() + model.T.ord(t))) >= -1 * e_cha_inv[t]
-    model.add(('flex_inv_' + name + '_lb',), pyo.Constraint(model.T, rule = rule))
-
-    if e_cha_dmd.any():
-        # upper energy boundary for energy flexibility
-        def rule(m, t):
-            return pyo.quicksum(
-                model.component_dict[('flex_pos_dmd_' + name,)][t_old] * c_f['heat']['c_f_dch'][t_old] +
-                model.component_dict[('z_flex_pos_dmd_' + name,)][t_old] * c_f['heat']['c_static_dch'][t_old] * model.step_size(t_old)
-                for t_old in range(model.T.first(), model.T.first() + model.T.ord(t))) - pyo.quicksum(
-                model.component_dict[('flex_neg_dmd_' + name,)][t_old] * c_f['heat']['c_f_cha'][t_old] -
-                model.component_dict[('z_flex_neg_dmd_' + name,)][t_old] * c_f['heat']['c_static_cha'][t_old] * model.step_size(t_old)
-                for t_old in range(model.T.first(), model.T.first() + model.T.ord(t))) <= 1 * e_dch_dmd[t]
-        model.add(('flex_dmd_' + name + '_ub',), pyo.Constraint(model.T, rule = rule))
-
-        # lower energy boundary for energy flexibility
-        def rule(m, t):
-            return pyo.quicksum(
-                model.component_dict[('flex_pos_dmd_' + name,)][t_old] * c_f['heat']['c_f_dch'][t_old] +
-                model.component_dict[('z_flex_pos_dmd_' + name,)][t_old] * c_f['heat']['c_static_dch'][t_old] * model.step_size(t_old)
-                for t_old in range(model.T.first(), model.T.first() + model.T.ord(t))) - pyo.quicksum(
-                model.component_dict[('flex_neg_dmd_' + name,)][t_old] * c_f['heat']['c_f_cha'][t_old] -
-                model.component_dict[('z_flex_neg_dmd_' + name,)][t_old] * c_f['heat']['c_static_cha'][t_old] * model.step_size(t_old)
-                for t_old in range(model.T.first(), model.T.first() + model.T.ord(t))) >= -1 * e_cha_dmd[t]
-        model.add(('flex_dmd_' + name + '_lb',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return pyo.quicksum(
-                model.component_dict[('flex_pos_dmd_' + name,)][t_old] * c_f['heat']['c_f_dch'][t_old] +
-                model.component_dict[('z_flex_pos_dmd_' + name,)][t_old] * c_f['heat']['c_static_dch'][t_old] * model.step_size(t_old)
-                for t_old in range(model.T.first(), model.T.first() + model.T.ord(t))) <= pyo.quicksum(
-                model.component_dict[('flex_neg_dmd_' + name,)][t_old] * c_f['heat']['c_f_cha'][t_old] -
-                model.component_dict[('z_flex_neg_dmd_' + name,)][t_old] * c_f['heat']['c_static_cha'][t_old] * model.step_size(t_old)
-                for t_old in range(model.T.first(), model.T.first() + model.T.ord(t)))
-        model.add(('flex_dmd_' + name + '_1',), pyo.Constraint(model.T, rule = rule))
-        
-    else:
-        def rule(m, t):
-            return model.component_dict[('flex_pos_dmd_' + name,)][t] == 0
-        model.add(('flex_dmd_' + name + '_1',), pyo.Constraint(model.T, rule = rule))
-        
-        def rule(m, t):
-            return model.component_dict[('flex_neg_dmd_' + name,)][t] == 0
-        model.add(('flex_dmd_' + name + '_2',), pyo.Constraint(model.T, rule = rule))
-
-def add_strategy(community, model, rescheduled_balance, strategy, rescheduling_models):
-    if 'max_operational_profit' == strategy:
-        # Energy exported by all actors of the community
-        model.add(('agg_export',), pyo.Var(model.T, bounds=(0, None)))
-        # Energy imported by all actors of the community
-        model.add(('agg_import',), pyo.Var(model.T, bounds=(0, None)))
-        # Part of the energy imported by all actors that is fullfilled by part of the energy exported by all actors in the community
-        model.add(('internal_exchange',), pyo.Var(model.T, bounds=(0, None)))
-        # Energy exported by the community to the external grid
-        model.add(('community_export',), pyo.Var(model.T, bounds=(0, None)))
-        # Energy imported by the community from the extrnal grid
-        model.add(('community_import',), pyo.Var(model.T, bounds=(0, None)))
-
-        bigM = 1000
-
-        for ps_name, ps in community.prosumers.items():
-            model.add(('z_' + ps_name,), pyo.Var(model.T, domain=pyo.Binary))
-            model.add(('export_' + ps_name,), pyo.Var(model.T, bounds=(0, None)))
-            model.add(('import_' + ps_name,), pyo.Var(model.T, bounds=(0, None)))
-
-            grid_name = [comp for comp in ps._components if ps._components[comp].type == 'ElectricalGrid'][0]
-            ps_export, ps_import = ps.get_export_import(rescheduling_models[ps_name], model.dynamic)[grid_name]
-
-            # z_ps == 0, if outputs > inputs
-            # z_ps == 1, if inputs > outputs
-            def rule(m, t):
-                return model.component_dict[('z_' + ps_name,)][t] <= 1 - (ps_export[t] - ps_import[t] + model.component_dict[('flex_' + ps_name,)][t]) / bigM
-            model.add(('z_' + ps_name + '_1',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[('z_' + ps_name,)][t] >= -1 * (ps_export[t] - ps_import[t] + model.component_dict[('flex_' + ps_name,)][t]) / bigM
-            model.add(('z_' + ps_name + '_2',), pyo.Constraint(model.T, rule = rule))
-
-            # output_ps = output_ps - input_ps + flex if outputs + flex > inputs (z_ps= 0)
-            # output_ps = 0 if z_ps = 1
-            # input_ps = output_ps - input_ps + flex if inputs - flex > outputs (z_ps = 1)
-            # input_ps = 0 if z_ps = 0
-            def rule(m, t):
-                return model.component_dict[('export_' + ps_name,)][t] <= (ps_export[t] - ps_import[t] + model.component_dict[('flex_' + ps_name,)][t]) + (model.component_dict[('z_' + ps_name,)][t]) * bigM
-            model.add(('z_' + ps_name + '_3',), pyo.Constraint(model.T, rule = rule))
-            
-            def rule(m, t):
-                return model.component_dict[('export_' + ps_name,)][t] >= (ps_export[t] - ps_import[t] + model.component_dict[('flex_' + ps_name,)][t]) - (model.component_dict[('z_' + ps_name,)][t]) * bigM
-            model.add(('z_' + ps_name + '_4',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[('import_' + ps_name,)][t] <= -1 * (ps_export[t] - ps_import[t] + model.component_dict[('flex_' + ps_name,)][t]) + (1 - model.component_dict[('z_' + ps_name,)][t]) * bigM
-            model.add(('z_' + ps_name + '_5',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[('import_' + ps_name,)][t] >= -1 * (ps_export[t] - ps_import[t] + model.component_dict[('flex_' + ps_name,)][t]) - (1 - model.component_dict[('z_' + ps_name,)][t]) * bigM
-            model.add(('z_' + ps_name + '_6',), pyo.Constraint(model.T, rule = rule))
-
-            # input_ps == 0 if z_ps == 0
-            def rule(m, t):
-                return model.component_dict[('import_' + ps_name,)][t] >= -1 * bigM * model.component_dict[('z_' + ps_name,)][t]
-            model.add(('z_' + ps_name + '_7',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[('import_' + ps_name,)][t] <= bigM * model.component_dict[('z_' + ps_name,)][t]
-            model.add(('z_' + ps_name + '_8',), pyo.Constraint(model.T, rule = rule))
-
-            # output_ps == 0 if z_ps == 1
-            def rule(m, t):
-                return model.component_dict[('export_' + ps_name,)][t] >= -1 * bigM * (1 - model.component_dict[('z_' + ps_name,)][t])
-            model.add(('z_' + ps_name + '_9',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[('export_' + ps_name,)][t] <= bigM * (1 - model.component_dict[('z_' + ps_name,)][t])
-            model.add(('z_' + ps_name + '_10',), pyo.Constraint(model.T, rule = rule))
-
-        for ca_name, ca in community.community_assets.items():
-            model.add(('z_' + ca_name,), pyo.Var(model.T, domain=pyo.Binary))
-            model.add(('export_' + ca_name,), pyo.Var(model.T, bounds=(0, None)))
-            model.add(('import_' + ca_name,), pyo.Var(model.T, bounds=(0, None)))
-
-            grid_name = [comp for comp in ca._components if ca._components[comp].type == 'ElectricalGrid'][0]
-            ca_export, ca_import = ca.get_export_import(rescheduling_models[ca_name], model.dynamic)[grid_name]
-
-            # z_ca == 0, if outputs > inputs
-            # z_ca == 1, if inputs > outputs
-            def rule(m, t):
-                return model.component_dict[('z_' + ca_name,)][t] <= 1 - (ca_export[t] - ca_import[t] + model.component_dict[('flex_' + ca_name,)][t]) / bigM
-            model.add(('z_' + ca_name + '_1',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[('z_' + ca_name,)][t] >= -1 * (ca_export[t] - ca_import[t] + model.component_dict[('flex_' + ca_name,)][t]) / bigM
-            model.add(('z_' + ca_name + '_2',), pyo.Constraint(model.T, rule = rule))
-
-            # output_ca = output_ca - input_ca + flex if outputs + flex > inputs (z_ca= 0)
-            # output_ca = 0 if z_ca = 1
-            # input_ca = output_ca - input_ca + flex if inputs - flex > outputs (z_ca = 1)
-            # input_ca = 0 if z_ca = 0
-            def rule(m, t):
-                return model.component_dict[('export_' + ca_name,)][t] <= (ca_export[t] - ca_import[t] + model.component_dict[('flex_' + ca_name,)][t]) + (model.component_dict[('z_' + ca_name,)][t]) * bigM
-            model.add(('z_' + ca_name + '_3',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[('export_' + ca_name,)][t] >= (ca_export[t] - ca_import[t] + model.component_dict[('flex_' + ca_name,)][t]) - (model.component_dict[('z_' + ca_name,)][t]) * bigM
-            model.add(('z_' + ca_name + '_4',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[('import_' + ca_name,)][t] <= -1 * (ca_export[t] - ca_import[t] + model.component_dict[('flex_' + ca_name,)][t]) + (1 - model.component_dict[('z_' + ca_name,)][t]) * bigM
-            model.add(('z_' + ca_name + '_5',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[('import_' + ca_name,)][t] >= -1 * (ca_export[t] - ca_import[t] + model.component_dict[('flex_' + ca_name,)][t]) - (1 - model.component_dict[('z_' + ca_name,)][t]) * bigM
-            model.add(('z_' + ca_name + '_6',), pyo.Constraint(model.T, rule = rule))
-
-            # input_ca == 0 if z_ca == 0
-            def rule(m, t):
-                return model.component_dict[('import_' + ca_name,)][t] >= -1 * bigM * model.component_dict[('z_' + ca_name,)][t]
-            model.add(('z_' + ca_name + '_7',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[('import_' + ca_name,)][t] <= bigM * model.component_dict[('z_' + ca_name,)][t]
-            model.add(('z_' + ca_name + '_8',), pyo.Constraint(model.T, rule = rule))
-
-            # output_ca == 0 if z_ca == 1
-            def rule(m, t):
-                return model.component_dict[('export_' + ca_name,)][t] >= -1 * bigM * (1 - model.component_dict[('z_' + ca_name,)][t])
-            model.add(('z_' + ca_name + '_9',), pyo.Constraint(model.T, rule = rule))
-
-            def rule(m, t):
-                return model.component_dict[('export_' + ca_name,)][t] <= bigM * (1 - model.component_dict[('z_' + ca_name,)][t])
-            model.add(('z_' + ca_name + '_10',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[('agg_export',)][t] == pyo.quicksum(model.component_dict[('export_' + ps_name,)][t] for ps_name in community.prosumers) + pyo.quicksum(model.component_dict[('export_' + ca_name,)][t] for ca_name in community.community_assets)
-        model.add(('outputs_cons',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[('agg_import',)][t] == pyo.quicksum(model.component_dict[('import_' + ps_name,)][t] for ps_name in community.prosumers) + pyo.quicksum(model.component_dict[('import_' + ca_name,)][t] for ca_name in community.community_assets)
-        model.add(('intputs_cons',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[('internal_exchange',)][t] <= model.component_dict[('agg_export',)][t]
-        model.add(('internal_exchange_ub_output',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[('internal_exchange',)][t] <= model.component_dict[('agg_import',)][t]
-        model.add(('internal_exchange_ub_input',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[('community_export',)][t] == model.component_dict[('agg_export',)][t] - model.component_dict[('internal_exchange',)][t]
-        model.add(('community_export_cons',), pyo.Constraint(model.T, rule = rule))
-
-        def rule(m, t):
-            return model.component_dict[('community_import',)][t] == model.component_dict[('agg_import',)][t] - model.component_dict[('internal_exchange',)][t]
-        model.add(('community_import_cons',), pyo.Constraint(model.T, rule = rule))
-
-        # Peak Shaving
-        model.add(('peak_community_import',), pyo.Var())
-
-        def rule(m, t):
-            return model.component_dict[('community_import',)][t] <= model.component_dict[('peak_community_import',)]
-        model.add(('peak_community_import_cons',), pyo.Constraint(model.T, rule = rule))
-
-        # The sum of the external demands after DF activations are not allowed to be higher than before.
-        # This aims to prohibit arbitrage trading.
-        community_import = rescheduled_balance[model.time_steps] * -1
-        community_import.loc[community_import < 0] = 0
-        total_community_import = community_import.sum()
-
-        model.add(('total_community_import_cons',), pyo.Constraint(expr = pyo.quicksum(model.component_dict[('community_import',)][t] for t in model.time_steps) <= total_community_import))
-
-        # factor that converts the simulation to ONE year
-        annual_factor = 8760.0 / sum(model.dynamic.step_size_p(position) for position in range(model.dynamic.number_of_steps()))
-
-        model.block.f1 = pyo.Var()
-        elec_price_int = resample(community.configuration['elec_price_int'], community.dynamic, model.dynamic)
-        elec_price_ext = resample(community.configuration['elec_price_ext'], community.dynamic, model.dynamic)
-        injection_price = resample(community.configuration['injection_price'], community.dynamic, model.dynamic)
-        model.block.C_f1 = pyo.Constraint(expr=model.block.f1 == (- pyo.quicksum(model.component_dict[('internal_exchange',)][t] * elec_price_int[t] * model.step_size(t) for t in model.time_steps)
-                                                                  - pyo.quicksum(model.component_dict[('community_import',)][t] * elec_price_ext[t] * model.step_size(t) for t in model.time_steps)
-                                                                  + pyo.quicksum(model.component_dict[('internal_exchange',)][t] * injection_price[t] * model.step_size(t) for t in model.time_steps)
-                                                                  + pyo.quicksum(model.component_dict[('community_export',)][t] * injection_price[t] * model.step_size(t) for t in model.time_steps)
-                                                                 ) * annual_factor
-                                                               - model.component_dict[('peak_community_import',)] * community.configuration['network_usage_capacity_fee'])
-
-        model.block.O_f1 = pyo.Objective(expr=model.block.f1, sense=pyo.maximize)
-    if 'max_wholesale_profit' == strategy:
-        model.add(('agg_balance',), pyo.Var(model.T, bounds=(None, None)))
-
-        def rule(m, t):
-            return model.component_dict[('agg_balance',)][t] == rescheduled_balance[t] + pyo.quicksum(model.component_dict[('flex_' + ps_name,)][t] for ps_name in community.prosumers) + pyo.quicksum(model.component_dict[('flex_' + cc_name,)][t] for cc_name in community.community_assets)
-        model.add(('agg_balance_cons',), pyo.Constraint(model.T, rule = rule))
-       
-        model.block.f1 = pyo.Var()
-        model.block.C_f1 = pyo.Constraint(expr=model.block.f1 == pyo.quicksum(model.component_dict[('agg_balance',)][t] * community.configuration['spot_price'][t] for t in model.time_steps))
-
-        model.block.O_f1 = pyo.Objective(expr=model.block.f1, sense=pyo.maximize)
-    
-def validate(community, i_start, i_start_validation, i_end_validation, activation_model, validation_models, validated_results):
-    dynamic_validation = community.dynamic.partial_dynamic(i_start_validation, i_end_validation)
-    time_steps_validation = community.dynamic.indices_within(i_start_validation, i_end_validation)
-
-    for ps_name, ps in community.prosumers.items():
-        dynamic_ps = ps._dynamic.partial_dynamic(i_start_validation, i_end_validation)
-        grid_name = [comp for comp in ps._components if ps._components[comp].type == 'ElectricalGrid'][0]
-        grid_fix_values = {component.name: (ps._result[ps._last_result_key][(component.name, 'input_1')], ps._result[ps._last_result_key][(component.name, 'output_1')]) for component in ps.get_components(kind=ComponentKind.GRID, commodity=ComponentCommodity.ELECTRICITY) if component.name != grid_name}
-        grid_fix_values[grid_name] = (resample(activation_model[('export_' + ps_name,)][time_steps_validation], dynamic_validation, dynamic_ps), resample(activation_model[('import_' + ps_name,)][time_steps_validation], dynamic_validation, dynamic_ps))
-        component_sizes = {component.name: ps._result[ps._last_result_key][(component.name, 'capacity')] for component in ps.get_components()}
-        storage_slack_values = {component.name: ps._result[ps._last_result_key][(component.name, 'energy')] for component in ps.get_components(kind=ComponentKind.STORAGE)}
-        configuration = {
-            'fix_sizing': {
-                'values': component_sizes
-            },
-            'consumption_slack': {
-                'commodity': [ComponentCommodity.COLD, ComponentCommodity.HEAT],
-                'strategy_factor': 10000
-            },
-            'grid_fix': {
-                'commodity': ComponentCommodity.ELECTRICITY,
-                'values': grid_fix_values
-            },
-            'storage_slack': {
-                'commodity': ComponentCommodity.ALL,
-                'values': storage_slack_values,
-                'strategy_factor': 10
-            },
-            'storage_boundaries': {
-                'commodity': ComponentCommodity.ALL,
-                'min': 0,
-                'max': 1
-            }
-        }
-        if i_start != i_start_validation:
-            initial_time_step = ps._dynamic.index_of(ps._dynamic.position_of(i_start_validation) - 1)
-            storage_connect_values = {component.name: validated_results[ps_name][(component.name, 'energy')][initial_time_step] for component in ps.get_components(kind=ComponentKind.STORAGE)}
-            configuration['storage_connect'] = {
-                'commodity': ComponentCommodity.ALL,
-                'values': storage_connect_values
-            }
-        validation_model = ps.optimize(configuration, i_start_validation, i_end_validation)
-        if validation_model is None:
-            return False
-        else:
-            validation_models[ps_name] = validation_model
-        
-    for ca_name, ca in community.community_assets.items():
-        dynamic_ca = ca._dynamic.partial_dynamic(i_start_validation, i_end_validation)
-        grid_name = [comp for comp in ca._components if ca._components[comp].type == 'ElectricalGrid'][0]
-        grid_fix_values = {component.name: (ca._result[ca._last_result_key][(component.name, 'input_1')], ca._result[ca._last_result_key][(component.name, 'output_1')]) for component in ca.get_components(kind=ComponentKind.GRID, commodity=ComponentCommodity.ELECTRICITY) if component.name != grid_name}
-        grid_fix_values[grid_name] = (resample(activation_model[('export_' + ca_name,)][time_steps_validation], dynamic_validation, dynamic_ca), resample(activation_model[('import_' + ca_name,)][time_steps_validation], dynamic_validation, dynamic_ca))
-        component_sizes = {component.name: ca._result[ca._last_result_key][(component.name, 'capacity')] for component in ca.get_components()}
-        storage_slack_values = {component.name: ca._result[ca._last_result_key][(component.name, 'energy')] for component in ca.get_components(kind=ComponentKind.STORAGE)}
-        configuration = {
-            'fix_sizing': {
-                'values': component_sizes
-            },
-            'consumption_slack': {
-                'commodity': [ComponentCommodity.COLD, ComponentCommodity.HEAT],
-                'strategy_factor': 10000
-            },
-            'grid_fix': {
-                'commodity': ComponentCommodity.ELECTRICITY,
-                'values': grid_fix_values
-            },
-            'storage_slack': {
-                'commodity': ComponentCommodity.ALL,
-                'values': storage_slack_values,
-                'strategy_factor': 10
-            },
-            'storage_boundaries': {
-                'commodity': ComponentCommodity.ALL,
-                'min': 0,
-                'max': 1
-            }
-        }
-        if i_start != i_start_validation:
-            initial_time_step = ca._dynamic.index_of(ca._dynamic.position_of(i_start_validation) - 1)
-            storage_connect_values = {component.name: validated_results[ca_name][(component.name, 'energy')][initial_time_step] for component in ca.get_components(kind=ComponentKind.STORAGE)}
-            configuration['storage_connect'] = {
-                'commodity': ComponentCommodity.ALL,
-                'values': storage_connect_values
-            }
-        validation_model = ca.optimize(configuration, i_start_validation, i_end_validation)
-        if validation_model is None:
-            return False
-        else:
-            validation_models[ca_name] = validation_model
-        
-    return True
-        
-def aggregate_fixed_exports_imports(community, key, validated_results, i_start, i_end_fix):
-    dynamic_fix = community.dynamic.partial_dynamic(i_start, i_end_fix)
-    
-    agg_validated_export = pd.Series(0.0, index=dynamic_fix.time_steps())
-    agg_validated_import = pd.Series(0.0, index=dynamic_fix.time_steps())
-    
-    for ps_name, ps in community.prosumers.items():
-        validated_data = ps.get_export_import(validated_results[ps_name], dynamic_fix)
-        for validated_export, validated_import in validated_data.values():
-            agg_validated_export += validated_export
-            agg_validated_import += validated_import
-    
-    for ca_name, ca in community.community_assets.items():
-        validated_data = ca.get_export_import(validated_results[ca_name], dynamic_fix)
-        for validated_export, validated_import in validated_data.values():
-            agg_validated_export += validated_export
-            agg_validated_import += validated_import
-    
-    community.result[key]['agg_balance'][dynamic_fix.time_steps()] = agg_validated_export - agg_validated_import
-    community.result[key]['agg_export'][dynamic_fix.time_steps()]  = agg_validated_export
-    community.result[key]['agg_import'][dynamic_fix.time_steps()]  = agg_validated_import
-    community.result[key]['internal_exchange'][dynamic_fix.time_steps()]  = pd.concat([agg_validated_export, agg_validated_import], axis=1).min(axis=1)
diff --git a/optimization_model.py b/optimization_model.py
new file mode 100644
index 0000000000000000000000000000000000000000..1466552e7be4bfa00692cd1201aa75437455859c
--- /dev/null
+++ b/optimization_model.py
@@ -0,0 +1,306 @@
+"""
+MIT License
+
+Copyright (c) 2023 RWTH Aachen University
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+
+from Model_Library.architecture import (
+    Profile,
+    resample_first_state_result,
+    resample_result,
+)
+
+from enum import Enum
+import numpy as np
+import pandas as pd
+import pyomo.environ as pyo
+from pyomo.opt import SolverStatus, TerminationCondition
+
+
class VariableKind(Enum):
    """How an optimization variable is indexed over time.

    UNINDEXED: a single scalar value (no time index).
    INDEXED: one value per time step (indexed over the T set).
    EXTENDED_INDEXED: one value per time step plus an entry at index -1
    (indexed over the T_prime set, which extends T by -1).
    """

    UNINDEXED = 0
    INDEXED = 1
    EXTENDED_INDEXED = 2
+
+
class OptimizationBlock:
    """One node of a hierarchical pyomo optimization model.

    Wraps a ``pyo.Block`` together with the dynamic (time discretization)
    it is built on, and keeps a name -> component map so solved values can
    be extracted by name later.
    """

    def __init__(self, prefix, dynamic, block, model):
        """
        prefix: dotted path of this block inside the model ("" for root)
        dynamic: time discretization the block's variables are indexed by
        block: the underlying pyo.Block (pyo.ConcreteModel for the root)
        model: the owning OptimizationModel
        """
        self.prefix = prefix
        self.dynamic = dynamic
        self.block = block
        self.model = model

        # T holds the regular time step indices; T_prime additionally
        # contains -1 for variables that carry an initial state.
        self.block.T = pyo.Set(
            initialize=range(dynamic.number_of_steps()), ordered=True
        )
        self.T = self.block.T

        self.block.T_prime = pyo.Set(
            initialize=range(-1, dynamic.number_of_steps()), ordered=True
        )
        self.T_prime = self.block.T_prime

        self.component_dict = dict()
        self.general_scaled_objective = 0.0
        self.blocks = dict()

    def step_size(self, index):
        """Return the step size of time step *index* of this block's dynamic."""
        return self.dynamic.step_size(index)

    def add(self, name, component):
        """Attach *component* to the pyomo block under *name*.

        numpy arrays are plain data rather than pyomo components, so they
        are only tracked.  Vars, Expressions, Params and arrays are
        additionally recorded in ``component_dict`` for result extraction.
        """
        if not isinstance(component, np.ndarray):
            self.block.add_component(name, component)
        # The original chained four isinstance branches with identical
        # bodies; a single tuple check is equivalent and clearer.
        if isinstance(component, (pyo.Var, pyo.Expression, pyo.Param, np.ndarray)):
            self.component_dict[name] = component

    # TODO should only be used during asset resampling
    def re_add(self, name, component):
        """Overwrite the tracked component without touching the pyomo block."""
        self.component_dict[name] = component

    def add_block(self, name, dynamic):
        """Create and register a child OptimizationBlock named *name*
        with its own dynamic, and return it."""
        local_prefix = name + "."
        block = OptimizationBlock(
            self.prefix + local_prefix, dynamic, pyo.Block(), self.model
        )
        self.blocks[local_prefix] = block
        self.model.all_blocks[self.prefix + local_prefix] = block
        self.block.add_component(local_prefix[:-1], block.block)
        return block

    def add_general_scaled_objective(self, expression):
        """Accumulate *expression* into this block's scaled objective term."""
        self.general_scaled_objective += expression
+
+
class OptimizationModel:
    """Root container of a hierarchical pyomo optimization model.

    Holds the root OptimizationBlock, a registry of every block by its
    prefix, and the named objective expressions that are turned into
    pyomo objectives by :meth:`set_objectives`.
    """

    def __init__(self, name, dynamic):
        self.name = name
        self.root = OptimizationBlock("", dynamic, pyo.ConcreteModel(name), self)
        self.all_blocks = {"": self.root}
        self.objectives = dict()
        self.cache = dict()

    def add_objective(self, name, expression):
        """Accumulate *expression* into the objective called *name*."""
        accumulated = self.objectives.setdefault(name, 0.0)
        self.objectives[name] = accumulated + expression

    def set_objectives(self):
        """Materialize every accumulated objective on the root block.

        For each objective ``name`` this creates a helper variable
        ``f_<name>``, a constraint ``c_<name>`` tying it to the collected
        expression, and a minimizing pyomo objective ``O_<name>``.
        """
        root = self.root.block
        for name, expression in self.objectives.items():
            setattr(root, "f_" + name, pyo.Var())
            objective_var = getattr(root, "f_" + name)
            setattr(root, "c_" + name, pyo.Constraint(expr=objective_var == expression))
            setattr(
                root, "O_" + name, pyo.Objective(expr=objective_var, sense=pyo.minimize)
            )

    def solve(self, options, tee):
        """Solve the model with gurobi, forwarding *options* to the solver."""
        # glpk(bad for milp), cbc(good for milp), gurobi: linear, ipopt: nonlinear
        # in order to install a new solver paste the .exe file in env. path 'C:\Users\User\anaconda3\envs\envINEED'
        solver = pyo.SolverFactory("gurobi")
        solver.options.update(options)
        self.solver_result = solver.solve(self.root.block, tee=tee)

    def is_ok(self):
        """Return True iff the solver finished with an optimal solution."""
        solver_info = self.solver_result.solver
        return (
            solver_info.status == SolverStatus.ok
            and solver_info.termination_condition == TerminationCondition.optimal
        )
+
+
class DynamicResult:
    """Holds the extracted values of one set of optimization variables on
    one dynamic (time discretization).

    Variables are grouped by VariableKind into three flat numpy arrays:
    scalars (unindexed), time-indexed rows, and extended-indexed rows
    that additionally carry a first-state row.  ``var_map`` maps each
    variable name to ``(kind_code, row)`` where kind_code is
    0 (unindexed), 1 (indexed) or 2 (extended indexed).
    """

    def __init__(self, dynamic, var_names):
        # Count the variables of each kind and assign every variable a
        # row inside the result array of its kind.
        n_u_vars = 0
        n_i_vars = 0
        n_i_prime_vars = 0
        self.var_map = dict()

        for var_name, var_kind in var_names:
            if var_kind == VariableKind.INDEXED:
                self.var_map[var_name] = (1, n_i_vars)
                n_i_vars += 1
            elif var_kind == VariableKind.UNINDEXED:
                self.var_map[var_name] = (0, n_u_vars)
                n_u_vars += 1
            elif var_kind == VariableKind.EXTENDED_INDEXED:
                self.var_map[var_name] = (2, n_i_prime_vars)
                n_i_prime_vars += 1

        # One scalar slot per unindexed variable.
        self.n_u_vars = n_u_vars
        self.u_result = np.empty(n_u_vars, dtype=float)

        # One row per time-indexed variable.
        self.n_i_vars = n_i_vars
        self.i_result = np.empty((n_i_vars, dynamic.shape()), dtype=float)

        # Extended-indexed variables get both a first-state row and a
        # regular time-indexed row.
        self.n_i_prime_vars = n_i_prime_vars
        self.i_prime_first_result = np.empty(
            (n_i_prime_vars, dynamic.first_state_shape()), dtype=float
        )
        self.i_prime_result = np.empty((n_i_prime_vars, dynamic.shape()), dtype=float)

        self.dynamic = dynamic

    def extract_result(self, block):
        """Pull the solved values of all mapped variables out of *block*,
        resampling from the block's dynamic onto this result's dynamic."""
        # Unindexed variables are scalars; read their values directly.
        for var_name, (outer_index, inner_index) in self.var_map.items():
            if outer_index == 0:
                self.u_result[inner_index] = pyo.value(block.component_dict[var_name])

        # Time-indexed variables are resampled into i_result.
        resample_result(
            (
                (var_name, inner_index)
                for var_name, (outer_index, inner_index) in self.var_map.items()
                if outer_index == 1
            ),
            block,
            block.dynamic,
            self.i_result,
            self.dynamic,
        )

        # Extended-indexed variables fill both the first-state array and
        # the regular result array.
        resample_first_state_result(
            (
                (var_name, inner_index)
                for var_name, (outer_index, inner_index) in self.var_map.items()
                if outer_index == 2
            ),
            block,
            block.dynamic,
            self.i_prime_first_result,
            self.dynamic,
        )
        resample_result(
            (
                (var_name, inner_index)
                for var_name, (outer_index, inner_index) in self.var_map.items()
                if outer_index == 2
            ),
            block,
            block.dynamic,
            self.i_prime_result,
            self.dynamic,
        )

    def __getitem__(self, var_name):
        """Return the stored value for *var_name*: a float for unindexed
        variables, a Profile for (extended) indexed ones, or None
        (implicitly) if the name is unknown."""
        if var_name in self.var_map:
            outer_index, inner_index = self.var_map[var_name]
            if outer_index == 1:
                return Profile(self.i_result[inner_index], self.dynamic)
            elif outer_index == 0:
                return self.u_result[inner_index]
            elif outer_index == 2:
                return Profile(self.i_prime_result[inner_index], self.dynamic)
            else:
                return None

    def to_excel(self, excel_writer, **kwargs):
        """Write the result arrays to *excel_writer*, one sheet per kind
        ("u", "i", "i_prime"), optionally prefixed by kwargs["sheet_name"]."""
        sheet_name = kwargs.get("sheet_name", "")

        # Unindexed scalars: one row per variable name.
        kwargs["sheet_name"] = "u" if sheet_name == "" else sheet_name + "_u"
        df = pd.Series(
            self.u_result,
            index=(
                var_name
                for var_name, (outer_index, _) in self.var_map.items()
                if outer_index == 0
            ),
        )
        df.to_excel(excel_writer, **kwargs)

        # Time-indexed variables: one column per variable, one row per step.
        kwargs["sheet_name"] = "i" if sheet_name == "" else sheet_name + "_i"
        df = pd.DataFrame(
            np.transpose(self.i_result),
            index=self.dynamic.pandas_index(),
            columns=(
                var_name
                for var_name, (outer_index, _) in self.var_map.items()
                if outer_index == 1
            ),
        )
        df.to_excel(excel_writer, **kwargs)

        # Extended-indexed variables: the first-state rows are merged with
        # the regular rows and the combined frame is sorted by index.
        kwargs["sheet_name"] = (
            "i_prime" if sheet_name == "" else sheet_name + "_i_prime"
        )
        df_first = pd.DataFrame(
            np.transpose(self.i_prime_first_result),
            index=self.dynamic.first_state_pandas_index(),
            columns=(
                var_name
                for var_name, (outer_index, _) in self.var_map.items()
                if outer_index == 2
            ),
        )
        df = pd.DataFrame(
            np.transpose(self.i_prime_result),
            index=self.dynamic.pandas_index(),
            columns=(
                var_name
                for var_name, (outer_index, _) in self.var_map.items()
                if outer_index == 2
            ),
        )
        df = pd.concat([df, df_first])
        df.sort_index(inplace=True)
        df.to_excel(excel_writer, **kwargs)
+
+
class EntityResult:
    """Collects the DynamicResults of one entity, keyed by identifier
    string, and offers flat variable lookup across all of them."""

    def __init__(self, architecture, varss, distributions):
        self.architecture = architecture
        self.dynamic_results = dict()
        # Maps a variable name to the identifier string of the
        # DynamicResult that holds it.
        self.var_map = dict()
        self.distributions = distributions

        for identifier_str, var_list in varss.items():
            dynamic = architecture.get_dynamic(identifier_str)
            self.dynamic_results[identifier_str] = DynamicResult(dynamic, var_list)
            for var_name, _ in var_list:
                self.var_map[var_name] = identifier_str

    def __getitem__(self, var_name):
        """Look up *var_name* across all dynamic results; None if unknown."""
        identifier_str = self.var_map.get(var_name)
        if identifier_str is None:
            return None
        return self.dynamic_results[identifier_str][var_name]

    def to_excel(self, excel_writer, **kwargs):
        """Write every DynamicResult to *excel_writer*, deriving each sheet
        name from the result key and the optional base sheet name."""
        base_sheet_name = kwargs.get("sheet_name", "")

        for key, dynamic_result in self.dynamic_results.items():
            if base_sheet_name == "":
                kwargs["sheet_name"] = key
            else:
                kwargs["sheet_name"] = base_sheet_name + "_" + key
            dynamic_result.to_excel(excel_writer, **kwargs)
diff --git a/topology.py b/topology.py
new file mode 100644
index 0000000000000000000000000000000000000000..89138f5fab0df5177bb6228bc858776dceeb75ca
--- /dev/null
+++ b/topology.py
@@ -0,0 +1,1077 @@
+"""
+MIT License
+
+Copyright (c) 2023 RWTH Aachen University
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+
+from Model_Library.architecture import (
+    Distribution,
+    Identifier,
+    PeriodPiece,
+    Profile,
+    ReductionPiece,
+    resample_variable,
+    ScenarioPiece,
+)
+from Model_Library.component.core import (
+    ComponentCommodity,
+    ComponentKind,
+    ComponentPart,
+    ComponentPartPattern,
+    VariableLink,
+)
+from Model_Library.component.adapter import AssetLink, MemberLink
+from Model_Library.optimization_model import (
+    EntityResult,
+    OptimizationModel,
+    VariableKind,
+)
+
+from enum import Enum
+import importlib
+import inspect
+import numpy as np
+import os
+import pandas as pd
+import pyomo.environ as pyo
+
# Build a registry of every component class exposed by the component
# package, keyed by class name.
module = importlib.import_module(".", package="Model_Library.component")
component_library = {
    class_name: klass
    for class_name, klass in inspect.getmembers(module, inspect.isclass)
}
+
+
+# class ConnectorMode(Enum):
+#     EMPTY = 1
+#     SINGLE_CONTRACTED = 2
+#     SINGLE = 3
+#     MULTIPLE = 4
+
+
class Connector:
    """A named attachment point of a component.

    Tracks the flow variable names attached to it and, in parallel, the
    indices of the connections those flows belong to.
    """

    def __init__(self, name):
        self.name = name
        self.flows = []    # flow variable names, one per attached connection
        self.indices = []  # connection index for each entry in ``flows``
+
+
class Connection:
    """A connection transporting commodity between connectors.

    ``in_flows``/``out_flows`` hold the names of the flow variables
    entering and leaving the connection; ``loss_factor`` is the
    connection's loss factor (0.0 when not configured) and ``capacity``
    an optional capacity limit (None when unrestricted).
    """

    def __init__(self, in_flows, out_flows, loss_factor, capacity):
        self.in_flows = in_flows
        self.out_flows = out_flows
        self.loss_factor = loss_factor
        self.capacity = capacity
+
+
class DistributionItem(Enum):
    """Kind of model item that can be assigned to a distribution while
    building the optimization model: a component part, a connection, or
    an additional-model-logic rule."""

    COMPONENT_PART = 1
    CONNECTION = 2
    LOGIC = 3
+
+
+class Topology:
    def __init__(self, name, configuration, members, assets):
        """Build the component/connector/connection graph of a topology.

        Parameters
        ----------
        name : identifier of this topology
        configuration : dict with at least "components" and "connections";
            optional keys "additional_model_logic", "planning_horizon",
            "price_change_factor", "interest_factor"
        members : passed through to MemberAdapter components
        assets : passed through to AssetAdapter components
        """
        self._name = name
        self._members = members
        self._assets = assets

        self._components = dict()
        self._connectors = dict()

        # Instantiate every configured component and create one Connector
        # per commodity slot it exposes (up to two inputs and two outputs).
        for component_name, component_configuration in configuration[
            "components"
        ].items():
            component_type = component_configuration["type"]
            if component_type == "MemberAdapter":
                component = component_library[component_type](
                    component_name, component_configuration, members
                )
            elif component_type == "AssetAdapter":
                component = component_library[component_type](
                    component_name, component_configuration, assets
                )
            else:
                component = component_library[component_type](
                    component_name, component_configuration
                )
            self._components[component_name] = component

            (
                input_commodity_1,
                input_commodity_2,
                output_commodity_1,
                output_commodity_2,
            ) = component.commodities_ordered()
            component_connectors = dict()
            if input_commodity_1 != None:
                component_connectors["input_1"] = Connector(component_name + ".input_1")
            if input_commodity_2 != None:
                component_connectors["input_2"] = Connector(component_name + ".input_2")
            if output_commodity_1 != None:
                component_connectors["output_1"] = Connector(
                    component_name + ".output_1"
                )
            if output_commodity_2 != None:
                component_connectors["output_2"] = Connector(
                    component_name + ".output_2"
                )
            self._connectors[component_name] = component_connectors

        self._connections = []
        self._connections_map = dict()

        # Wire up the connections.  Flow variable names encode direction:
        # "<component>.<connector>__<index>" enters connection <index>,
        # "<index>__<component>.<connector>" leaves it.
        for connection in configuration["connections"]:
            index = len(self._connections)
            if connection["type"] == "OneToOne":
                flow_from = connection["from"].split(".")
                flow_from_component = flow_from[0]
                flow_from_connector = flow_from[1]
                in_flow = (
                    flow_from_component + "." + flow_from_connector + "__" + str(index)
                )
                connector_from = self._connectors[flow_from_component][
                    flow_from_connector
                ]
                connector_from.flows.append(in_flow)
                connector_from.indices.append(index)
                # connector_from.other_sides.append(index)
                in_flows = [in_flow]
                flow_to = connection["to"].split(".")
                flow_to_component = flow_to[0]
                flow_to_connector = flow_to[1]
                out_flow = (
                    str(index) + "__" + flow_to_component + "." + flow_to_connector
                )
                connector_to = self._connectors[flow_to_component][flow_to_connector]
                connector_to.flows.append(out_flow)
                connector_to.indices.append(index)
                # connector_to.other_sides.append(index)
                out_flows = [out_flow]
                self._connections_map[connection["from"], connection["to"]] = index
            elif connection["type"] == "Sum":
                # A Sum connection joins several connectors; members whose
                # name contains "output" feed the connection, the rest are
                # fed by it.
                in_flows = []
                out_flows = []
                for member in connection["members"]:
                    temp = member.split(".")
                    member_component = temp[0]
                    member_connector = temp[1]
                    if "output" in member:
                        flow = (
                            member_component
                            + "."
                            + member_connector
                            + "__"
                            + str(index)
                        )
                        in_flows.append(flow)
                    else:
                        flow = (
                            str(index)
                            + "__"
                            + member_component
                            + "."
                            + member_connector
                        )
                        out_flows.append(flow)
                    connector = self._connectors[member_component][member_connector]
                    connector.flows.append(flow)
                    connector.indices.append(index)
                    # connector.other_sides.append(index)
            # NOTE(review): an unrecognized connection "type" would reuse
            # the in_flows/out_flows of the previous iteration (or raise
            # NameError on the first one) — confirm that only "OneToOne"
            # and "Sum" can occur here.
            loss_factor = connection.get("loss_factor", 0.0)
            capacity = connection.get("capacity", None)
            self._connections.append(
                Connection(in_flows, out_flows, loss_factor, capacity)
            )

        # self._removed_flows = dict()

        # for connector in self._connectors.values():
        #     if len(connector.flows) == 0:
        #         connector.mode = ConnectorMode.EMPTY
        #     elif len(connector.flows) > 1:
        #         connector.mode = ConnectorMode.MULTIPLE
        #     else:
        #         # the connector is single, but if we can contract depends on the other side
        #         other_side = connector.other_sides[0]
        #         if isinstance(other_side, Connector):
        #             # the other side is a connector
        #             # test if the connector on the other side has been assigned a mode
        #             if hasattr(other_side, "mode"):
        #                 # the connector on the other side has been assigend a mode, so it could be contracted
        #                 if other_side.mode != ConnectorMode.SINGLE_CONTRACTED:
        #                     # it is not, we can contract
        #                     connector.mode = ConnectorMode.SINGLE_CONTRACTED
        #                 else:
        #                     # it is, we cannot contract
        #                     connector.mode = ConnectorMode.SINGLE
        #             else:
        #                 # the connector on the other side has not been assigend a mode, so it is not contracted, so we can contract
        #                 connector.mode = ConnectorMode.SINGLE_CONTRACTED
        #         else:
        #             # the other side is a sum connection so we can contract
        #             other_side = self._sum_connections[other_side]
        #             connector.mode = ConnectorMode.SINGLE_CONTRACTED
        #         # contract the connector
        #         if connector.mode == ConnectorMode.SINGLE_CONTRACTED:
        #             # remove flow from topology
        #             flow_to_remove = connector.flows[0]
        #             self._flows.remove(flow_to_remove)
        #             self._removed_flows[flow_to_remove] = connector.name
        #             # replace flow in both connectors
        #             connector.flows[0] = connector.name
        #             other_side.replace_flow(flow_to_remove, connector.name)

        self._additional_model_logic = configuration.get(
            "additional_model_logic", dict()
        )
        self._planning_horizon = configuration.get("planning_horizon", 1)
        self._price_change_factor = configuration.get("price_change_factor", 1.0)
        self._interest_factor = configuration.get("interest_factor", 1.0)

        self._results = dict()
        self._last_result_key = None
+
+    def iter_components(
+        self,
+        kind=ComponentKind.ALL,
+        type="all",
+        commodity=ComponentCommodity.ALL,
+        name="all",
+    ):
+        pattern = ComponentPartPattern(
+            kind, type, commodity, name, part=ComponentPart.ALL
+        )
+        for component in self._components.values():
+            if pattern.match(component, ComponentPart.ALL):
+                yield component
+
+    def iter_component_parts(
+        self,
+        kind=ComponentKind.ALL,
+        type="all",
+        commodity=ComponentCommodity.ALL,
+        name="all",
+        part=ComponentPart.ALL,
+    ):
+        pattern = ComponentPartPattern(kind, type, commodity, name, part)
+        for component in self._components.values():
+            for part in component.iter_component_parts():
+                if pattern.match(component, ComponentPart.ALL):
+                    yield (component, part)
+
+    def optimize(
+        self, key, architecture, component_iden_strs, strategy, sub_configurations
+    ):
+        def _extract_sub_configuration(sub_configurations, indices):
+            return {
+                name: tuple(sub_configurations[name][index] for index in indices)
+                + (_extract_sub_configuration(sub_configurations[name][-1], indices),)
+                for name in sub_configurations
+            }
+
+        model = self.build_model(
+            architecture,
+            component_iden_strs,
+            strategy,
+            _extract_sub_configuration(sub_configurations, [1, 2, 3]),
+        )
+
+        options = dict()
+        options["MIPGap"] = 0.01
+        options["Presolve"] = 2
+        options["TimeLimit"] = 200
+
+        model.solve(options, True)
+
+        if not model.is_ok():
+            raise RuntimeError("Model is infeasible or unbounded!")
+
+        self.create_empty_entity_result(
+            key,
+            architecture,
+            component_iden_strs,
+            _extract_sub_configuration(sub_configurations, [0, 1, 2]),
+        )
+
+        self.extract_result(
+            model, key, _extract_sub_configuration(sub_configurations, [0])
+        )
+
+    def build_model(
+        self, architecture, component_ident_strs, strategy, sub_model_configurations
+    ):
+        model = OptimizationModel(self._name, architecture.get_dynamic(""))
+
+        self._build_model(
+            architecture,
+            component_ident_strs,
+            strategy,
+            sub_model_configurations,
+            model,
+            model.root,
+        )
+
+        model.set_objectives()
+
+        return model
+
+    def _build_model(
+        self,
+        architecture,
+        input_component_ident_strs,
+        strategy,
+        sub_model_configurations,
+        model,
+        block,
+    ):
+        for name, asset in self._assets.items():
+            sub_architecture = sub_model_configurations[name][0]
+            sub_component_ident_strs = sub_model_configurations[name][1]
+            sub_strategy = sub_model_configurations[name][2]
+            sub_sub_model_configutation = sub_model_configurations[name][3]
+            sub_block = block.add_block(name, sub_architecture.get_dynamic(""))
+            asset._build_model(
+                sub_architecture,
+                sub_component_ident_strs,
+                sub_strategy,
+                sub_sub_model_configutation,
+                model,
+                sub_block,
+            )
+
+        dists = dict()
+        dists[""] = (Distribution(Identifier.from_str("")), [], [])
+
+        component_ident_strs = dict()
+        links = []
+        for name in self._components:
+            for part in self._components[name].iter_component_parts():
+                if (name, part) not in input_component_ident_strs:
+                    component_ident_strs[name, part] = ""
+                    dists[""][2].append((DistributionItem.COMPONENT_PART, (name, part)))
+
+                else:
+                    ident_str = input_component_ident_strs[name, part]
+                    component_ident_strs[name, part] = ident_str
+
+                    if ident_str not in dists:
+                        dists[ident_str] = (
+                            Distribution(Identifier.from_str(ident_str)),
+                            [],
+                            [],
+                        )
+
+                    dists[ident_str][2].append(
+                        (DistributionItem.COMPONENT_PART, (name, part))
+                    )
+
+            links.extend(self._components[name].iter_links())
+
+        connection_idents = {i: [] for i in range(len(self._connections))}
+        for name, connectors in self._connectors.items():
+            for connector in connectors.values():
+                for index in connector.indices:
+                    connection_idents[index].append(
+                        Identifier.from_str(
+                            component_ident_strs[name, ComponentPart.NONE_STATE]
+                        )
+                    )
+
+        for index, idents in connection_idents.items():
+            ident = Identifier.union(idents)
+            connection_idents[index] = ident
+
+            if ident.string not in dists:
+                dists[ident.string] = (Distribution(ident), [], [])
+
+            dists[ident.string][2].append((DistributionItem.CONNECTION, index))
+
+        for logic_name, logic in self._additional_model_logic.items():
+            if logic["type"] == "EqualCapacity":
+                dists[""][2].append((DistributionItem.LOGIC, (logic_name, None)))
+
+            elif logic["type"] == "ConnectorEnable":
+                for index, connector in enumerate(logic["connectors"]):
+                    name = connector.split(".")[0]
+                    dists[component_ident_strs[name, ComponentPart.NONE_STATE]][
+                        2
+                    ].append((DistributionItem.LOGIC, (logic_name, index)))
+
+            elif logic["type"] == "ConnectionEnable":
+                for connection in logic["connections"]:
+                    index = self._connections_map[connection["from"], connection["to"]]
+                    dists[connection_idents[index].string][2].append(
+                        (DistributionItem.LOGIC, (logic_name, index))
+                    )
+
+        cache = dict()
+        self.first_pass(
+            architecture.root,
+            block,
+            dists,
+            cache,
+        )
+
+        # TODO maybe resample component parameters?
+
+        for link in links:
+            if isinstance(link, VariableLink):
+                resample_variable(
+                    link.var_name,
+                    block.prefix,
+                    model,
+                    architecture.get_dynamic(component_ident_strs[link.start]),
+                    architecture.get_dynamic(component_ident_strs[link.end]),
+                )
+
+            elif isinstance(link, AssetLink):
+                (
+                    asset_architecture,
+                    asset_block_prefix,
+                    asset_component_ident_strs,
+                ) = model.cache[link.asset]
+                asset_root_dynamic = asset_architecture.get_dynamic("")
+                root_dynamic = architecture.get_dynamic("")
+
+                resample_variable(
+                    link.asset_var_name,
+                    asset_block_prefix,
+                    model,
+                    asset_architecture.get_dynamic(
+                        asset_component_ident_strs[link.start]
+                    ),
+                    asset_root_dynamic,
+                )
+
+                if asset_root_dynamic.dynamic != root_dynamic.dynamic:
+                    NotImplementedError
+                block.re_add(
+                    link.var_name,
+                    model.all_blocks[asset_block_prefix].component_dict[
+                        link.asset_var_name
+                    ],
+                )
+
+                resample_variable(
+                    link.var_name,
+                    block.prefix,
+                    model,
+                    root_dynamic,
+                    architecture.get_dynamic(component_ident_strs[link.end]),
+                )
+
+            elif isinstance(link, MemberLink):
+                member_result = link.member._results[link.member._last_result_key]
+                member_root_dynamic = member_result.architecture.get_dynamic("")
+                root_dynamic = architecture.get_dynamic("")
+
+                member_temp = member_result[link.member_res_name].resample(
+                    member_result.architecture.get_dynamic("")
+                )
+
+                if member_root_dynamic.dynamic != root_dynamic.dynamic:
+                    NotImplementedError
+
+                temp = Profile(member_temp.values, root_dynamic)
+
+                setattr(
+                    self._components[link.end[0]],
+                    link.local_res_name,
+                    temp.resample(
+                        architecture.get_dynamic(component_ident_strs[link.end])
+                    ),
+                )
+
+        for name, connectors in self._connectors.items():
+            for connector in connectors.values():
+                for flow, index in zip(connector.flows, connector.indices):
+                    resample_variable(
+                        flow,
+                        block.prefix,
+                        model,
+                        architecture.get_dynamic(connection_idents[index].string),
+                        architecture.get_dynamic(
+                            component_ident_strs[name, ComponentPart.NONE_STATE]
+                        ),
+                    )
+
+        # TODO maybe resample logic parameters?
+
+        if strategy is None:
+            objectives = {"objective": []}
+        elif isinstance(strategy, list):
+            objectives = {"objective": strategy}
+        elif isinstance(strategy, dict):
+            objectives = strategy
+        else:
+            raise ValueError(f"Invalid strategy type!")
+
+        objective_exprs = self.second_pass(
+            architecture.root,
+            block,
+            dists,
+            component_ident_strs,
+            cache,
+            objectives,
+        )
+
+        w = (365.0 * 24.0) / (np.sum(architecture.root.dynamic.step_lengths()) / 3600.0)
+        for objective_name, (
+            d_expr,
+            scaled_expr,
+            one_time_expr,
+        ) in objective_exprs.items():
+            model.add_objective(
+                objective_name, d_expr + scaled_expr * w + one_time_expr
+            )
+
+        model.cache[self] = (architecture, block.prefix, component_ident_strs)
+
+    # constructs further optimization blocks
+    # determines in which blocks the component parts, connections and logics are placed
+    # defines component part base variables
+    # defines flow variables
+    # collects possible resamplings between component parts
+    def first_pass(
+        self,
+        arc_block,
+        block,
+        dists,
+        cache,
+    ):
+        block_to_dist_list = []
+        for ident_str, (dist, blocks, items) in dists.items():
+            if dist.check():
+                block_to_dist_list.append(ident_str)
+                blocks.append(block)
+
+                for kind, key in items:
+                    if kind == DistributionItem.COMPONENT_PART:
+                        name, part = key
+                        if part == ComponentPart.DESIGN:
+                            self._components[name].add_design_variables(block)
+                        elif part == ComponentPart.NONE_STATE:
+                            self._components[name].add_non_state_variables(block)
+                        elif part == ComponentPart.STATE:
+                            self._components[name].add_state_variables(block)
+
+                    elif kind == DistributionItem.CONNECTION:
+                        connection = self._connections[key]
+                        for flow in connection.in_flows:
+                            block.add(flow, pyo.Var(block.T, bounds=(0, None)))
+                        for flow in connection.out_flows:
+                            block.add(flow, pyo.Var(block.T, bounds=(0, None)))
+
+                    elif kind == DistributionItem.LOGIC:
+                        pass
+
+        cache[block] = block_to_dist_list
+
+        for i, reduction in enumerate(arc_block.dynamic_reductions):
+            piece = ReductionPiece(i)
+            r_block = block.add_block(piece.to_str(), reduction.get_dynamic(""))
+
+            for dist, _, _ in dists.values():
+                dist.down(piece)
+            self.first_pass(
+                reduction,
+                r_block,
+                dists,
+                cache,
+            )
+            for dist, _, _ in dists.values():
+                dist.up()
+
+        for i, aggregation in enumerate(arc_block.period_aggregations):
+            for j, period in enumerate(aggregation.period_blocks):
+                piece = PeriodPiece(i, j)
+                p_block = block.add_block(piece.to_str(), period.get_dynamic(""))
+
+                for dist, _, _ in dists.values():
+                    dist.down(piece)
+                self.first_pass(period, p_block, dists, cache)
+                for dist, _, _ in dists.values():
+                    dist.up()
+
+        for i, stage in enumerate(arc_block.stochastic_stages):
+            for j, scenario in enumerate(stage.scenario_blocks):
+                piece = ScenarioPiece(i, j)
+                s_block = block.add_block(piece.to_str(), scenario.get_dynamic(""))
+
+                for dist, _, _ in dists.values():
+                    dist.down(piece)
+                self.first_pass(scenario, s_block, dists, cache)
+                for dist, _, _ in dists.values():
+                    dist.up()
+
    # construct the component models
    # construct the connector models
    # construct the connection models
    # add additional model logics
    # construct the objective function
    def second_pass(
        self,
        arc_block,
        block,
        dists,
        component_ident_strs,
        cache,
        objectives,
    ):
        """Build the component/connection/logic models on every optimization
        block and collect the objective expressions.

        Returns a dict mapping each objective name to a three-slot list
        ``[plain, scaled, one_time]``; the caller combines them as
        ``plain + scaled * w + one_time`` where ``w`` scales the modelled
        horizon to one year.  ``objectives`` maps an objective name to a list
        of goal keywords ("annuity", "peak_power_cost", "co_2_emissions").
        """
        objective_exprs = {
            objective_name: [0.0, 0.0, 0.0] for objective_name in objectives
        }
        # iterate everything first_pass placed on this block
        for ident_str in cache[block]:
            for kind, key in dists[ident_str][2]:
                if kind == DistributionItem.COMPONENT_PART:
                    name, part = key
                    # d_block: the first block the component's DESIGN part was
                    # placed on (first_pass appends blocks in placement order)
                    if (name, ComponentPart.DESIGN) not in component_ident_strs:
                        d_block = None
                    else:
                        d_block = dists[
                            component_ident_strs[name, ComponentPart.DESIGN]
                        ][1][0]

                    if part == ComponentPart.DESIGN:
                        # capital-bound cost enters the unscaled slot
                        for objective_name, objective in objectives.items():
                            if "annuity" in objective:
                                objective_exprs[objective_name][0] += self._components[
                                    name
                                ].design_annuity(
                                    block,
                                    self._planning_horizon,
                                    self._price_change_factor,
                                    self._interest_factor,
                                )

                    elif part == ComponentPart.NONE_STATE:
                        self._components[name].add_non_state_model(d_block, block)
                        self._components[name].add_additional_model_logic(
                            d_block, block
                        )

                        # each connector value is the sum of its flow variables
                        for connector in self._connectors[name].values():
                            connector_var = block.component_dict[connector.name]

                            terms = np.empty(len(connector.flows), dtype=object)
                            for i, flow in enumerate(connector.flows):
                                terms[i] = block.component_dict[flow]

                            # NOTE(review): `rule` closes over loop variables;
                            # safe only if block.add constructs the constraint
                            # immediately — confirm against block.add
                            def rule(m, t):
                                return connector_var[t] == pyo.quicksum(
                                    term[t] for term in terms
                                )

                            block.add(
                                connector.name + "_sum",
                                pyo.Constraint(block.T, rule=rule),
                            )

                        # operational costs enter the scaled (per-year) slot
                        for objective_name, objective in objectives.items():
                            if "annuity" in objective:
                                objective_exprs[objective_name][1] += self._components[
                                    name
                                ].operational_annuity(
                                    block,
                                    self._planning_horizon,
                                    self._price_change_factor,
                                    self._interest_factor,
                                )

                            if "peak_power_cost" in objective:
                                # annuity factor a and price-dynamic cash value
                                # factor b (VDI 2067); peak power cost is a
                                # one-time expression, hence slot [2]
                                T = self._planning_horizon
                                r = self._price_change_factor
                                q = self._interest_factor
                                if q == 1.0:
                                    a = 1.0 / T
                                else:
                                    a = (q - 1.0) / (1.0 - q ** (-T))
                                if q == r:
                                    b = T / q
                                else:
                                    b = (1.0 - (r / q) ** T) / (q - r)

                                objective_exprs[objective_name][2] += (
                                    self._components[name].peak_power_cost(block)
                                    * a
                                    * b
                                )

                            if "co_2_emissions" in objective:
                                objective_exprs[objective_name][1] += self._components[
                                    name
                                ].co2_emissions(block)

                    elif part == ComponentPart.STATE:
                        self._components[name].add_state_model(d_block, block)

                elif kind == DistributionItem.CONNECTION:
                    connection = self._connections[key]

                    # flow balance: out-flows equal in-flows minus losses
                    def rule(m, t):
                        return pyo.quicksum(
                            block.component_dict[out_flow][t]
                            for out_flow in connection.out_flows
                        ) == pyo.quicksum(
                            block.component_dict[in_flow][t]
                            for in_flow in connection.in_flows
                        ) * (
                            1.0 - connection.loss_factor
                        )

                    block.add(str(key) + "_sum", pyo.Constraint(block.T, rule=rule))

                    # optional transfer capacity limit on the in-flow sum
                    if connection.capacity is not None:
                        capacity = connection.capacity

                        def rule(m, t):
                            return (
                                pyo.quicksum(
                                    block.component_dict[in_flow][t]
                                    for in_flow in connection.in_flows
                                )
                                <= capacity
                            )

                        block.add(
                            str(key) + "_capacity", pyo.Constraint(block.T, rule=rule)
                        )

                elif kind == DistributionItem.LOGIC:
                    logic = self._additional_model_logic[key[0]]

                    if logic["type"] == "EqualCapacity":
                        # chain pairwise equality of the listed components'
                        # capacity variables on their design blocks
                        components = logic["components"]
                        d_blocks = [
                            dists[component_ident_strs[name, ComponentPart.DESIGN]][1][
                                0
                            ]
                            for name in components
                        ]
                        for i in range(len(components) - 1):

                            def rule(m):
                                return (
                                    d_blocks[i].component_dict[
                                        components[i] + ".capacity"
                                    ]
                                    == d_blocks[i + 1].component_dict[
                                        components[i + 1] + ".capacity"
                                    ]
                                )

                            d_blocks[i].add(
                                key[0] + "_cons_" + str(i), pyo.Constraint(rule=rule)
                            )

                    elif logic["type"] == "ConnectorEnable":
                        # force the connector to zero wherever the (resampled)
                        # enable profile is falsy
                        enable = logic["enable"].resample(block.dynamic).values

                        connector_var = block.component_dict[
                            logic["connectors"][key[1]]
                        ]

                        def rule(m, t):
                            if not enable[t]:
                                return connector_var[t] == 0
                            else:
                                return pyo.Constraint.Skip

                        block.add(
                            key[0] + "_cons_" + str(key[1]),
                            pyo.Constraint(block.T, rule=rule),
                        )

                    elif logic["type"] == "ConnectionEnable":
                        # force the connection's first in-flow to zero wherever
                        # the (resampled) enable profile is falsy
                        enable = logic["enable"].resample(block.dynamic).values

                        connection = self._connections[key[1]]
                        flow_var = block.component_dict[connection.in_flows[0]]

                        def rule(m, t):
                            if not enable[t]:
                                return flow_var[t] == 0
                            else:
                                return pyo.Constraint.Skip

                        block.add(
                            key[0] + "_cons_" + str(key[1]),
                            pyo.Constraint(block.T, rule=rule),
                        )

        # block-level objective contributions are always scaled per year
        for objective_name in objectives:
            objective_exprs[objective_name][1] += block.general_scaled_objective

        # recurse into dynamic reductions; contributions pass through unchanged
        for i, reduction in enumerate(arc_block.dynamic_reductions):
            r_block = block.blocks["r_" + str(i) + "."]

            r_objective_exprs = self.second_pass(
                reduction,
                r_block,
                dists,
                component_ident_strs,
                cache,
                objectives,
            )

            for objective_name in objectives:
                objective_exprs[objective_name][0] += r_objective_exprs[objective_name][
                    0
                ]
                objective_exprs[objective_name][1] += r_objective_exprs[objective_name][
                    1
                ]
                objective_exprs[objective_name][2] += r_objective_exprs[objective_name][
                    2
                ]

        # recurse into aggregation periods; slot [1] is weighted by the number
        # of represented periods n_s, slots [0] and [2] by the fraction n_s / n
        for i, aggregation in enumerate(arc_block.period_aggregations):
            for j, period in enumerate(aggregation.period_blocks):
                p_block = block.blocks["p_" + str(i) + "_" + str(j) + "."]

                period_objective_exprs = self.second_pass(
                    period,
                    p_block,
                    dists,
                    component_ident_strs,
                    cache,
                    objectives,
                )

                for objective_name in objectives:
                    objective_exprs[objective_name][0] += (
                        aggregation.n_s[j] / aggregation.n
                    ) * period_objective_exprs[objective_name][0]
                    objective_exprs[objective_name][1] += (
                        aggregation.n_s[j] * period_objective_exprs[objective_name][1]
                    )
                    objective_exprs[objective_name][2] += (
                        aggregation.n_s[j] / aggregation.n
                    ) * period_objective_exprs[objective_name][2]

            # TODO: BaseBusBar does not have configuration in BaseBasBar

        # recurse into stochastic scenarios; contributions are weighted by the
        # scenario probabilities (expected value)
        for i, stage in enumerate(arc_block.stochastic_stages):
            for j, scenario in enumerate(stage.scenario_blocks):
                s_block = block.blocks["s_" + str(i) + "_" + str(j) + "."]

                scenario_objective_exprs = self.second_pass(
                    scenario,
                    s_block,
                    dists,
                    component_ident_strs,
                    cache,
                    objectives,
                )

                for objective_name in objectives:
                    objective_exprs[objective_name][0] += (
                        stage.probabilities[j]
                        * scenario_objective_exprs[objective_name][0]
                    )
                    objective_exprs[objective_name][1] += (
                        stage.probabilities[j]
                        * scenario_objective_exprs[objective_name][1]
                    )
                    objective_exprs[objective_name][2] += (
                        stage.probabilities[j]
                        * scenario_objective_exprs[objective_name][2]
                    )

        return objective_exprs
+
+    def create_empty_entity_result(
+        self, key, architecture, input_component_ident_strs, sub_result_configurations
+    ):
+        for name, asset in self._assets.items():
+            sub_key = sub_result_configurations[name][0]
+            sub_architecture = sub_result_configurations[name][1]
+            sub_component_ident_strs = sub_result_configurations[name][2]
+            sub_sub_result_configurations = sub_result_configurations[name][3]
+            asset.create_empty_entity_result(
+                sub_key,
+                sub_architecture,
+                sub_component_ident_strs,
+                sub_sub_result_configurations,
+            )
+
+        vars = dict()
+        vars[""] = []
+
+        component_ident_strs = dict()
+        for name in self._components:
+            for part in self._components[name].iter_component_parts():
+                if (name, part) not in input_component_ident_strs:
+                    ident_str = ""
+
+                else:
+                    ident_str = input_component_ident_strs[name, part]
+
+                    if ident_str not in vars:
+                        vars[ident_str] = []
+
+                component_ident_strs[name, part] = ident_str
+
+                if part == ComponentPart.DESIGN:
+                    vars[ident_str].extend(
+                        self._components[name].design_base_variable_names()
+                    )
+                elif part == ComponentPart.NONE_STATE:
+                    vars[ident_str].extend(
+                        self._components[name].non_state_base_variable_names()
+                    )
+                elif part == ComponentPart.STATE:
+                    vars[ident_str].extend(
+                        self._components[name].state_base_variable_names()
+                    )
+
+        connection_idents = {i: [] for i in range(len(self._connections))}
+        for name, connectors in self._connectors.items():
+            for connector in connectors.values():
+                for index in connector.indices:
+                    connection_idents[index].append(
+                        Identifier.from_str(
+                            component_ident_strs[name, ComponentPart.NONE_STATE]
+                        )
+                    )
+
+        for index, idents in connection_idents.items():
+            ident = Identifier.union(idents)
+
+            if ident.string not in vars:
+                vars[ident.string] = []
+
+            connection = self._connections[index]
+            for flow in connection.in_flows:
+                vars[ident.string].append((flow, VariableKind.INDEXED))
+            for flow in connection.out_flows:
+                vars[ident.string].append((flow, VariableKind.INDEXED))
+
+        dists = {
+            ident: (Distribution(Identifier.from_str(ident)), []) for ident in vars
+        }
+        self._compute_dists(architecture.root, dists)
+
+        self._results[key] = EntityResult(architecture, vars, dists)
+        self._last_result_key = key
+
+    def _compute_dists(self, arc_block, dists):
+        for dist, dist_to_block_list in dists.values():
+            if dist.check():
+                dist_to_block_list.append(arc_block.prefix_str)
+
+        for i, reduction in enumerate(arc_block.dynamic_reductions):
+            piece = ReductionPiece(i)
+            for dist, _ in dists.values():
+                dist.down(piece)
+            self._compute_dists(reduction, dists)
+            for dist, _ in dists.values():
+                dist.up()
+
+        for i, aggregation in enumerate(arc_block.period_aggregations):
+            for j, period in enumerate(aggregation.period_blocks):
+                piece = PeriodPiece(i, j)
+                for dist, _ in dists.values():
+                    dist.down(piece)
+                self._compute_dists(period, dists)
+                for dist, _ in dists.values():
+                    dist.up()
+
+        for i, stage in enumerate(arc_block.stochastic_stages):
+            for j, scenario in enumerate(stage.scenario_blocks):
+                piece = ScenarioPiece(i, j)
+                for dist, _ in dists.values():
+                    dist.down(piece)
+                self._compute_dists(scenario, dists)
+                for dist, _ in dists.values():
+                    dist.up()
+
+    def extract_result(self, model, key, sub_keys):
+        for name, asset in self._assets.items():
+            sub_key = sub_keys[name][0]
+            sub_sub_keys = sub_keys[name][1]
+            asset.extract_result(model, sub_key, sub_sub_keys)
+
+        result = self._results[key]
+
+        _, block_prefix, _ = model.cache[self]
+        for result_ident_str, (_, local_block_prefixes) in result.distributions.items():
+            for local_block_prefix in local_block_prefixes:
+                result.dynamic_results[result_ident_str].extract_result(
+                    model.all_blocks[block_prefix + local_block_prefix]
+                )
+
+    def save_results(self, path, keys=None):
+        for asset in self._assets.values():
+            asset.save_results(path, keys)
+
+        if keys is None:
+            keys = [self._last_result_key]
+        elif isinstance(keys, str):
+            keys = [keys]
+
+        if not os.path.exists(path):
+            os.makedirs(path)
+        with pd.ExcelWriter(os.path.join(path, self._name + ".xlsx")) as writer:
+            for key in keys:
+                self._results[key].to_excel(writer, sheet_name=key)
+
+
class Prosumer(Topology):
    """A prosumer: a Topology with no nested entities (both entity dicts empty)."""

    def __init__(self, name, configuration):
        super().__init__(name, configuration, dict(), dict())
+
+
class DistrictAsset(Topology):
    """A district-level asset: a Topology with no nested entities of its own."""

    def __init__(self, name, configuration):
        super().__init__(name, configuration, dict(), dict())
+
+
class District(Topology):
    """A district: a Topology composed of prosumers and district-level assets."""

    def __init__(self, name, configuration, prosumers, district_assets):
        super().__init__(name, configuration, prosumers, district_assets)
+
+
class CityAsset(Topology):
    """A city-level asset: a Topology with no nested entities of its own."""

    def __init__(self, name, configuration):
        super().__init__(name, configuration, dict(), dict())
+
+
class City(Topology):
    """A city: a Topology composed of districts and city-level assets."""

    def __init__(self, name, configuration, districts, city_assets):
        super().__init__(name, configuration, districts, city_assets)
diff --git a/utility.py b/utility.py
new file mode 100644
index 0000000000000000000000000000000000000000..4e4df560527c9070b72e5c25435303a234a2a1dd
--- /dev/null
+++ b/utility.py
@@ -0,0 +1,282 @@
+"""
+MIT License
+
+Copyright (c) 2023 RWTH Aachen University
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+
+import math
+import numpy as np
+import pandas as pd
+
+
# Annuity computation based on the diploma thesis of Bernhard Kranebitter,
# following VDI 2067.
def design_annuity(A_0, T, T_N, r, q, f_b):
    """Return the annuity of a capital expenditure A_0 over the horizon T.

    T_N is the component's service life, r the price change factor,
    q the interest factor and f_b the factor for operation-bound costs.
    """
    # number of replacements needed within the planning horizon
    n = math.ceil(T / T_N) - 1
    # cash value of the initial investment plus all replacement investments
    cash_value = A_0 + sum(
        A_0 * (r ** (k * T_N)) / (q ** (k * T_N)) for k in range(1, n + 1)
    )
    # residual value of the last replacement at the end of the horizon
    R_W = A_0 * r ** (n * T_N) * ((n + 1.0) * T_N - T) / T_N * (1.0 / q**T)
    cash_value -= R_W
    # annuity factor
    a = 1.0 / T if q == 1.0 else (q - 1.0) / (1.0 - q ** (-T))
    # price-dynamic cash value factor
    b = T / q if q == r else (1.0 - (r / q) ** T) / (q - r)
    A_NK = cash_value * a  # capital-bound annuity
    A_NB = A_0 * f_b * a * b  # operation-bound annuity
    return A_NK + A_NB
+
+
def operational_annuity(
    T, r, q, A_V
):  # normally also w is a parameter, used to scale the operational expenditure A_V to the operational expenditure in the first year A_V1, but that is done for all operational annuities right before the objective is added to the model
    """Return the annuity of the yearly operational expenditure A_V.

    T is the planning horizon, r the price change factor and q the
    interest factor (VDI 2067).
    """
    # annuity factor
    a = 1.0 / T if q == 1.0 else (q - 1.0) / (1.0 - q ** (-T))
    # price-dynamic cash value factor
    b = T / q if q == r else (1.0 - (r / q) ** T) / (q - r)
    return A_V * a * b
+
+
# Based on Meeus, "Astronomical Algorithms"
# (https://www.agopax.it/Libri_astronomia/pdf/Astronomical%20Algorithms.pdf).
# Contrary to the source, all dates are considered to be in the Gregorian
# calendar, even before the calendar shift from 1582 October 4 (Julian) to
# 1582 October 15 (Gregorian); this is in line with the way pandas, numpy and
# python handle dates.
def calculate_solar_position(time, time_zone, phi, lambda_geographic):
    """Compute the Sun's apparent position for a series of timestamps.

    Parameters:
        time: pandas timestamps; assumed to be a DatetimeIndex, since
            .year/.month/.day are accessed directly, not via .dt
            (TODO confirm with callers)
        time_zone: offset to UTC in hours
        phi: latitude of the observer (in degrees north of the equator)
        lambda_geographic: geographic longitude of the observer
            (in degrees west of Greenwich)

    Returns:
        A_rad: Sun's azimuth in rad, clockwise from south
        h_rad: Sun's altitude in rad above the horizon
        R: Sun's radius vector (Earth-Sun distance) in AU

    Equation numbers in the comments refer to Meeus.
    """
    # shift time to UTC (rebinds the local name; a DatetimeIndex is
    # immutable, so the caller's object is presumably not modified -- verify
    # if a mutable type is ever passed)
    time -= pd.Timedelta(time_zone, "h")

    # year, astronomical numbering
    Y = time.year.values

    # month, January = 1, etc
    M = time.month.values

    # day of the month
    D = time.day.values

    # universal time in hours
    universal_time = ((time - time.floor("D")) / pd.Timedelta(1, "h")).values

    # January and February count as months 13 and 14 of the previous year
    Y[M <= 2] -= 1
    M[M <= 2] += 12

    A = np.floor(Y / 100).astype(int)
    # Gregorian calendar correction term
    B = 2 - A + np.floor(A / 4).astype(int)

    # Julian Day
    JD = (
        np.floor(365.25 * (Y + 4716)).astype(int)
        + np.floor(30.6001 * (M + 1)).astype(int)
        + D
        + universal_time / 24
        + B
        - 1524.5
    )

    # Julian century (from 2000 January 1.5 TD) (25.1)
    T = (JD - 2451545) / 36525

    # geometric mean longitude of the Sun (25.2)
    L_0 = (280.46646 + 36000.76983 * T + 0.0003032 * T**2) % 360

    # mean anomaly of the Sun (25.3)
    # (re-uses the name M; the month numbers are not needed past this point)
    M = (357.52911 + 35999.05029 * T - 0.0001537 * T**2) % 360

    # eccentricity of the Earth's orbit (25.4)
    e = 0.016708634 - 0.000042037 * T - 0.0000001267 * T**2

    # Sun's equation of center (between 25.4 and 25.5)
    C = (
        (1.914602 - 0.004817 * T - 0.000014 * T**2) * np.sin(np.radians(M))
        + (0.019993 - 0.000101 * T) * np.sin(np.radians(2 * M))
        + 0.000289 * np.sin(np.radians(3 * M))
    )

    # Sun's true longitude (between 25.4 and 25.5)
    circle_dot = L_0 + C

    # Sun's true anomaly (between 25.4 and 25.5)
    v = M + C

    # Sun's radius vector in AUs (25.5)
    R = (1.000001018 * (1 - e**2)) / (1 + e * np.cos(np.radians(v)))

    # ascending node term used in the correction from true to apparent
    # longitude and obliquity of the ecliptic (between 25.5 and 25.6)
    Omega = (125.04 - 1934.136 * T) % 360

    # Sun's apparent longitude (between 25.5 and 25.6); used below as the
    # argument of the declination formula (25.6)
    lambda_ = circle_dot - 0.00569 - 0.00478 * np.sin(np.radians(Omega))

    # obliquity of the ecliptic (22.2)
    epsilon = (
        23
        + (26 + (21.448 - 46.8150 * T - 0.00059 * T**2 + 0.001813 * T**3) / 60) / 60
    )

    # corrected obliquity of the ecliptic (25.8)
    epsilon_corrected = epsilon + 0.00256 * np.cos(np.radians(Omega))

    # Sun's apparent declination (25.6) in rad
    delta_rad = np.arcsin(
        np.sin(np.radians(epsilon_corrected)) * np.sin(np.radians(lambda_))
    )

    # tan^2 of half the obliquity, used in the equation of time
    y = np.tan(np.radians(epsilon / 2)) ** 2

    # equation of time (28.3) in hours
    E = (
        np.rad2deg(
            y * np.sin(np.radians(2 * L_0))
            - 2 * e * np.sin(np.radians(M))
            + 4 * e * y * np.sin(np.radians(M)) * np.cos(np.radians(2 * L_0))
            - 1 / 2 * y**2 * np.sin(np.radians(4 * L_0))
            - 5 / 4 * e**2 * np.sin(np.radians(2 * M))
        )
        / 15
    )

    # solar time in hours (based on the formula to go from local time to
    # solar time found on wikipedia)
    # (remember lambda_geographic is in degrees west of Greenwich)
    # (remember universal_time = local_time - time_zone)
    solar_time = universal_time + E - lambda_geographic / 15

    # solar hour angle in degrees
    H = 15 * (solar_time - 12)

    # wrap the hour angle into [-180, 180]
    H[H < -180] += 360
    H[H > 180] -= 360

    # Sun's azimuth (13.5) in rad cw from south
    A_rad = np.arctan2(
        np.sin(np.radians(H)),
        np.cos(np.radians(H)) * np.sin(np.radians(phi))
        - np.tan(delta_rad) * np.cos(np.radians(phi)),
    )

    # Sun's altitude (13.6) in rad above the horizon
    h_rad = np.arcsin(
        np.sin(np.radians(phi)) * np.sin(delta_rad)
        + np.cos(np.radians(phi)) * np.cos(delta_rad) * np.cos(np.radians(H))
    )

    return A_rad, h_rad, R
+
+
def calculate_solar_radiation(
    A_sun_rad, h_sun_rad, R, A_pv_rad, T_pv_rad, rho, swd, dir, dif
):
    """Return the total solar irradiance on a tilted PV array surface.

    Parameters:
        A_sun_rad: Sun's azimuth in rad cw from south
        h_sun_rad: Sun's altitude in rad above the horizon
        R: Sun's radius vector (distance between the Sun's center and Earth's
            center) in AU
        A_pv_rad: PV array azimuth in rad cw from south
        T_pv_rad: PV array tilt (angle between PV array surface and the
            horizontal plane) in rad
        rho: ground albedo
        swd: solar radiation on a horizontal surface
        dir: solar radiation from the solar disc on a surface normal to the
            direction of the sun
        dif: solar radiation excluding the solar disc on a horizontal surface

    Returns the sum of direct, diffuse (Perez model) and ground-reflected
    irradiance on the array plane.
    """
    # Sun's zenith angle in rad
    Z_sun_rad = math.pi / 2 - h_sun_rad

    # angle of incidence between the Sun's rays and the PV array (based on
    # https://juser.fz-juelich.de/record/849639/files/J%C3%BCl_2600_Ritzenhoff.pdf)
    cos_AOI = np.cos(h_sun_rad) * np.sin(T_pv_rad) * np.cos(
        A_sun_rad - A_pv_rad
    ) + np.sin(h_sun_rad) * np.cos(T_pv_rad)
    # clip to [0, 1]: sun behind the array contributes no direct radiation
    cos_AOI[cos_AOI > 1] = 1
    cos_AOI[cos_AOI < 0] = 0

    # direct radiation
    direct = dir * cos_AOI

    # diffuse radiation (based on "Modeling daylight availability and
    # irradiance components from direct and global irradiance", Perez et al.)
    # sky's clearness
    # because the entire result of this process is multiplied with dif, when
    # dif is 0 (and the fraction in the sky's clearness formula would be nan)
    # we can set the value of the fraction to anything
    fraction = np.empty(len(dif), dtype=float)
    fraction[dif == 0] = 1
    fraction[dif != 0] = (dif[dif != 0] + dir[dif != 0]) / dif[dif != 0]
    epsilon = (fraction + 1.041 * Z_sun_rad**3) / (1 + 1.041 * Z_sun_rad**3)

    # solar constant (mean radiation at the top of the atmosphere)
    E_sc = 1367

    # extraterrestrial radiation
    E = E_sc * (1 / R) ** 2

    # air mass (Kasten & Young, "Revised optical air mass tables and
    # approximation formula"): 1 / (cos Z + 0.50572 (96.07995 - Z_deg)^-1.6364)
    # BUGFIX: the exponent must apply to the difference (96.07995 - Z_deg);
    # previously it applied to Z_deg alone, which made the correction term
    # ~48.6 instead of ~1e-3 and AM nearly zero everywhere.
    AM = 1 / (
        np.cos(Z_sun_rad) + 0.50572 * (96.07995 - np.rad2deg(Z_sun_rad)) ** -1.6364
    )

    # sky's brightness
    Delta = dif * AM / E

    a = np.maximum(0, cos_AOI)
    b = np.maximum(0.087, np.cos(Z_sun_rad))

    # Perez brightness coefficients per clearness bin
    F_coefficiences = np.array(
        [
            [-0.008, 0.588, -0.062, -0.060, 0.072, -0.022],
            [0.130, 0.683, -0.151, -0.019, 0.066, -0.029],
            [0.330, 0.487, -0.221, 0.055, -0.064, -0.026],
            [0.568, 0.187, -0.295, 0.109, -0.152, -0.014],
            [0.873, -0.392, -0.362, 0.226, -0.462, 0.001],
            [1.132, -1.237, -0.412, 0.288, -0.823, 0.056],
            [1.060, -1.600, -0.359, 0.264, -1.127, 0.131],
            [0.678, -0.327, -0.250, 0.156, -1.377, 0.251],
        ]
    )
    # BUGFIX: the last Perez bin starts at 6.2, not 6.5; with 6.5 the range
    # 6.2 <= epsilon < 6.5 fell through to bin index -1, which only worked by
    # accident via numpy's negative-index wraparound to the last row.
    bins = (
        ((1 <= epsilon) & (epsilon < 1.065)) * 1
        + ((1.065 <= epsilon) & (epsilon < 1.23)) * 2
        + ((1.23 <= epsilon) & (epsilon < 1.5)) * 3
        + ((1.5 <= epsilon) & (epsilon < 1.95)) * 4
        + ((1.95 <= epsilon) & (epsilon < 2.8)) * 5
        + ((2.8 <= epsilon) & (epsilon < 4.5)) * 6
        + ((4.5 <= epsilon) & (epsilon < 6.2)) * 7
        + (6.2 <= epsilon) * 8
    ) - 1
    F = F_coefficiences[bins]

    # circumsolar (F_1) and horizon (F_2) brightening coefficients
    F_1 = np.maximum(0, F[:, 0] + F[:, 1] * Delta + F[:, 2] * Z_sun_rad)
    F_2 = F[:, 3] + F[:, 4] * Delta + F[:, 5] * Z_sun_rad

    diffuse = dif * (
        (1 - F_1) * (1 + np.cos(T_pv_rad)) / 2 + F_1 * (a / b) + F_2 * np.sin(T_pv_rad)
    )

    # reflected radiation (based on https://pvpmc.sandia.gov/modeling-steps/1-weather-design-inputs/plane-of-array-poa-irradiance/calculating-poa-irradiance/poa-ground-reflected/)
    reflected = swd * rho * (1 - np.cos(T_pv_rad)) / 2

    return direct + diffuse + reflected