runme.py

    """
    The FEN-Tool is an optimization tool for prosumer, district, and interconnected city models.
    
    Copyright (C) 2022. Mauricio Celi Cortés, Jingyu Gong, Jonas van Ouwerkerk, Felix Wege, Nie Yi, Jonas Brucksch
    
    This program is free software; you can redistribute it and/or
    modify it under the terms of the GNU Lesser General Public License
    as published by the Free Software Foundation; either version 3 of
    the License, or (at your option) any later version.
    
    This program is distributed in the hope that it will be useful, but
    WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    Lesser General Public License for more details.
    
    You should have received a copy of the GNU Lesser General Public
    License along with this library; if not, write to the Free Software
    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
    02110-1301 USA.
    
    Project host: RWTH Aachen University, Aachen, Germany
    Project Website: https://www.fenaachen.net/projekte/fen-ineed-dc
    """
    
    import time
    import pandas as pd
    import argparse
    from tqdm import tqdm
    import Model_Library.Prosumer.main as main
    from multiprocessing import Pool
    import os
    from functools import partial
    from Model_Library.Prosumer.scripts.results_evaluation.results_evaluation import \
        Plot_savings
    
    
    # from Tooling.results_evaluation.results_evaluation import Plot_savings
    
    def process_each_prosumer(prosumer_name, prosumer_dict, data_source, commentary,
                              no_process_bar_rh):
        """
        The method XYZ adds ...
        :param
        XYZ:
        :returns
        XYZ
        """
        try:
            # PLEASE CHANGE HERE
            # Set the simulation time frame and optional rolling horizon configurations:
            # 't_start': start date of the simulation, Format: YYYY-MM-DD hh:mm:ss
            # 't_end': end date of the simulation, Format: YYYY-MM-DD hh:mm:ss
            # 't_step': time step of the optimization model, Unit: hours
            # Rolling horizon (RH) can be configured with:
            # 't_rh_horizon': width of each rolling horizon interval, Unit: hours, minimum value: 2!
            # 't_rh_shift': shift between consecutive rolling horizon intervals, Unit: hours
            # 't_current_value_length': number of values at the beginning of each interval that are replaced by real (measured) values, Unit: hours
            # 't_history': number of days before the simulation interval that the demand generator needs to make the required predictions, Unit: days
            if prosumer_dict[prosumer_name]['rolling_horizon']:
                t_start = pd.Timestamp("2019-09-01 00:00:00")
                t_end = pd.Timestamp("2019-09-01 5:00:00")
                t_step = 1
                t_rh_horizon = 3
                t_rh_shift = 1
                t_current_value_length = 2
                t_history = 14  # days
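                # Illustration (based on the values above): with t_rh_horizon = 3 and
                # t_rh_shift = 1, the loop below optimizes the overlapping intervals
                # 00:00-03:00, 01:00-04:00, 02:00-05:00 and finally 03:00-05:00
                # (clipped at t_end), each shifted by one hour.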
    
                # PLEASE CHANGE HERE
                # Prediction settings
                predictions = {'demand_electric': 'SameHourYesterday',
                               'demand_heat': 'SameHourYesterday',
                               'day_ahead_price': 'SameHourYesterday',
                               'intraday_price': 'SameHourYesterday',
                               'solar_radiation': 'Perfect',
                               # currently generate_g_t_series uses the same t_start as the prediction, so no
                               # historical data is available for the prediction; an easy fix would be to pass
                               # t_start minus a time delta to generate_g_t_series
                               'temperature': 'SameHourYesterday'}
            else:
                t_start = pd.Timestamp("2019-07-01 00:00:00")
                t_end = pd.Timestamp("2019-7-10 23:00:00") + pd.Timedelta(hours=1)
                t_step = 1
                t_rh_horizon = (t_end - t_start) / pd.Timedelta(hours=1)
                t_rh_shift = t_rh_horizon - 1
                t_current_value_length = t_rh_horizon
                t_history = 0  # days
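                # Illustration (based on the values above): the horizon is not rolled here;
                # t_rh_horizon spans the full period between t_start and t_end, so the loop
                # below runs exactly once and solves the whole horizon in a single optimization.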
    
                # PLEASE CHANGE HERE
                # Prediction settings
                predictions = {'demand_electric': 'Perfect',
                               'demand_heat': 'Perfect',
                               'day_ahead_price': 'Perfect',
                               'intraday_price': 'Perfect',
                               'solar_radiation': 'Perfect',
                               # currently generate_g_t_series uses the same t_start as the prediction, so no
                               # historical data is available for the prediction; an easy fix would be to pass
                               # t_start minus a time delta to generate_g_t_series
                               'temperature': 'Perfect'}
    
            # Fixed variables - DO NOT CHANGE
            storage_states = {}
            interim_results = {}
            final_iteration = False
    
            # Set aggregation options
            parser = argparse.ArgumentParser(
                description='Start optimization from DB or local data')
            parser.add_argument('-a', '--aggregate', action="store_true",
                                dest="aggregate",
                                help="activating aggregation of input time series",
                                default=False)
            options = parser.parse_args()
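            # Usage note: aggregation is off by default; start the script with the
            # flag '-a' (or '--aggregate') to aggregate the input time series, e.g.
            #   python runme.py --aggregate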
    
            # Calculate number of rolling horizon intervals and loop through them
            for t in tqdm(pd.date_range(t_start,
                                        t_end - pd.Timedelta(hours=t_rh_shift + 1),
                                        freq=str(t_rh_shift) + 'H'),
                          disable=no_process_bar_rh):
                # ToDo: replace first value with perfect value (can be done in runme)
                # set end date for current loop
                t_end_loop = t + pd.Timedelta(hours=t_rh_horizon)
    
                # exceptions that occur at global end of simulation horizon
                if t_end_loop > t_end:
                    t_end_loop = t_end
                if t_current_value_length > (t_end_loop - t) / pd.Timedelta(
                        hours=1):
                    t_current_value_length = (t_end_loop - t) / pd.Timedelta(
                        hours=1)
    
                # Set flag for final iteration
                if t == t_end - pd.Timedelta(hours=t_rh_shift + 1):
                    final_iteration = True
    
                # Start main programme
                prosumer = main.Main(data_source,
                                     {prosumer_name: prosumer_dict[prosumer_name]},
                                     t, t_end_loop, t_step,
                                     predictions, t_current_value_length, t_end,
                                     t_history, commentary, storage_states,
                                     t_rh_shift, aggregation=options.aggregate)
    
                # Run optimization
                prosumer.run_optimization(prosumer.prosumer_name_list)
    
                # Show results - Results are only plotted after last iteration of rolling horizon
                prosumer.show_results(prosumer.prosumer_name_list, interim_results,
                                      final_iteration)
    
                # Get storage states from this iteration
                storage_states = prosumer.charge_status
    
                # Get interim results of current rolling horizon interval
                interim_results = prosumer.interim_results
        except ValueError:
            print(prosumer_name + " could not be optimized!")
    
    
    # MAIN PROGRAM -------------------------------------------------------------------------------------------------------
    if __name__ == "__main__":
        """
        The method XYZ adds ...
        :param
        XYZ:
        :returns
        XYZ
        """
        # Initialization scenario path and global variables for the prosumer optimization
        # Start timer
        start = time.time()
    
        # PLEASE CHANGE HERE
        # Path to local data - only used when the local data source is selected (data_source = 2)
        # 'topology_path': path to matrices that define the prosumer topology
        # 'config_path': path to global configurations like prices, injection prices, emission costs, etc.
        # topology_path = 'input_files/scenarios/Study_Base'
        # config_path = topology_path + '/config.csv'
        # data_path = topology_path + '/data_path.csv'
        # prosumer_name = 'office'
        # prosumer_dict = {prosumer_name: {'topology_path': topology_path, 'config_path': config_path, 'data_path': data_path}}
        # topology_path = ['input_files/models/SCN0_CAT1']
        # prosumer_name = ['SCN0_CAT1']
        # rolling_horizon = [False]
        # elec_demand = [1500, 10000]
        # therm_demand = [5000, 20000]
        # hot_water_demand = 1500  # [1500, 1500]
        # step_elec_demand = 500
        # step_therm_demand = 500
        # step_hot_water_demand = 0
        # prosumer_dict = {}
        """    for i in range(len(prosumer_name)):
            for j in range(elec_demand[0], elec_demand[1], step_elec_demand):
                for k in range(therm_demand[0], therm_demand[1], step_therm_demand):
                    #for l in range(hot_water_demand[0], hot_water_demand[1], step_hot_water_demand):
                    prosumer_dict[prosumer_name[i]+'_'+str(j)+'_'+str(k)] = {'elec_demand': j,
                                                                                        'therm_demand': k,
                                                                                        'hot_water_demand': hot_water_demand,
                                                                                        'topology_path': topology_path[i],
                                                                                        'config_path': topology_path[i] + '/config.csv',
                                                                                        'data_path': topology_path[i] + '/data_path.csv',
                                                                                        'rolling_horizon': rolling_horizon[i]}"""
    
        prosumer_dict = {
            # 'SCN0_CAT1_2000_6000': {'elec_demand': 2000, 'therm_demand': 6000, 'hot_water_demand': 1500, 'topology_path': 'input_files/models/SCN0_CAT1', 'config_path': 'input_files/models/SCN0_CAT1/config.csv', 'data_path': 'input_files/models/SCN0_CAT1/data_path.csv', 'rolling_horizon': False},
            # 'SCN3_CAT1_3000_6000': {'elec_demand': 3000, 'therm_demand': 6000, 'hot_water_demand': 1500, 'topology_path': 'input_files/models/SCN3_CAT1', 'config_path': 'input_files/models/SCN3_CAT1/config.csv', 'data_path': 'input_files/models/SCN3_CAT1/data_path.csv', 'rolling_horizon': False},
            # 'SCN1_CAT1_3000_6000': {'elec_demand': 3000, 'therm_demand': 6000, 'hot_water_demand': 1500, 'topology_path': 'input_files/models/SCN1_CAT1', 'config_path': 'input_files/models/SCN1_CAT1/config.csv', 'data_path': 'input_files/models/SCN1_CAT1/data_path.csv', 'rolling_horizon': False},
            # 'SCN2_CAT1_PV14_HP_3000_6000': {'elec_demand': 3000, 'therm_demand': 6000,
            #                                 'hot_water_demand': 1500,
            #                                 'topology_path': 'input_files/models/prosumer_models/SCN2_CAT1_PV14_HP',
            #                                 'config_path': 'input_files/models/prosumer_models/SCN2_CAT1_PV14_HP/config.csv',
            #                                 'data_path': 'input_files/models/prosumer_models/SCN2_CAT1_PV14_HP/data_path.csv',
            #                                 'rolling_horizon': False}
            'office_pv_heatpump': {'elec_demand': 32905,
                                   'therm_demand': 115154,
                                   'hot_water_demand': 11882,
                                   'topology_path': 'input_files/models/prosumer_models/office_pv_heatpump',
                                   'config_path': 'input_files/models/prosumer_models/office_pv_heatpump/config.csv',
                                   'data_path': 'input_files/models/prosumer_models/office_pv_heatpump/data_path.csv',
                                   'rolling_horizon': False}
            # 'SCN2_CAT1_PV12_BA_3000_6000': {'elec_demand': 9000, 'therm_demand': 20000, 'hot_water_demand': 1500, 'topology_path': 'input_files/models/SCN2_CAT1_PV12_BA', 'config_path': 'input_files/models/SCN2_CAT1_PV12_BA/config.csv', 'data_path': 'input_files/models/SCN3_CAT1_PV12_BA/data_path.csv', 'rolling_horizon': True}}
    
        }
        # PLEASE CHANGE HERE
        # Select data source
        # Options: 1: import from database, 2: import from local folder
        data_source = 2
        reference_results = {}
        commentary = True  # enable additional console output
        no_process_bar_rh = True  # disable the progress bar of the rolling horizon loop
        parallel_processing = False  # run independent prosumers in parallel on multiple cores
    
        # Timer checkpoint: end of pre-processing
        tic = time.time()
        # Start program
        # Run multiple independent prosumers in parallel on multiple cores
        if parallel_processing:
            count_processes = len(prosumer_dict.keys())
            pool = Pool(os.cpu_count())
            parallel_func = partial(process_each_prosumer,
                                    prosumer_dict=prosumer_dict,
                                    data_source=data_source,
                                    commentary=commentary,
                                    no_process_bar_rh=no_process_bar_rh)
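            # Note: Pool.map blocks until all prosumers are finished, so the surrounding
            # tqdm bar only completes at the end; an incremental alternative would be
            # pool.imap, which yields results as they become available.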
            mapped_values = list(
                tqdm(pool.map(parallel_func, list(prosumer_dict.keys())),
                     total=count_processes))
        # Normal processing, one core only
        else:
            for prosumer_name in list(prosumer_dict.keys()):
                process_each_prosumer(prosumer_name=prosumer_name,
                                      prosumer_dict=prosumer_dict,
                                      data_source=data_source,
                                      commentary=commentary,
                                      no_process_bar_rh=no_process_bar_rh)
    
        # Timer checkpoint: end of optimization
        toc = time.time()
    
        # if reference_results:
        #    for topology in prosumer_name:
        #        Plot_savings(reference_results, topology)
    
        # Timer checkpoint: end of post-processing
        end = time.time()
        # Additional console output
        if commentary:
            print("============ Execution Times =============")
            print("Pre-processing [s]: \t" + str(tic - start))
            # print("(Interaction with database [s]:\t" + str(prosumer.connect_with_db) + ")")
            print("Optimization [s]: \t" + str(toc - tic))
            print("Post-processing [s]: \t" + str(end - toc))
            print("----------------------------------------")
            print("Total [s]: \t" + str((end - toc) + (toc - tic) + (tic - start)))
            print("==========================================")