import json
import math
import os
import subprocess
from pathlib import Path

import pandas as pd
import geopandas as gpd
import numpy as np
import matplotlib.pyplot as plt

import hub.helpers.constants as cte
from hub.helpers.dictionaries import Dictionaries
from hub.helpers.monthly_values import MonthlyValues
from hub.helpers.peak_loads import PeakLoads
from hub.exports.exports_factory import ExportsFactory
from hub.imports.geometry_factory import GeometryFactory
from hub.imports.construction_factory import ConstructionFactory
from hub.imports.usage_factory import UsageFactory
from hub.imports.weather_factory import WeatherFactory
from hub.imports.energy_systems_factory import EnergySystemsFactory
from hub.imports.results_factory import ResultFactory
from scripts.ep_workflow import energy_plus_workflow
from scripts.energy_system_sizing_and_simulation_factory import EnergySystemsSimulationFactory
from scripts.solar_angles import CitySolarAngles
from scripts.pv_sizing_and_simulation import PVSizingSimulation
#%% # -----------------------------------------------
# Input and output directories
#%% # -----------------------------------------------
input_files_path = (Path(__file__).parent / 'input_files')
output_path = (Path(__file__).parent / 'out_files').resolve()
output_path.mkdir(parents=True, exist_ok=True)
energy_plus_output_path = output_path / 'energy_plus_outputs'
energy_plus_output_path.mkdir(parents=True, exist_ok=True)
simulation_results_path = (Path(__file__).parent / 'out_files' / 'simulation_results').resolve()
simulation_results_path.mkdir(parents=True, exist_ok=True)
sra_output_path = output_path / 'sra_outputs'
sra_output_path.mkdir(parents=True, exist_ok=True)
cost_analysis_output_path = output_path / 'cost_analysis'
cost_analysis_output_path.mkdir(parents=True, exist_ok=True)

#%% # -----------------------------------------------
# """add geojson paths and create city for Baseline"""
#%% # -----------------------------------------------
geojson_file_path_baseline = output_path / 'updated_buildings_with_all_data_baseline.geojson'
geojson_file_path_2024 = output_path / 'updated_buildings_with_all_data_2024.geojson'
with open(geojson_file_path_baseline, 'r') as f:
    building_type_data = json.load(f)
with open(geojson_file_path_2024, 'r') as f:
    building_type_data_2024 = json.load(f)

# Create the baseline city object from the GeoJSON file
city = GeometryFactory('geojson',
                       path=geojson_file_path_baseline,
                       height_field='maximum_roof_height',
                       year_of_construction_field='year_built',
                       function_field='building_type',
                       function_to_hub=Dictionaries().montreal_function_to_hub_function).city

#%% # ----------------------------------------------
# Enrich city data
#%% # ----------------------------------------------
ConstructionFactory('nrcan', city).enrich()
UsageFactory('nrcan', city).enrich()
WeatherFactory('epw', city).enrich()
# EnergyPlus is not run here, as the demand has already been obtained
# energy_plus_workflow(city)
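
# Optional sanity check (added illustration, not part of the original workflow):
# confirm the GeoJSON was parsed into building objects before enriching further.
print(f'{len(city.buildings)} buildings loaded for the baseline city')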

#%% # -----------------------------------------------
# """Enrich city with geojson file data"""
#%% # -----------------------------------------------
percentage_data = {
    1646: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 2672.550473, "total_floor_area": 26725.50473},
    1647: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 2653.626087, "total_floor_area": 26536.26087},
    1648: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 1056.787496, "total_floor_area": 10567.87496},
    1649: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 1906.620746, "total_floor_area": 19066.20746},
    1650: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 659.1119416, "total_floor_area": 5272.895533},
    1651: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 1167.208109, "total_floor_area": 9337.664871},
    1652: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 1193.251653, "total_floor_area": 9546.013222},
    1653: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 1491.722543, "total_floor_area": 11933.78035},
    1654: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 1168.005028, "total_floor_area": 9344.040224},
    1655: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 1264.906961, "total_floor_area": 10119.25569},
    1656: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 1281.768818, "total_floor_area": 10254.15054},
    1657: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 290.3886018, "total_floor_area": 2323.108814},
    1658: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 847.5095193, "total_floor_area": 6780.076155},
    1659: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 1115.319153, "total_floor_area": 8922.553224},
    1660: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 469.2918062, "total_floor_area": 3754.33445},
    1661: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 1292.298346, "total_floor_area": 10338.38677},
    1662: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 625.7828863, "total_floor_area": 5006.263091},
    1663: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 1876.02897, "total_floor_area": 15008.23176},
    1664: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 1118.224781, "total_floor_area": 22364.49562},
    1665: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 1502.787808, "total_floor_area": 30055.75617},
    1666: {"type1_%": 0.891045711, "type2_%": 0.108954289, "type3_%": 0, "roof_area": 3038.486076, "total_floor_area": 30384.86076},
    1667: {"type1_%": 0.8, "type2_%": 0.2, "type3_%": 0, "roof_area": 1343.832818, "total_floor_area": 13438.32818},
    1668: {"type1_%": 0.7, "type2_%": 0.3, "type3_%": 0, "roof_area": 961.0996956, "total_floor_area": 4805.498478},
    1669: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 489.1282111, "total_floor_area": 1956.512845},
    1673: {"type1_%": 0.7, "type2_%": 0.3, "type3_%": 0, "roof_area": 1693.141465, "total_floor_area": 5079.424396},
    1674: {"type1_%": 0.7, "type2_%": 0.3, "type3_%": 0, "roof_area": 3248.827576, "total_floor_area": 9746.482729},
    1675: {"type1_%": 0.7, "type2_%": 0.3, "type3_%": 0, "roof_area": 4086.842191, "total_floor_area": 12260.52657},
    1676: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 2786.114146, "total_floor_area": 11144.45658},
    1677: {"type1_%": 1, "type2_%": 0, "type3_%": 0, "roof_area": 5142.784184, "total_floor_area": 15428.35255},
    1678: {"type1_%": 0.7, "type2_%": 0.3, "type3_%": 0, "roof_area": 6068.664574, "total_floor_area": 18205.99372},
    1679: {"type1_%": 0.7, "type2_%": 0.3, "type3_%": 0, "roof_area": 5646.751407, "total_floor_area": 16940.25422},
    1680: {"type1_%": 0.7, "type2_%": 0.3, "type3_%": 0, "roof_area": 1601.765953, "total_floor_area": 4805.297859},
    1681: {"type1_%": 0.7, "type2_%": 0.3, "type3_%": 0, "roof_area": 9728.221797, "total_floor_area": 29184.66539},
    1687: {"type1_%": 0.606611029, "type2_%": 0.28211422, "type3_%": 0.11127475, "roof_area": 4268.608743, "total_floor_area": 59760.52241},
    1688: {"type1_%": 0.92, "type2_%": 0.08, "type3_%": 0, "roof_area": 2146.654828, "total_floor_area": 38639.7869},
    1689: {"type1_%": 0.96, "type2_%": 0.04, "type3_%": 0, "roof_area": 2860.270711, "total_floor_area": 57205.41421},
    1690: {"type1_%": 0.94, "type2_%": 0.06, "type3_%": 0, "roof_area": 2189.732519, "total_floor_area": 28466.52275},
    1691: {"type1_%": 0.75, "type2_%": 0.25, "type3_%": 0, "roof_area": 3159.077523, "total_floor_area": 31590.77523},
}
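
# The GeoJSON properties below carry demands in kWh, while hub stores its time
# series in Joules, so the enrichment multiplies by 1000 * cte.WATTS_HOUR_TO_JULES
# (assumed to equal 3600 J/Wh): 1 kWh = 1000 Wh * 3600 J/Wh = 3.6e6 J.
# Minimal helper sketch of that conversion (not used by the workflow itself):
def kwh_to_joules(value_kwh):
    """Convert a kWh value to Joules (1 kWh = 3.6e6 J)."""
    return value_kwh * 1000 * cte.WATTS_HOUR_TO_JULES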

def enrich_buildings_with_geojson_data(building_type_data, city):
    for building in city.buildings:
        for idx, feature in enumerate(building_type_data['features']):
            if feature['properties']['id'] == str(building.name):
                building.heating_demand[cte.HOUR] = [x * 1000 * cte.WATTS_HOUR_TO_JULES for x in feature['properties'].get('heating_demand_kWh', [0])]
                building.cooling_demand[cte.HOUR] = [x * 1000 * cte.WATTS_HOUR_TO_JULES for x in feature['properties'].get('cooling_demand_kWh', [0])]
                building.domestic_hot_water_heat_demand[cte.HOUR] = [x * 1000 * cte.WATTS_HOUR_TO_JULES for x in feature['properties'].get('domestic_hot_water_heat_demand_kWh', [0])]
                building.appliances_electrical_demand[cte.HOUR] = [x * 1000 * cte.WATTS_HOUR_TO_JULES for x in feature['properties'].get('appliances_electrical_demand_kWh', [0])]
                building.lighting_electrical_demand[cte.HOUR] = [x * 1000 * cte.WATTS_HOUR_TO_JULES for x in feature['properties'].get('lighting_electrical_demand_kWh', [0])]
                building.heating_demand[cte.MONTH] = MonthlyValues.get_total_month(building.heating_demand[cte.HOUR])
                building.cooling_demand[cte.MONTH] = MonthlyValues.get_total_month(building.cooling_demand[cte.HOUR])
                building.domestic_hot_water_heat_demand[cte.MONTH] = MonthlyValues.get_total_month(building.domestic_hot_water_heat_demand[cte.HOUR])
                building.appliances_electrical_demand[cte.MONTH] = MonthlyValues.get_total_month(building.appliances_electrical_demand[cte.HOUR])
                building.lighting_electrical_demand[cte.MONTH] = MonthlyValues.get_total_month(building.lighting_electrical_demand[cte.HOUR])
                building.heating_demand[cte.YEAR] = [sum(building.heating_demand[cte.MONTH])]
                building.cooling_demand[cte.YEAR] = [sum(building.cooling_demand[cte.MONTH])]
                building.domestic_hot_water_heat_demand[cte.YEAR] = [sum(building.domestic_hot_water_heat_demand[cte.MONTH])]
                building.appliances_electrical_demand[cte.YEAR] = [sum(building.appliances_electrical_demand[cte.MONTH])]
                building.lighting_electrical_demand[cte.YEAR] = [sum(building.lighting_electrical_demand[cte.MONTH])]


enrich_buildings_with_geojson_data(building_type_data, city)

#%% # -----------------------------------------------
# """Add energy systems"""
#%% # -----------------------------------------------
for building in city.buildings:
    building.energy_systems_archetype_name = 'system 1 electricity'

EnergySystemsFactory('montreal_custom', city).enrich()


def baseline_to_dict(building):
    return {
        'heating_consumption_kWh': [x / (cte.WATTS_HOUR_TO_JULES * 1000) for x in building.heating_consumption[cte.HOUR]],
        'cooling_consumption_kWh': [x / (cte.WATTS_HOUR_TO_JULES * 1000) for x in building.cooling_consumption[cte.HOUR]],
        'domestic_hot_water_consumption_kWh': [x / (cte.WATTS_HOUR_TO_JULES * 1000) for x in building.domestic_hot_water_consumption[cte.HOUR]],
        'appliances_consumption_kWh': [x / (cte.WATTS_HOUR_TO_JULES * 1000) for x in building.appliances_electrical_demand[cte.HOUR]],
        'lighting_consumption_kWh': [x / (cte.WATTS_HOUR_TO_JULES * 1000) for x in building.lighting_electrical_demand[cte.HOUR]]
    }


buildings_dic = {}
for building in city.buildings:
    buildings_dic[building.name] = baseline_to_dict(building)

scenario = {}
scenario['baseline'] = buildings_dic
print("Scenario 1: baseline completed successfully")

# Free memory before building the next scenario's city
del city
del buildings_dic
del building_type_data
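
# Note: the export dictionaries above convert back from Joules to kWh by dividing
# by (cte.WATTS_HOUR_TO_JULES * 1000), mirroring the kWh -> J conversion applied
# when the demands were loaded from the GeoJSON.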

#%% # -----------------------------------------------
# Scenario 2: efficient buildings with PV
#%% # -----------------------------------------------
# Create the city object from the 2024 (efficient) GeoJSON file
city = GeometryFactory('geojson',
                       path=geojson_file_path_2024,
                       height_field='maximum_roof_height',
                       year_of_construction_field='year_built',
                       function_field='building_type',
                       function_to_hub=Dictionaries().montreal_function_to_hub_function).city

#%% # -----------------------------------------------
# Enrich city data
#%% # -----------------------------------------------
ConstructionFactory('nrcan', city).enrich()
UsageFactory('nrcan', city).enrich()
WeatherFactory('epw', city).enrich()

enrich_buildings_with_geojson_data(building_type_data_2024, city)

def to_dict(building, hourly_pv):
    return {
        'heating_consumption_kWh': [x / (cte.WATTS_HOUR_TO_JULES * 1000) for x in building.heating_consumption[cte.HOUR]],
        'cooling_consumption_kWh': [x / (cte.WATTS_HOUR_TO_JULES * 1000) for x in building.cooling_consumption[cte.HOUR]],
        'domestic_hot_water_consumption_kWh': [x / (cte.WATTS_HOUR_TO_JULES * 1000) for x in building.domestic_hot_water_consumption[cte.HOUR]],
        'appliances_consumption_kWh': [x / (cte.WATTS_HOUR_TO_JULES * 1000) for x in building.appliances_electrical_demand[cte.HOUR]],
        'lighting_consumption_kWh': [x / (cte.WATTS_HOUR_TO_JULES * 1000) for x in building.lighting_electrical_demand[cte.HOUR]],
        'hourly_pv_kWh': [x / (cte.WATTS_HOUR_TO_JULES * 1000) for x in hourly_pv]
    }


buildings_dic = {}
for building in city.buildings:
    building.energy_systems_archetype_name = 'system 1 electricity pv'

EnergySystemsFactory('montreal_custom', city).enrich()

#%% # -----------------------------------------------
# """SRA"""
#%% # -----------------------------------------------
ExportsFactory('sra', city, output_path).export()
sra_path = (output_path / f'{city.name}_sra.xml').resolve()
subprocess.run(['sra', str(sra_path)])
ResultFactory('sra', city, output_path).enrich()
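
# The SRA step exports the city, shells out to the external 'sra' solver and then
# re-imports the irradiance results. As written, subprocess.run() ignores the exit
# code; a hedged alternative (not in the original workflow) is to pass check=True,
# e.g. subprocess.run(['sra', str(sra_path)], check=True), so a solver failure
# raises immediately instead of enriching from missing results.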

solar_angles = CitySolarAngles(city.name,
                               city.latitude,
                               city.longitude,
                               tilt_angle=45,
                               surface_azimuth_angle=180).calculate

df = pd.DataFrame()
df.index = ['yearly lighting (kWh)', 'yearly appliance (kWh)', 'yearly heating (kWh)', 'yearly cooling (kWh)',
            'yearly dhw (kWh)', 'roof area (m2)', 'used area for pv (m2)', 'number of panels', 'pv production (kWh)']

for building in city.buildings:
    ghi = [x / cte.WATTS_HOUR_TO_JULES for x in building.roofs[0].global_irradiance[cte.HOUR]]
    pv_sizing_simulation = PVSizingSimulation(building,
                                              solar_angles,
                                              tilt_angle=45,
                                              module_height=1,
                                              module_width=2,
                                              ghi=ghi)
    pv_sizing_simulation.pv_output()
    yearly_lighting = building.lighting_electrical_demand[cte.YEAR][0] / 1000
    yearly_appliance = building.appliances_electrical_demand[cte.YEAR][0] / 1000
    yearly_heating = building.heating_demand[cte.YEAR][0] / (3.6e6 * 3)
    yearly_cooling = building.cooling_demand[cte.YEAR][0] / (3.6e6 * 4.5)
    yearly_dhw = building.domestic_hot_water_heat_demand[cte.YEAR][0] / 1000
    roof_area = building.roofs[0].perimeter_area
    used_roof = pv_sizing_simulation.available_space()
    number_of_pv_panels = pv_sizing_simulation.total_number_of_panels
    yearly_pv = building.onsite_electrical_production[cte.YEAR][0] / 3.6e6
    hourly_pv = building.onsite_electrical_production[cte.HOUR]
    df[f'{building.name}'] = [yearly_lighting, yearly_appliance, yearly_heating, yearly_cooling, yearly_dhw, roof_area,
                              used_roof, number_of_pv_panels, yearly_pv]
    buildings_dic[building.name] = to_dict(building, hourly_pv)
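
# Unit notes for the loop above (interpretation of the constants in the code):
# 3.6e6 J = 1 kWh, so heating demand / (3.6e6 * 3) converts Joules of heat to kWh
# of electricity under an assumed heating COP of about 3, and / (3.6e6 * 4.5) does
# the same for cooling with an assumed COP of 4.5; the onsite PV production is
# divided by 3.6e6 to report kWh.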

#%% # -----------------------------------------------
# """South-facing facades"""
#%% # -----------------------------------------------
def radians_to_degrees(radians):
    """Convert radians to degrees."""
    return radians * (180 / math.pi)


# Step 1: Create the walls_id dictionary with the south-oriented walls of each building
walls_id = {}
for building in city.buildings:
    ids = {}
    for wall in building.walls:
        wall_id = wall.id
        azimuth_degree = radians_to_degrees(float(wall.azimuth))
        if azimuth_degree > 90.0 or azimuth_degree < -90.0:
            ids[wall_id] = {
                'azimuth': azimuth_degree,
                'global_irradiance': wall.global_irradiance[cte.HOUR],
                'area': wall.perimeter_area
            }
    walls_id[building.name] = ids

# Step 2: Calculate pv_on_facade for each wall
for building_id, ids in walls_id.items():
    for wall_id, wall_data in ids.items():
        if 'global_irradiance' in wall_data:
            ghi = [x / cte.WATTS_HOUR_TO_JULES / 1000 for x in wall_data['global_irradiance']]
            wall_data['pv_on_facade'] = [x * 0.6 * wall_data['area'] * 0.22 for x in ghi]

walls_dic = output_path / 'walls_id.json'
with open(walls_dic, 'w') as json_file:
    json.dump(walls_id, json_file, indent=4)
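
# Facade PV estimate used above:
#   pv_on_facade [kWh] = GHI [kWh/m2] * 0.6 * wall area [m2] * 0.22
# where 0.6 is read here as the share of the facade assumed available for modules
# and 0.22 as the assumed module efficiency (the constants come from the code
# above; their interpretation is an assumption).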

#### EXPORT
# Convert walls_id dictionary to DataFrames for static and hourly data
# def convert_walls_id_to_dfs(walls_id):
#     static_data = {}
#     hourly_data = {}
#
#     for building_id, ids in walls_id.items():
#         for wall_id, wall_data in ids.items():
#             # Static data
#             static_data[f"{building_id}_{wall_id}_azimuth"] = wall_data.get('azimuth', None)
#             static_data[f"{building_id}_{wall_id}_area"] = wall_data.get('area', None)
#
#             if 'pv_on_facade' in wall_data:
#                 hourly_data[f"{building_id}_{wall_id}_pv_on_facade"] = wall_data['pv_on_facade']
#
#     # Create DataFrames
#     static_df = pd.DataFrame([static_data])
#     hourly_df = pd.DataFrame(hourly_data)
#
#     return static_df, hourly_df
#
# output_path_walls_id_dic = output_path / 'walls_id_data.xlsx'
# static_df, hourly_df = convert_walls_id_to_dfs(walls_id)
# with pd.ExcelWriter(output_path_walls_id_dic) as writer:
#     static_df.to_excel(writer, sheet_name='Static Data', index=False)
#     hourly_df.to_excel(writer, sheet_name='Hourly Data', index=False)
# print(f"Data successfully exported to {output_path}")

df.to_csv(output_path / 'pv.csv')

scenario['efficient with PV'] = buildings_dic
print("Scenario 2: efficient with PV run successfully")

# #%% # -----------------------------------------------
# # Scenario 3
# #%% # -----------------------------------------------
#
# for building in city.buildings:
#     building.energy_systems_archetype_name = 'PV+4Pipe+DHW'
# EnergySystemsFactory('montreal_future', city).enrich()
# buildings_dic = {}
# for building in city.buildings:
#     EnergySystemsSimulationFactory('archetype13', building=building, output_path=simulation_results_path).enrich()
#     buildings_dic[building.name] = to_dict(building, hourly_pv)
# scenario['efficient with PV+4Pipe+DHW'] = buildings_dic
# print("Scenario 3: efficient with PV+4Pipe+DHW run successfully")
#
# def extract_HP_size(building):
#     dic = {
#         # Heat Pump Rated Heating and Cooling Output
#         'hp_heat_size': building.energy_systems[1].generation_systems[1].nominal_heat_output / 1000,
#         'hp_cooling_output': building.energy_systems[1].generation_systems[1].nominal_cooling_output / 1000,
#         # Boiler Rated Heat Output
#         'boiler_heat_output': building.energy_systems[1].generation_systems[0].nominal_heat_output / 1000,
#         # TES characteristics
#         'tes_volume': building.energy_systems[1].generation_systems[0].energy_storage_systems[0].volume,
#         'tes_height': building.energy_systems[1].generation_systems[0].energy_storage_systems[0].height,
#         # DHW HP
#         'dhw_hp_heat_output': building.energy_systems[-1].generation_systems[0].nominal_heat_output / 1000,
#         # DHW TES characteristics
#         'dhw_tes_volume': building.energy_systems[-1].generation_systems[0].energy_storage_systems[0].volume,
#         'dhw_tes_height': building.energy_systems[-1].generation_systems[0].energy_storage_systems[0].height,
#     }
#     return dic
#
# HPs = {}
# for building in city.buildings:
#     HPs[building.name] = extract_HP_size(building)

clusters = pd.read_csv(output_path / 'clusters.csv')


# Step 2: Extract the hourly demand data (in kWh) for each building
def extract_building_demand(city):
    building_demand = {}
    for building in city.buildings:
        demands = {
            'heating_demand': [x / (1000 * cte.WATTS_HOUR_TO_JULES) for x in building.heating_demand[cte.HOUR]],
            'cooling_demand': [x / (1000 * cte.WATTS_HOUR_TO_JULES) for x in building.cooling_demand[cte.HOUR]],
            'domestic_hot_water_demand': [x / (1000 * cte.WATTS_HOUR_TO_JULES) for x in building.domestic_hot_water_heat_demand[cte.HOUR]],
            'appliances_electrical_demand': [x / (1000 * cte.WATTS_HOUR_TO_JULES) for x in building.appliances_electrical_demand[cte.HOUR]],
            'lighting_electrical_demand': [x / (1000 * cte.WATTS_HOUR_TO_JULES) for x in building.lighting_electrical_demand[cte.HOUR]]
        }
        building_demand[building.name] = demands
    return building_demand

# Step 3: Sum the demand types for each cluster
def sum_demands_by_cluster(building_demand, clusters, demand_types):
    cluster_demands = {cluster: {demand_type: [0] * 8760 for demand_type in demand_types}
                       for cluster in clusters['cluster'].unique()}

    for _, row in clusters.iterrows():
        building_id = str(row['id'])
        cluster = row['cluster']
        if building_id in building_demand:
            for demand_type in demand_types:
                cluster_demands[cluster][demand_type] = [
                    sum(x) for x in zip(cluster_demands[cluster][demand_type], building_demand[building_id][demand_type])
                ]

    return cluster_demands
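
# sum_demands_by_cluster expects clusters.csv to provide an 'id' column (building
# name) and a 'cluster' column (cluster label), as read via row['id'] / row['cluster'].
# The aggregation is an element-wise sum of hourly profiles, e.g.
# [sum(x) for x in zip([1, 2, 3], [10, 20, 30])] == [11, 22, 33].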

def plot_demands_by_cluster(cluster_demands, demand_types, output_folder):
    os.makedirs(output_folder, exist_ok=True)

    for cluster, demands in cluster_demands.items():
        plt.figure(figsize=(15, 10))
        for demand_type in demand_types:
            plt.plot(demands[demand_type], label=demand_type)

        plt.title(f'Summed Demands for Cluster {cluster}')
        plt.xlabel('Hour of the Year')
        plt.ylabel('Demand (kWh)')
        plt.legend(loc='upper right')
        plt.grid(True)
        plt.tight_layout()
        plt.savefig(os.path.join(output_folder, f'cluster_{cluster}_summed_demands.png'))
        plt.close()

# Example usage
demand_types = [
    'heating_demand',
    'cooling_demand',
    'domestic_hot_water_demand',
    'appliances_electrical_demand',
    'lighting_electrical_demand'
]

# Extract the building demand data and aggregate it by cluster
building_demand = extract_building_demand(city)
cluster_demands = sum_demands_by_cluster(building_demand, clusters, demand_types)

# Create a DataFrame to export the results
cluster_demands_df = {f"{cluster}_{demand_type}": data for cluster, demands in cluster_demands.items()
                      for demand_type, data in demands.items()}
cluster_demands_df = pd.DataFrame(cluster_demands_df)

# Save the results to an Excel file
cluster_demands_df.to_excel(output_path / 'cluster_demands.xlsx', index=False)
print(f"Clustered demand data successfully exported to {output_path}")

#%% # -----------------------------------------------
# Scenario 4: clustered buildings with PV+4Pipe+DHW systems
#%% # -----------------------------------------------
del city
del buildings_dic

geojson_file_path_clusters = output_path / 'new.geojson'
with open(geojson_file_path_clusters, 'r') as f:
    building_type_data_new = json.load(f)

# Create the city object from the clusters GeoJSON file
city = GeometryFactory('geojson',
                       path=geojson_file_path_clusters,
                       height_field='maximum_roof_height',
                       year_of_construction_field='year_built',
                       function_field='building_type',
                       function_to_hub=Dictionaries().montreal_function_to_hub_function).city

#%% # -----------------------------------------------
# Enrich city data
#%% # -----------------------------------------------
ConstructionFactory('nrcan', city).enrich()
UsageFactory('nrcan', city).enrich()
WeatherFactory('epw', city).enrich()

# Map each representative building to its demand cluster
buildings_clusters = {
    1651: 4,
    1662: 0,
    1667: 1,
    1674: 2,
    1688: 3
}

# Overwrite each representative building's demands with the summed demands of its cluster
for building_id, cluster in buildings_clusters.items():
    for feature in building_type_data_new['features']:
        if feature['properties']['id'] == str(building_id):
            feature['properties']['heating_demand_kWh'] = cluster_demands[cluster]['heating_demand']
            feature['properties']['cooling_demand_kWh'] = cluster_demands[cluster]['cooling_demand']
            feature['properties']['domestic_hot_water_heat_demand_kWh'] = cluster_demands[cluster]['domestic_hot_water_demand']
            feature['properties']['appliances_electrical_demand_kWh'] = cluster_demands[cluster]['appliances_electrical_demand']
            feature['properties']['lighting_electrical_demand_kWh'] = cluster_demands[cluster]['lighting_electrical_demand']

enrich_buildings_with_geojson_data(building_type_data_new, city)
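
# After this enrichment each representative building carries the summed hourly
# demand of its whole cluster (in kWh, as expected by enrich_buildings_with_geojson_data),
# so the systems sized below stand in for one aggregated plant per cluster.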

for building in city.buildings:
    building.energy_systems_archetype_name = 'PV+4Pipe+DHW'

EnergySystemsFactory('montreal_future', city).enrich()

buildings_dic = {}
for building in city.buildings:
    EnergySystemsSimulationFactory('archetype13', building=building, output_path=simulation_results_path).enrich()
    buildings_dic[building.name] = to_dict(building, hourly_pv)

scenario['efficient with PV+4Pipe+DHW'] = buildings_dic
print("Scenario 4: efficient with PV+4Pipe+DHW run successfully for Clusters")

def extract_HP_size(building):
    """Collect the sized equipment capacities and storage characteristics of a building."""
    dic = {
        # Heat pump rated heating and cooling output
        'hp_heat_size': building.energy_systems[1].generation_systems[1].nominal_heat_output / 1000,
        'hp_cooling_output': building.energy_systems[1].generation_systems[1].nominal_cooling_output / 1000,
        # Boiler rated heat output
        'boiler_heat_output': building.energy_systems[1].generation_systems[0].nominal_heat_output / 1000,
        # TES characteristics
        'tes_volume': building.energy_systems[1].generation_systems[0].energy_storage_systems[0].volume,
        'tes_height': building.energy_systems[1].generation_systems[0].energy_storage_systems[0].height,
        # DHW heat pump
        'dhw_hp_heat_output': building.energy_systems[-1].generation_systems[0].nominal_heat_output / 1000,
        # DHW TES characteristics
        'dhw_tes_volume': building.energy_systems[-1].generation_systems[0].energy_storage_systems[0].volume,
        'dhw_tes_height': building.energy_systems[-1].generation_systems[0].energy_storage_systems[0].height,
    }
    return dic


HPs = {}
for building in city.buildings:
    HPs[building.name] = extract_HP_size(building)
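
# extract_HP_size assumes the system layout produced by the archetype13 sizing above:
# energy_systems[1] holds the space heating/cooling system (boiler first, heat pump
# second in generation_systems) and energy_systems[-1] the DHW system; dividing the
# nominal outputs by 1000 presumably reports them in kW.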

#%% # -----------------------------------------------
# """Exporters"""
#%% # -----------------------------------------------
# Convert the HPs dictionary to a DataFrame and save it to an Excel file
df = pd.DataFrame.from_dict(HPs, orient='index')
output_path_HPs = output_path / 'HPs_data_sc4.xlsx'
df.to_excel(output_path_HPs, index_label='building_id')
print(f"Data successfully exported to {output_path_HPs}")

# Write a second copy of the heat pump sizing data
df = pd.DataFrame.from_dict(HPs, orient='index')
output_path_HPs = output_path / 'HPs_data.xlsx'
df.to_excel(output_path_HPs, index_label='building_id')
print(f"Data successfully exported to {output_path_HPs}")

districts_demands = {}


def extract_and_sum_demand_data(scenario, demand_types):
    """Sum each demand type over all buildings to get district-level hourly profiles per scenario."""
    # Conversion factor constant
    conversion_factor = 1 / (cte.WATTS_HOUR_TO_JULES * 1000)

    # Loop through each scenario
    for scenario_key, buildings in scenario.items():
        # Initialize the district demand sums for this scenario
        district_demand = {demand_type: [0] * 8760 for demand_type in demand_types}
        district_demand['hourly_pv_kWh'] = [0] * 8760
        # Loop through each building in the scenario
        for building_id, building_data in buildings.items():
            # Loop through each demand type and sum up the data
            for demand_type in demand_types:
                if demand_type in building_data:
                    district_demand[demand_type] = [sum(x) for x in zip(district_demand[demand_type], building_data[demand_type])]

            # If PV data is available and relevant, sum it as well
            if scenario_key == "efficient with PV":
                district_demand['hourly_pv_kWh'] = [sum(x) for x in zip(district_demand['hourly_pv_kWh'], building_data['hourly_pv_kWh'])]

        # The PV+4Pipe+DHW scenario reuses the PV profile of the "efficient with PV" scenario
        if scenario_key == 'efficient with PV+4Pipe+DHW':
            district_demand['hourly_pv_kWh'] = districts_demands["efficient with PV"]['hourly_pv_kWh']
        districts_demands[scenario_key] = district_demand

    return districts_demands

# Example usage
# 'scenario' holds the per-building hourly results collected for each scenario above
demand_types = [
    'heating_consumption_kWh',
    'cooling_consumption_kWh',
    'domestic_hot_water_consumption_kWh',
    'appliances_consumption_kWh',
    'lighting_consumption_kWh',
    # 'hourly_pv_kWh'  # Include this only if you want to consider PV data
]

# Call the function with the scenario data
district_demand = extract_and_sum_demand_data(scenario, demand_types)

# """Exporters"""
# Export the district balance to an Excel file
excel_file_path = output_path / 'districts_balance.xlsx'

# Create an Excel writer object and write one sheet per scenario
with pd.ExcelWriter(excel_file_path, engine='xlsxwriter') as writer:
    for scenario_name, demands in district_demand.items():
        # Convert demands to a DataFrame
        df_demands = pd.DataFrame(demands)
        # Excel sheet names are limited to 31 characters
        sheet_name = str(scenario_name)
        if len(sheet_name) > 31:
            sheet_name = sheet_name[:31]
        # Write the DataFrame to a sheet named after the scenario
        df_demands.to_excel(writer, sheet_name=sheet_name, index=False)

print("District balance data is exported successfully")

# The 'scenario' dictionary is structured as:
# scenario = {
#     'baseline': { ... },
#     'efficient with PV': { ... },
#     'efficient with PV+4Pipe+DHW': { ... }
# }

def dict_to_df_col_wise(building_data):
    """
    Converts a dictionary of building data to a DataFrame.

    Args:
        building_data (dict): Dictionary keyed by building id, where each value is a dictionary
            of hourly data for the various demand types.

    Returns:
        pd.DataFrame: DataFrame with one column per building and demand type.
    """
    # Create a dictionary to hold one DataFrame per building
    df_dict = {}

    # Loop over each building
    for building_id, data in building_data.items():
        # Create a DataFrame for this building's data
        building_df = pd.DataFrame(data)

        # Rename columns to include the building id
        building_df.columns = [f"{building_id}_{col}" for col in building_df.columns]

        # Add this DataFrame to the dictionary
        df_dict[building_id] = building_df

    # Concatenate all building DataFrames column-wise
    result_df = pd.concat(df_dict.values(), axis=1)

    return result_df

# Create DataFrames for each scenario
baseline_df = dict_to_df_col_wise(scenario['baseline'])
efficient_with_pv_df = dict_to_df_col_wise(scenario['efficient with PV'])
efficient_with_pv_hps = dict_to_df_col_wise(scenario['efficient with PV+4Pipe+DHW'])

# Write the DataFrames to an Excel file, one sheet per scenario
with pd.ExcelWriter(output_path / 'scenario_data.xlsx') as writer:
    baseline_df.to_excel(writer, sheet_name='baseline', index=True)
    efficient_with_pv_df.to_excel(writer, sheet_name='efficient with PV', index=True)
    efficient_with_pv_hps.to_excel(writer, sheet_name='efficient with HPs_2', index=True)

print("Hourly data has been successfully exported per building to scenario_data.xlsx")

def convert_hourly_to_monthly(hourly_data):
    """
    Converts hourly data to monthly data by summing up the values for each month.

    Args:
        hourly_data (list): List of hourly data (length 8760).

    Returns:
        list: List of monthly data (length 12).
    """
    hourly_series = pd.Series(hourly_data, index=pd.date_range(start='1/1/2023', periods=8760, freq='H'))
    monthly_data = hourly_series.resample('M').sum()
    return monthly_data.tolist()
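
# Worked example for convert_hourly_to_monthly: a constant 1 kWh per hour over 2023
# gives [744, 672, 744, 720, 744, 720, 744, 744, 720, 744, 720, 744] (hours per month).
# Recent pandas releases prefer the frequency aliases 'h' and 'ME' over the 'H' and 'M'
# used above.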

def plot_stacked_demands_vs_pv(district_demand, demand_types, output_path, pv_type='hourly_pv_kWh'):
    """
    Plots the stacked monthly demand for each scenario and compares it to the PV data.

    Args:
        district_demand (dict): Dictionary with scenario keys and demand data.
        demand_types (list): List of demand types to plot.
        output_path (str): Path to save the plots.
        pv_type (str): The PV data type to compare against.
    """
    os.makedirs(output_path, exist_ok=True)

    for scenario_key, demand_data in district_demand.items():
        # Convert hourly data to monthly data for each demand type
        monthly_data = {demand_type: convert_hourly_to_monthly(demand_data[demand_type])
                        for demand_type in demand_types}
        monthly_pv = convert_hourly_to_monthly(demand_data.get(pv_type, [0] * 8760))

        # Create a DataFrame for easier plotting
        combined_data = pd.DataFrame(monthly_data)
        combined_data['Month'] = range(1, 13)
        combined_data['PV'] = monthly_pv

        # Plotting
        fig, ax1 = plt.subplots(figsize=(14, 8))

        # Plot stacked demands
        combined_data.set_index('Month', inplace=True)
        combined_data[demand_types].plot(kind='bar', stacked=True, ax=ax1, colormap='tab20')

        ax1.set_xlabel('Month')
        ax1.set_ylabel('Energy Demand (kWh)')
        ax1.set_title(f'Monthly Energy Demand and PV Generation for {scenario_key}')

        # Plot PV data on the secondary y-axis
        ax2 = ax1.twinx()
        ax2.plot(combined_data.index, combined_data['PV'], color='black', linestyle='-', marker='o',
                 label='PV Generation')
        ax2.set_ylabel('PV Generation (kWh)')

        # Add legends
        ax1.legend(loc='upper left')
        ax2.legend(loc='upper right')

        ax1.set_xticks(combined_data.index)
        ax1.set_xticklabels(['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])

        # Save the plot
        plt.savefig(os.path.join(output_path, f'{scenario_key}_monthly_demand_vs_pv.png'))
        plt.close()

# Example usage
# district_demand = extract_and_sum_demand_data(scenario, demand_types)

# Specify the demand types to stack against the PV generation
demand_types = [
    'heating_consumption_kWh',
    'cooling_consumption_kWh',
    'domestic_hot_water_consumption_kWh',
    'appliances_consumption_kWh',
    'lighting_consumption_kWh'
]

# Plot the data
plot_stacked_demands_vs_pv(district_demand, demand_types, output_path)

import csv