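"""Energy validation workflow.

Randomly samples Montreal buildings by CODE_UTILI from a provided geojson feature set,
enriches each one with the hub import factories (geometry, weather, construction and usage),
runs SRA, the INSEL monthly energy balance (MEB) and EnergyPlus, and appends the monthly
demand results and the building metadata to results/energy_validation_results.xlsx.
"""
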
import datetime
import json
import os
import platform
import random
from pathlib import Path

import pandas as pd

from hub.imports.geometry_factory import GeometryFactory
from hub.imports.weather_factory import WeatherFactory
from hub.imports.construction_factory import ConstructionFactory
from hub.imports.usage_factory import UsageFactory
from hub.imports.results_factory import ResultFactory
from hub.exports.energy_building_exports_factory import EnergyBuildingsExportsFactory
from hub.exports.exports_factory import ExportsFactory
from hub.helpers.data.montreal_function_to_hub_function import MontrealFunctionToHubFunction

from sra import Sra
from meb import Meb
from meb_results import Results as MEBResults


class EnergyValidation:
  """Runs MEB (INSEL monthly energy balance) and EnergyPlus on randomly sampled buildings and collects the monthly demand results for validation."""

  def __init__(self):
    # input weather data, working folders and the climate file used by the exporters
    self.weather_file = Path('./data/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw').resolve()
    self.climate_file_name = 'Montreal'
    self.tmp_folder = Path('./tmp').resolve()
    self.storage_path = Path('./storage').resolve()
    self.climate_file = Path(f'{self.storage_path}/{self.climate_file_name}.cli').resolve()
    # output folders and the spreadsheet the results are appended to
    self.meb_folder = Path('./results/meb').resolve()
    self.ep_folder = Path('./results/ep').resolve()
    self.result_file = Path('./results/energy_validation_results.xlsx').resolve()

    # encoding used when reading the simulators' csv output
    if platform.system() == 'Windows':
      self.encoding = 'windows-1252'
    else:
      self.encoding = 'utf-8'
  def _sort_buildings(self, buildings_to_simulate):
    """Group the geojson building features by their CODE_UTILI usage code."""
    sorted_buildings = {}
    for building in buildings_to_simulate:
      code_utili = building['properties']['CODE_UTILI']
      sorted_buildings.setdefault(code_utili, []).append(building)
    return sorted_buildings
  def _save_meb_results(self, demand, metadata, building_area):
    """Reformat the meb demand and metadata csv output and append it to the results workbook."""
    building_name = metadata.iloc[0, 0].split(': ')[1]

    # convert from Wh to kWh/m^2
    demand *= 0.001 / building_area

    # replace the first (period) column with month/day/year labels
    months = [
      '1/1/2023', '2/1/2023', '3/1/2023', '4/1/2023', '5/1/2023', '6/1/2023',
      '7/1/2023', '8/1/2023', '9/1/2023', '10/1/2023', '11/1/2023', '12/1/2023'
    ]
    demand[demand.columns[0]] = months

    # insert building_name as the first column
    demand.insert(0, 'building_name', building_name)

    # swap the lighting and appliances columns (column-wise via to_numpy so the values are
    # actually exchanged rather than aliased)
    lighting = f'{building_name} lighting electrical demand Wh'
    appliances = f'{building_name} appliances electrical demand Wh'
    demand[[lighting, appliances]] = demand[[appliances, lighting]].to_numpy()

    # add the simulation source as the last column
    demand['source'] = 'meb'

    # format the building metadata
    '''
    metadata format:
    building_id
    number_of_storeys
    m2_per_storey
    total_m2
    total_m3
    year_of_construction
    building_usage
    TODO: number_of_adjacent_walls
    '''
    formatted_metadata = pd.DataFrame({
      'metadata': [
        metadata.iloc[0, 0].split(': ')[1],
        metadata.iloc[4, 0].split(': ')[1],
        metadata.iloc[3, 0].split(': ')[1],
        building_area,
        metadata.iloc[6, 0].split(': ')[1],
        metadata.iloc[1, 0].split(': ')[1],
        metadata.iloc[2, 0].split(': ')[1]
      ]}).transpose()

    # append the reformatted data to the results spreadsheet
    # (mode='a' requires the workbook and its 'Simulation data' and 'Metadata' sheets to already exist)
    with pd.ExcelWriter(self.result_file, engine='openpyxl', if_sheet_exists='overlay', mode='a') as writer:
      demand.to_excel(
        writer,
        startrow=writer.sheets['Simulation data'].max_row,
        sheet_name='Simulation data',
        index=False,
        header=False,
      )

      formatted_metadata.to_excel(
        writer,
        startrow=writer.sheets['Metadata'].max_row,
        sheet_name='Metadata',
        index=False,
        header=False,
      )
  def _save_ep_results(self, demand, building_area, building_name):
    """Reformat the EnergyPlus monthly meter output and append it to the results workbook."""
    # drop the EnergyPlus Date/Time column; month labels are added below
    demand.drop('Date/Time', axis=1, inplace=True)

    # convert from J to kWh/m^2
    demand *= 2.77778e-7 / building_area

    # add month/day/year labels as the first column
    months = [
      '1/1/2023', '2/1/2023', '3/1/2023', '4/1/2023', '5/1/2023', '6/1/2023',
      '7/1/2023', '8/1/2023', '9/1/2023', '10/1/2023', '11/1/2023', '12/1/2023'
    ]
    demand.insert(0, 'month', months)

    # insert building_name as the first column
    demand.insert(0, 'building_name', building_name)

    # TODO: add water usage once it is available from ep
    demand['water_usage'] = 'NA'

    # add the simulation source as the last column
    demand['source'] = 'ep'

    # append the reformatted data to the results spreadsheet
    with pd.ExcelWriter(self.result_file, engine='openpyxl', if_sheet_exists='overlay', mode='a') as writer:
      demand.to_excel(
        writer,
        startrow=writer.sheets['Simulation data'].max_row,
        sheet_name='Simulation data',
        index=False,
        header=False,
      )
  def run(self, building_set, building_quantities, cleanup=True):
    """Simulate the requested number of randomly sampled buildings per CODE_UTILI and store the results."""
    sorted_buildings = self._sort_buildings(building_set)

    for code_utili, quantity in building_quantities.items():
      if code_utili not in sorted_buildings:
        print(f'CODE_UTILI:{code_utili} was not found in the provided dataset.')
        continue

      for _ in range(quantity):
        building_to_simulate = []
        min_m2_satisfied = False

        # only select buildings with an area of 500 m^2 or more
        # (keeps sampling until one is found, so the code must have at least one such building)
        while not min_m2_satisfied:
          candidate = random.choice(sorted_buildings[code_utili])
          if candidate['properties']['bldgarea'] >= 500:
            building_to_simulate.append(candidate)
            min_m2_satisfied = True

        building_id = building_to_simulate[0]['id']

        # write the selected building to a temporary geojson file
        geojson = {
          "type": "FeatureCollection",
          "features": building_to_simulate
        }
        with open(f'tmp/{building_id}_energy_validation.geojson', 'w') as geojson_file:
          geojson_file.write(json.dumps(geojson, indent=2))

        # run the enrichment factories
        city = GeometryFactory(
          'geojson',
          path=f'tmp/{building_id}_energy_validation.geojson',
          height_field='building_height',
          year_of_construction_field='ANNEE_CONS',
          function_field='CODE_UTILI',
          function_to_hub=MontrealFunctionToHubFunction().dictionary
        ).city
        WeatherFactory('epw', city, file_name='./CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw').enrich()
        ConstructionFactory('nrcan', city).enrich()
        UsageFactory('nrcan', city).enrich()

        # make sure the city has a name and a climate reference before exporting
        city.name = f'{building_id}_energy_validation'
        if city.climate_reference_city is None:
          city.climate_reference_city = city.location
          self.climate_file_name = city.location
        city.climate_file = self.climate_file

        # run sra
        print(f'{building_id} starting sra')
        ExportsFactory(
          'sra', city, self.tmp_folder,
          weather_file=self.weather_file, weather_format='epw'
        ).export()
        sra_file = (self.tmp_folder / f'{city.name}_sra.xml').resolve()
        sra_start = datetime.datetime.now()
        Sra(sra_file, self.tmp_folder).run()
        sra_time = datetime.datetime.now() - sra_start
        ResultFactory('sra', city, self.tmp_folder).enrich()

        # run meb
        print(f'{building_id} starting meb')
        for building in city.buildings:
          building.attic_heated = 0
          building.basement_heated = 1

        EnergyBuildingsExportsFactory('insel_monthly_energy_balance', city, self.tmp_folder).export()
        meb_start = datetime.datetime.now()
        Meb(self.tmp_folder).run()
        meb_time = datetime.datetime.now() - meb_start
        ResultFactory('insel_meb', city, self.tmp_folder).enrich()
        results = MEBResults(city, self.meb_folder)
        results.print()

        # save meb results to energy_validation_results
        total_m2 = city.buildings[0].internal_zones[0].thermal_zones[0].total_floor_area
        meb_results = pd.read_csv(
          Path(f'{self.meb_folder}/demand.csv').resolve(), encoding=self.encoding
        )
        meb_metadata = pd.read_csv(
          Path(f'{self.meb_folder}/metadata.csv').resolve(), encoding=self.encoding
        )
        self._save_meb_results(meb_results, meb_metadata, total_m2)

        # run energyplus
        print(f'{building_id} starting energy plus')
        idf_file = EnergyBuildingsExportsFactory('idf', city, self.ep_folder).export_debug()
        ep_start = datetime.datetime.now()
        idf_file.run()
        ep_time = datetime.datetime.now() - ep_start

        # save ep results to energy_validation_results
        ep_results = pd.read_csv(
          Path(f'{self.ep_folder}/{building_id}_energy_validation_mtr.csv').resolve(),
          encoding=self.encoding
        )
        self._save_ep_results(ep_results, total_m2, city.buildings[0].name)

        print(f'{building_id} sra time: {sra_time}')
        print(f'{building_id} meb time: {meb_time}')
        print(f'{building_id} ep time: {ep_time}')

        if cleanup:
          # remove the intermediate files generated for this building
          for file in os.listdir(self.tmp_folder):
            os.remove(os.path.join(self.tmp_folder, file))
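

if __name__ == '__main__':
  # Hypothetical usage sketch, not part of the validation workflow itself: the geojson path
  # and the CODE_UTILI quantities below are placeholders. run() expects a list of geojson
  # building features plus a {CODE_UTILI: number_of_buildings} mapping, and it appends its
  # output to a pre-existing results/energy_validation_results.xlsx workbook.
  with open('./data/buildings.geojson', 'r') as dataset_file:
    building_set = json.load(dataset_file)['features']

  EnergyValidation().run(building_set, {'1000': 2, '6000': 1})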