Compare commits

1 commit: main...feature/fr

Commit: 9b5d002444

data/test.geojson  (new file, 88 lines)

@@ -0,0 +1,88 @@
+{
+  "type": "FeatureCollection",
+  "features": [
+    {
+      "type": "Feature",
+      "geometry": {
+        "type": "Polygon",
+        "coordinates": [
+          [
+            [
+              -81.36226736610345,
+              42.96538327741903
+            ],
+            [
+              -81.36226606408695,
+              42.96539751865368
+            ],
+            [
+              -81.36169147743192,
+              42.96539971389088
+            ],
+            [
+              -81.36169145437769,
+              42.96539577872095
+            ],
+            [
+              -81.36160750096705,
+              42.965396296109915
+            ],
+            [
+              -81.36167659355637,
+              42.96554865177412
+            ],
+            [
+              -81.36143594626003,
+              42.96560572998295
+            ],
+            [
+              -81.3612605719971,
+              42.9652212961923
+            ],
+            [
+              -81.36153890966588,
+              42.96521432093572
+            ],
+            [
+              -81.36172317324467,
+              42.9651651651121
+            ],
+            [
+              -81.3617438333442,
+              42.965204767431615
+            ],
+            [
+              -81.36220126106349,
+              42.96509245874135
+            ],
+            [
+              -81.36231896772036,
+              42.96532755243104
+            ],
+            [
+              -81.36230203685074,
+              42.96533218062799
+            ],
+            [
+              -81.36232060244981,
+              42.9653700064552
+            ],
+            [
+              -81.36226736610345,
+              42.96538327741903
+            ]
+          ]
+        ]
+      },
+      "id": 1,
+      "properties": {
+        "name": "Shifton Head Office",
+        "address": "N/A",
+        "function": "1000",
+        "height": 14,
+        "year_of_construction": 2017,
+        "adjacency": "attached"
+      }
+    }
+  ]
+}

data/test_updated.geojson  (new file, 1 line)

@@ -0,0 +1 @@
+{"type": "FeatureCollection", "features": [{"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[[-81.36226736610345, 42.96538327741903], [-81.36232060244981, 42.9653700064552], [-81.36230203685074, 42.96533218062799], [-81.36231896772036, 42.96532755243104], [-81.36220126106349, 42.96509245874135], [-81.3617438333442, 42.965204767431615], [-81.36172317324467, 42.9651651651121], [-81.36153890966588, 42.96521432093572], [-81.3612605719971, 42.9652212961923], [-81.36143594626003, 42.96560572998295], [-81.36167659355637, 42.96554865177412], [-81.36160750096705, 42.965396296109915], [-81.36169145437769, 42.96539577872095], [-81.36169147743192, 42.96539971389088], [-81.36226606408695, 42.96539751865368], [-81.36226736610345, 42.96538327741903]]]}, "id": 1, "properties": {"name": "Shifton Head Office", "address": "N/A", "function": "1000", "height": 14, "year_of_construction": 2017, "adjacency": "attached"}}]}

@@ -10,6 +10,7 @@ import copy
 import os
 import shutil
 import subprocess
+from datetime import datetime
 
 import hub.exports.building_energy.idf_helper as idf_cte
 import hub.helpers.constants as cte

@@ -47,6 +48,7 @@ class CercIdf(IdfBase):
   _thermostat_added_to_idf = {}
 
   def __init__(self, city, output_path, idf_file_path, idd_file_path, epw_file_path, target_buildings=None):
+    self._start = datetime.now()
     super().__init__(city, output_path, idf_file_path, idd_file_path, epw_file_path, target_buildings)
     self._add_surfaces = IdfSurfaces.add
     self._add_file_schedule = IdfFileSchedule.add

@@ -231,6 +233,7 @@ class CercIdf(IdfBase):
     # Merge files
     self._merge_files()
     self._add_outputs()
+    print(f'{len(self._city.buildings)} buildings export completed in: {datetime.now() - self._start}')
 
   @property
   def _energy_plus(self):

@@ -245,4 +248,5 @@ class CercIdf(IdfBase):
            '--readvars',
            '--output-prefix', f'{self._city.name}_',
            self._output_file_path]
+    print(cmd)
     subprocess.run(cmd, cwd=self._output_path)

@@ -169,7 +169,7 @@ class EnergyAde:
   def _building_geometry(self, building, building_dic, city):
 
     building_dic['bldg:Building']['bldg:function'] = building.function
-    building_dic['bldg:Building']['bldg:usage'] = building.usages
+    building_dic['bldg:Building']['bldg:usage'] = building.usages_percentage
     building_dic['bldg:Building']['bldg:yearOfConstruction'] = building.year_of_construction
     building_dic['bldg:Building']['bldg:roofType'] = building.roof_type
     building_dic['bldg:Building']['bldg:measuredHeight'] = {

@@ -8,12 +8,10 @@ Code contributors: Pilar Monsalvete Alvarez de Uribarri pilar.monsalvete@concord
 """
 import copy
 import datetime
-import shutil
-import subprocess
+import glob
+import os
 from pathlib import Path
 
 from geomeppy import IDF
 
 import hub.helpers.constants as cte
 from hub.city_model_structure.attributes.schedule import Schedule
 from hub.city_model_structure.building_demand.thermal_zone import ThermalZone

@@ -531,7 +529,6 @@ class Idf:
     self._remove_sizing_periods()
     self._rename_building(self._city.name)
     self._lod = self._city.level_of_detail.geometry
-    is_target = False
     for building in self._city.buildings:
       is_target = building.name in self._target_buildings or building.name in self._adjacent_buildings
       for internal_zone in building.internal_zones:

@@ -659,20 +656,12 @@ class Idf:
     self._add_surfaces(building, building.name)
     return self._idf
 
-  @property
-  def _energy_plus(self):
-    return shutil.which('energyplus')
-
   def run(self):
-    cmd = [self._energy_plus,
-           '--weather', self._epw_file_path,
-           '--output-directory', self._output_path,
-           '--idd', self._idd_file_path,
-           '--expandobjects',
-           '--readvars',
-           '--output-prefix', f'{self._city.name}_',
-           self._idf_file_path]
-    subprocess.run(cmd, cwd=self._output_path)
+    """
+    Start the energy plus simulation
+    """
+    self._idf.run(expandobjects=False, readvars=True, output_directory=self._output_path,
+                  output_prefix=f'{self._city.name}_')
 
   def _add_block(self, building):
     _points = self._matrix_to_2d_list(building.foot_print.coordinates)

@@ -77,8 +77,8 @@ class CesiumjsTileset:
         'function': {
           'type': 'STRING'
         },
-        'usages': {
-          'type': 'LIST'
+        'usages_percentage': {
+          'type': 'STRING'
         }
       }
     }

@@ -146,7 +146,7 @@ class CesiumjsTileset:
           'max_height': building.max_height,
           'year_of_construction': building.year_of_construction,
           'function': building.function,
-          'usages': building.usages
+          'usages_percentage': building.usages
         }
       },
       'content': {

@@ -58,7 +58,8 @@ class ConstructionHelper:
     'Boucherville': '6',
     'Mascouche': '6',
     'Saint-Leonard': '6',
-    'La Prairie': '6'
+    'La Prairie': '6',
+    'London': '6'
   }
 
   _reference_city_to_israel_climate_zone = {

@@ -20,7 +20,7 @@ class EnergyPlus:
       header_parts = header.split(':')
       building_name = header_parts[0]
       variable = ':'.join(header_parts[1:]).strip()  # concat the rest and ensure that : it's reintroduced just in case
-      if variable == '':
+      if variable is '':
         continue
       if building_name not in self._summary_variables:
         self._building_energy_demands[variable] = []  # initialize the list of variables

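The hunk above splits an EnergyPlus results header into a building name and a variable name, then re-joins the remaining pieces so that any ':' inside the variable part is preserved. A small stand-alone sketch of that logic; the header string below is a made-up example, not taken from this repository:

```python
# Sketch of the header-splitting logic shown in the hunk above.
# The header string is a hypothetical EnergyPlus CSV column name.
header = 'Building_1:Zone Ideal Loads Supply Air Total Heating Energy [J](Hourly)'

header_parts = header.split(':')
building_name = header_parts[0]                # 'Building_1'
variable = ':'.join(header_parts[1:]).strip()  # everything after the first ':' kept intact

print(building_name)
print(variable)
```
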
@@ -9,7 +9,6 @@ import datetime
 import logging
 
 from sqlalchemy import Column, Integer, String, Sequence, ForeignKey, Float
-from sqlalchemy.dialects.postgresql import JSON
 from sqlalchemy import DateTime
 
 from hub.city_model_structure.building import Building

@@ -28,7 +27,7 @@ class CityObject(Models):
   type = Column(String, nullable=False)
   year_of_construction = Column(Integer, nullable=True)
   function = Column(String, nullable=True)
-  usage = Column(JSON, nullable=True)
+  usage = Column(String, nullable=True)
   volume = Column(Float, nullable=False)
   area = Column(Float, nullable=False)
   total_heating_area = Column(Float, nullable=False)

@@ -47,7 +46,7 @@ class CityObject(Models):
     self.type = building.type
     self.year_of_construction = building.year_of_construction
     self.function = building.function
-    self.usage = building.usages
+    self.usage = building.usages_percentage
     self.volume = building.volume
     self.area = building.floor_area
     self.roof_area = sum(roof.solid_polygon.area for roof in building.roofs)

@@ -1,4 +1,4 @@
 """
 Hub version number
 """
-__version__ = '0.3.0.5'
+__version__ = '0.3.0.0'

main.py  (new file, 21 lines)

@@ -0,0 +1,21 @@
+from hub.imports.geometry_factory import GeometryFactory
+from hub.helpers.dictionaries import Dictionaries
+from hub.imports.construction_factory import ConstructionFactory
+from hub.imports.results_factory import ResultFactory
+from hub.exports.exports_factory import ExportsFactory
+import subprocess
+from pathlib import Path
+from hub.imports.weather_factory import WeatherFactory
+
+input_file = "data/test_updated.geojson"
+
+city = GeometryFactory(
+  "geojson",
+  input_file,
+  height_field="height",
+  year_of_construction_field="year_of_construction",
+  function_field="function",
+  function_to_hub=Dictionaries().montreal_function_to_hub_function).city
+ConstructionFactory('nrcan', city).enrich()
+
+print('done')

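Several of main.py's imports (ResultFactory, ExportsFactory, subprocess, Path) are not used by the committed script, which stops after the NRCAN construction enrichment. For reference, a hypothetical variant of the same workflow that also runs the weather enrichment step; it reuses only calls that appear verbatim elsewhere in this comparison (the WeatherFactory('epw', city).enrich() call from the TestExports hunk further down) and assumes nothing beyond that:

```python
# Sketch only: main.py's pipeline plus the weather enrichment step used in
# TestExports below. Assumes the hub package and the GeoJSON file are available.
from hub.imports.geometry_factory import GeometryFactory
from hub.helpers.dictionaries import Dictionaries
from hub.imports.construction_factory import ConstructionFactory
from hub.imports.weather_factory import WeatherFactory

input_file = "data/test_updated.geojson"

city = GeometryFactory(
  "geojson",
  input_file,
  height_field="height",
  year_of_construction_field="year_of_construction",
  function_field="function",
  function_to_hub=Dictionaries().montreal_function_to_hub_function).city
ConstructionFactory('nrcan', city).enrich()
WeatherFactory('epw', city).enrich()  # as called in TestExports

print('done')
```
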
@@ -1,5 +1,5 @@
 xmltodict
-numpy
+numpy==1.26.4
 trimesh[all]
 pyproj
 pandas

setup.py  (1 change)

@@ -59,7 +59,6 @@ setup(
     'hub.exports',
     'hub.exports.building_energy',
     'hub.exports.building_energy.idf_files',
-    'hub.exports.building_energy.idf_helper',
     'hub.exports.building_energy.insel',
     'hub.exports.energy_systems',
     'hub.exports.formats',

shapely_test.py  (new file, 46 lines)

@@ -0,0 +1,46 @@
+import json
+from shapely.geometry import shape, mapping, MultiPolygon, Polygon
+from shapely.ops import unary_union
+from pathlib import Path
+
+def enforce_right_hand_rule(geojson):
+  for feature in geojson['features']:
+    geometry = shape(feature['geometry'])
+    if isinstance(geometry, Polygon):
+      if not geometry.exterior.is_ccw:
+        geometry = Polygon(geometry.exterior.coords[::-1], [interior.coords[::-1] for interior in geometry.interiors])
+    elif isinstance(geometry, MultiPolygon):
+      new_polygons = []
+      for polygon in geometry.geoms:  # Use .geoms to iterate over the individual polygons
+        if not polygon.exterior.is_ccw:
+          polygon = Polygon(polygon.exterior.coords[::-1], [interior.coords[::-1] for interior in polygon.interiors])
+        new_polygons.append(polygon)
+      geometry = MultiPolygon(new_polygons)
+    feature['geometry'] = mapping(geometry)
+  return geojson
+
+data_path = Path(__file__).parent.parent / 'hub/data'  # Adjust this path as needed
+# Load the GeoJSON file
+input_filepath = data_path / 'test.geojson'
+output_filepath = data_path / 'test_updated.geojson'
+
+with open(input_filepath, 'r') as f:
+  geojson_data = json.load(f)
+
+# Iterate through the features and convert MultiPolygons to Polygons
+for feature in geojson_data['features']:
+  geom = shape(feature['geometry'])
+  if isinstance(geom, MultiPolygon):
+    # Merge the polygons into a single polygon
+    merged_polygon = unary_union(geom)
+    # Convert the merged polygon back to GeoJSON format
+    feature['geometry'] = mapping(merged_polygon)
+
+# Enforce the right-hand rule
+geojson_data = enforce_right_hand_rule(geojson_data)
+
+# Save the updated GeoJSON file
+with open(output_filepath, 'w') as f:
+  json.dump(geojson_data, f)
+
+

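shapely_test.py enforces the GeoJSON right-hand rule by reversing ring coordinates by hand. For comparison only (not part of this change set), Shapely also ships an orientation helper; a minimal sketch:

```python
# Sketch only: shapely.geometry.polygon.orient normalizes ring orientation.
# sign=1.0 produces a counter-clockwise exterior, matching the right-hand rule.
from shapely.geometry import Polygon
from shapely.geometry.polygon import orient

clockwise_square = Polygon([(0, 0), (0, 1), (1, 1), (1, 0)])  # clockwise exterior
print(clockwise_square.exterior.is_ccw)  # False

ccw_square = orient(clockwise_square, sign=1.0)
print(ccw_square.exterior.is_ccw)  # True
```
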
@@ -144,8 +144,7 @@ class TestExports(TestCase):
     UsageFactory('nrcan', city).enrich()
     WeatherFactory('epw', city).enrich()
     try:
-      _idf = EnergyBuildingsExportsFactory('idf', city, self._output_path).export()
-      _idf.run()
+      idf = EnergyBuildingsExportsFactory('idf', city, self._output_path).export()
     except Exception:
       self.fail("Idf ExportsFactory raised ExceptionType unexpectedly!")
 