Compare commits

...

2 Commits

Author        SHA1        Message                                         Date
Majid Rezaei  39d5dcfdaa  feature: add district heating network creator   2024-06-24 11:48:01 -04:00
Majid Rezaei  c40900bd30  feature: add simultinity factor calculations    2024-06-23 19:17:23 -04:00
6 changed files with 1585533 additions and 0 deletions

1585013
input_files/roads.json  Normal file

File diff suppressed because it is too large

48
main.py

@@ -0,0 +1,48 @@
from pathlib import Path
from scripts.district_heating_network.simultinity_factor import DemandShiftProcessor
from scripts.ep_run_enrich import energy_plus_workflow
from hub.imports.geometry_factory import GeometryFactory
from hub.helpers.dictionaries import Dictionaries
from hub.imports.construction_factory import ConstructionFactory
from hub.imports.usage_factory import UsageFactory
from hub.imports.weather_factory import WeatherFactory
from scripts.district_heating_network.road_processor import road_processor
from scripts.district_heating_network.district_heating_network_creator import DistrictHeatingNetworkCreator
from scripts.district_heating_network.geojson_graph_creator import networkx_to_geojson
import hub.helpers.constants as cte

file_path = (Path(__file__).parent / 'input_files' / 'output_buildings.geojson')
output_path = (Path(__file__).parent / 'out_files').resolve()

# Create city object from GeoJSON file
city = GeometryFactory('geojson',
                       path=file_path,
                       height_field='height',
                       year_of_construction_field='year_of_construction',
                       function_field='function',
                       function_to_hub=Dictionaries().montreal_function_to_hub_function).city

# Enrich city data
ConstructionFactory('nrcan', city).enrich()
UsageFactory('nrcan', city).enrich()
WeatherFactory('epw', city).enrich()
energy_plus_workflow(city)

processor = DemandShiftProcessor(city)
processor.process_demands()

location = [45.43658024628209, -73.66134636914408]
roads_file = road_processor(location[1], location[0], 0.002)

dhn_creator = DistrictHeatingNetworkCreator(file_path, roads_file)
network_graph = dhn_creator.run()
networkx_to_geojson(network_graph)
dhn_creator.plot_network_graph()

print("Simultaneity Factor Heating:", city.simultaneity_factor_heating)
print("Simultaneity Factor Cooling:", city.simultaneity_factor_cooling)

282
scripts/district_heating_network/district_heating_network_creator.py

@@ -0,0 +1,282 @@
import json
import matplotlib.pyplot as plt
from shapely.geometry import Polygon, Point, LineString
import networkx as nx
from typing import List, Tuple
from rtree import index
import math


def haversine(lon1, lat1, lon2, lat2):
    """
    Calculate the great-circle distance between two points
    on the Earth specified by their longitude and latitude.
    """
    R = 6371000  # Radius of the Earth in meters
    phi1 = math.radians(lat1)
    phi2 = math.radians(lat2)
    delta_phi = math.radians(lat2 - lat1)
    delta_lambda = math.radians(lon2 - lon1)

    a = math.sin(delta_phi / 2.0) ** 2 + \
        math.cos(phi1) * math.cos(phi2) * \
        math.sin(delta_lambda / 2.0) ** 2
    c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
    return R * c  # Output distance in meters


class DistrictHeatingNetworkCreator:
    def __init__(self, buildings_file: str, roads_file: str):
        """
        Initialize the class with paths to the buildings and roads data files.

        :param buildings_file: Path to the GeoJSON file containing building data.
        :param roads_file: Path to the GeoJSON file containing roads data.
        """
        self.buildings_file = buildings_file
        self.roads_file = roads_file

    def run(self) -> nx.Graph:
        """
        Main method to execute the district heating network creation process.

        :return: NetworkX graph with nodes and edges representing the network.
        """
        self._load_and_process_data()
        self._find_nearest_roads()
        self._find_nearest_points()
        self._break_down_roads()
        self._create_graph()
        self._create_mst()
        self._iteratively_remove_edges()
        self._add_centroids_to_mst()
        self._convert_edge_weights_to_meters()
        return self.final_mst

    def _load_and_process_data(self):
        """
        Load and process the building and road data.
        """
        # Load building data
        with open(self.buildings_file, 'r') as file:
            city = json.load(file)

        self.centroids = []
        self.building_ids = []
        buildings = city['features']
        for building in buildings:
            coordinates = building['geometry']['coordinates'][0]
            building_polygon = Polygon(coordinates)
            centroid = building_polygon.centroid
            self.centroids.append(centroid)
            self.building_ids.append(building['id'])

        # Load road data
        with open(self.roads_file, 'r') as file:
            roads = json.load(file)

        line_features = [feature for feature in roads['features'] if feature['geometry']['type'] == 'LineString']
        self.lines = [LineString(feature['geometry']['coordinates']) for feature in line_features]
        self.cleaned_lines = [LineString([line.coords[0], line.coords[-1]]) for line in self.lines]

    def _find_nearest_roads(self):
        """
        Find the nearest road for each building centroid.
        """
        self.closest_roads = []
        unique_roads_set = set()

        # Create spatial index for roads
        idx = index.Index()
        for pos, line in enumerate(self.cleaned_lines):
            idx.insert(pos, line.bounds)

        for centroid in self.centroids:
            min_distance = float('inf')
            closest_road = None
            for pos in idx.nearest(centroid.bounds, 10):
                road = self.cleaned_lines[pos]
                distance = road.distance(centroid)
                if distance < min_distance:
                    min_distance = distance
                    closest_road = road
            if closest_road and closest_road.wkt not in unique_roads_set:
                unique_roads_set.add(closest_road.wkt)
                self.closest_roads.append(closest_road)

    def _find_nearest_points(self):
        """
        Find the nearest point on each closest road for each centroid.
        """
        def find_nearest_point_on_line(point: Point, line: LineString) -> Point:
            return line.interpolate(line.project(point))

        self.nearest_points = []
        for centroid in self.centroids:
            min_distance = float('inf')
            closest_road = None
            for road in self.closest_roads:
                distance = centroid.distance(road)
                if distance < min_distance:
                    min_distance = distance
                    closest_road = road
            if closest_road:
                nearest_point = find_nearest_point_on_line(centroid, closest_road)
                self.nearest_points.append(nearest_point)

    def _break_down_roads(self):
        """
        Break down roads into segments connecting nearest points.
        """
        def break_down_roads(closest_roads: List[LineString], nearest_points_list: List[Point]) -> List[LineString]:
            new_segments = []
            for road in closest_roads:
                coords = list(road.coords)
                points_on_road = [point for point in nearest_points_list if road.distance(point) < 0.000000001]
                sorted_points = sorted(points_on_road, key=lambda point: road.project(point))
                sorted_points.insert(0, Point(coords[0]))
                sorted_points.append(Point(coords[-1]))
                for i in range(len(sorted_points) - 1):
                    segment = LineString([sorted_points[i], sorted_points[i + 1]])
                    new_segments.append(segment)
            return new_segments

        self.new_segments = break_down_roads(self.closest_roads, self.nearest_points)
        self.cleaned_lines = [line for line in self.cleaned_lines if line not in self.closest_roads]
        self.cleaned_lines.extend(self.new_segments)

    def _create_graph(self):
        """
        Create a NetworkX graph from the cleaned lines.
        """
        self.G = nx.Graph()
        for line in self.cleaned_lines:
            coords = list(line.coords)
            for i in range(len(coords) - 1):
                self.G.add_edge(coords[i], coords[i + 1], weight=Point(coords[i]).distance(Point(coords[i + 1])))

    def _create_mst(self):
        """
        Create a Minimum Spanning Tree (MST) from the graph.
        """
        def find_paths_between_nearest_points(g: nx.Graph, nearest_points: List[Point]) -> List[Tuple]:
            edges = []
            for i, start_point in enumerate(nearest_points):
                start = (start_point.x, start_point.y)
                for end_point in nearest_points[i + 1:]:
                    end = (end_point.x, end_point.y)
                    if nx.has_path(g, start, end):
                        path = nx.shortest_path(g, source=start, target=end, weight='weight')
                        path_edges = list(zip(path[:-1], path[1:]))
                        edges.extend((u, v, g[u][v]['weight']) for u, v in path_edges)
            return edges

        edges = find_paths_between_nearest_points(self.G, self.nearest_points)
        h = nx.Graph()
        h.add_weighted_edges_from(edges)
        mst = nx.minimum_spanning_tree(h, weight='weight')

        final_edges = []
        for u, v in mst.edges():
            if nx.has_path(self.G, u, v):
                path = nx.shortest_path(self.G, source=u, target=v, weight='weight')
                path_edges = list(zip(path[:-1], path[1:]))
                final_edges.extend((x, y, self.G[x][y]['weight']) for x, y in path_edges)

        self.final_mst = nx.Graph()
        self.final_mst.add_weighted_edges_from(final_edges)

    def _iteratively_remove_edges(self):
        """
        Iteratively remove edges that do not have any nearest points and have one end with only one connection.
        Also remove nodes that don't have any connections and street nodes with only one connection.
        """
        nearest_points_tuples = [(point.x, point.y) for point in self.nearest_points]

        def find_edges_to_remove(graph: nx.Graph) -> List[Tuple]:
            edges_to_remove = []
            for u, v, d in graph.edges(data=True):
                if u not in nearest_points_tuples and v not in nearest_points_tuples:
                    if graph.degree(u) == 1 or graph.degree(v) == 1:
                        edges_to_remove.append((u, v, d))
            return edges_to_remove

        def find_nodes_to_remove(graph: nx.Graph) -> List[Tuple]:
            nodes_to_remove = []
            for node in graph.nodes():
                if graph.degree(node) == 0:
                    nodes_to_remove.append(node)
            return nodes_to_remove

        edges_to_remove = find_edges_to_remove(self.final_mst)
        self.final_mst_steps = [list(self.final_mst.edges(data=True))]

        while edges_to_remove or find_nodes_to_remove(self.final_mst):
            self.final_mst.remove_edges_from(edges_to_remove)
            nodes_to_remove = find_nodes_to_remove(self.final_mst)
            self.final_mst.remove_nodes_from(nodes_to_remove)
            edges_to_remove = find_edges_to_remove(self.final_mst)
            self.final_mst_steps.append(list(self.final_mst.edges(data=True)))

        def find_single_connection_street_nodes(graph: nx.Graph) -> List[Tuple]:
            single_connection_street_nodes = []
            for node in graph.nodes():
                if node not in nearest_points_tuples and graph.degree(node) == 1:
                    single_connection_street_nodes.append(node)
            return single_connection_street_nodes

        single_connection_street_nodes = find_single_connection_street_nodes(self.final_mst)

        while single_connection_street_nodes:
            for node in single_connection_street_nodes:
                neighbors = list(self.final_mst.neighbors(node))
                self.final_mst.remove_node(node)
                for neighbor in neighbors:
                    if self.final_mst.degree(neighbor) == 0:
                        self.final_mst.remove_node(neighbor)
            single_connection_street_nodes = find_single_connection_street_nodes(self.final_mst)
            self.final_mst_steps.append(list(self.final_mst.edges(data=True)))

    def _add_centroids_to_mst(self):
        """
        Add centroids to the final MST graph and connect them to their associated node on the graph.
        """
        for i, centroid in enumerate(self.centroids):
            centroid_tuple = (centroid.x, centroid.y)
            building_id = self.building_ids[i]
            self.final_mst.add_node(centroid_tuple, type='building', id=building_id)

            nearest_point = None
            min_distance = float('inf')
            for node in self.final_mst.nodes():
                if self.final_mst.nodes[node].get('type') != 'building':
                    node_point = Point(node)
                    distance = centroid.distance(node_point)
                    if distance < min_distance:
                        min_distance = distance
                        nearest_point = node

            if nearest_point:
                self.final_mst.add_edge(centroid_tuple, nearest_point, weight=min_distance)

    def _convert_edge_weights_to_meters(self):
        """
        Convert all edge weights in the final MST graph to meters using the Haversine formula.
        """
        for u, v, data in self.final_mst.edges(data=True):
            lon1, lat1 = u
            lon2, lat2 = v
            distance = haversine(lon1, lat1, lon2, lat2)
            self.final_mst[u][v]['weight'] = distance

    def plot_network_graph(self):
        """
        Plot the network graph using matplotlib and networkx.
        """
        plt.figure(figsize=(15, 10))
        pos = {node: (node[0], node[1]) for node in self.final_mst.nodes()}
        nx.draw_networkx_nodes(self.final_mst, pos, node_color='blue', node_size=50)
        nx.draw_networkx_edges(self.final_mst, pos, edge_color='gray')
        plt.title('District Heating Network Graph')
        plt.axis('off')
        plt.show()
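A self-contained sketch (illustrative only, reusing the same haversine formula as above) of what the final _convert_edge_weights_to_meters step does: node keys are (longitude, latitude) tuples, and the degree-based edge weights are replaced by great-circle distances in meters.

import math
import networkx as nx

def haversine(lon1, lat1, lon2, lat2):
    # Same great-circle formula as in DistrictHeatingNetworkCreator
    r = 6371000  # Earth radius in meters
    phi1, phi2 = math.radians(lat1), math.radians(lat2)
    d_phi = math.radians(lat2 - lat1)
    d_lambda = math.radians(lon2 - lon1)
    a = math.sin(d_phi / 2) ** 2 + math.cos(phi1) * math.cos(phi2) * math.sin(d_lambda / 2) ** 2
    return 2 * r * math.atan2(math.sqrt(a), math.sqrt(1 - a))

# Two illustrative nodes keyed by (longitude, latitude), roughly 100 m apart in Montreal
n1 = (-73.6613, 45.4366)
n2 = (-73.6600, 45.4366)
g = nx.Graph()
g.add_edge(n1, n2, weight=0.0013)           # planar weight, still in degrees
g[n1][n2]['weight'] = haversine(*n1, *n2)   # replaced with a great-circle distance in meters
print(round(g[n1][n2]['weight']), 'm')      # ~101 m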

54
scripts/district_heating_network/geojson_graph_creator.py

@@ -0,0 +1,54 @@
import json
from shapely import LineString, Point
import networkx as nx
from pathlib import Path


def networkx_to_geojson(graph: nx.Graph) -> Path:
    """
    Convert a NetworkX graph to GeoJSON format and write it to disk.

    :param graph: A NetworkX graph.
    :return: Path to the written GeoJSON file.
    """
    features = []

    for u, v, data in graph.edges(data=True):
        line = LineString([u, v])
        feature = {
            "type": "Feature",
            "geometry": {
                "type": "LineString",
                "coordinates": list(line.coords)
            },
            "properties": {
                "weight": data.get("weight", 1.0)
            }
        }
        features.append(feature)

    for node, data in graph.nodes(data=True):
        point = Point(node)
        feature = {
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": list(point.coords)[0]
            },
            "properties": {
                "type": data.get("type", "unknown"),
                "id": data.get("id", "N/A")
            }
        }
        features.append(feature)

    geojson = {
        "type": "FeatureCollection",
        "features": features
    }

    output_geojson_file = Path('./out_files/network_graph.geojson').resolve()
    with open(output_geojson_file, 'w') as file:
        json.dump(geojson, file, indent=4)

    return output_geojson_file
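A hedged sketch of the feature layout networkx_to_geojson produces, reproduced for a tiny two-node graph without the file write (the './out_files' directory is assumed to exist in the project, so the snippet below only prints the structure). Coordinates and the 'building_1' id are made up for illustration.

import json
import networkx as nx

# Tiny illustrative graph: one pipe between a street junction and a building centroid
g = nx.Graph()
g.add_node((-73.6600, 45.4366))                                    # street junction (no attributes)
g.add_node((-73.6613, 45.4368), type='building', id='building_1')  # building centroid
g.add_edge((-73.6600, 45.4366), (-73.6613, 45.4368), weight=105.2)

# Same layout as networkx_to_geojson: edges become LineString features, nodes become Point features
features = []
for u, v, data in g.edges(data=True):
    features.append({
        "type": "Feature",
        "geometry": {"type": "LineString", "coordinates": [list(u), list(v)]},
        "properties": {"weight": data.get("weight", 1.0)},
    })
for node, data in g.nodes(data=True):
    features.append({
        "type": "Feature",
        "geometry": {"type": "Point", "coordinates": list(node)},
        "properties": {"type": data.get("type", "unknown"), "id": data.get("id", "N/A")},
    })
print(json.dumps({"type": "FeatureCollection", "features": features}, indent=2))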

56
scripts/district_heating_network/road_processor.py

@@ -0,0 +1,56 @@
from pathlib import Path
from shapely.geometry import Polygon, Point, shape
import json


def road_processor(x, y, diff):
    """
    Process a roads GeoJSON (.json) file to find the roads that have at least one node within a specified selection box.

    Parameters:
    x (float): The x-coordinate (longitude) of the center of the selection box.
    y (float): The y-coordinate (latitude) of the center of the selection box.
    diff (float): The half-width of the selection box, in degrees.

    Returns:
    Path: The file path of the output GeoJSON file containing the selected roads.
    """
    # Enlarge the selection half-width (diff becomes 3 * diff)
    diff += 2 * diff

    # Define the selection polygon
    selection_box = Polygon([
        [x + diff, y - diff],
        [x - diff, y - diff],
        [x - diff, y + diff],
        [x + diff, y + diff]
    ])

    # Define input and output file paths
    geojson_file = Path("./input_files/roads.json").resolve()
    output_file = Path('./input_files/output_roads.geojson').resolve()

    # Initialize a list to store the roads in the region
    roads_in_region = []

    # Read the GeoJSON file
    with open(geojson_file, 'r') as file:
        roads = json.load(file)

    line_features = [feature for feature in roads['features'] if feature['geometry']['type'] == 'LineString']

    # Check each road feature
    for feature in line_features:
        road_shape = shape(feature['geometry'])
        # Check if any node of the road is inside the selection box
        if any(selection_box.contains(Point(coord)) for coord in road_shape.coords):
            roads_in_region.append(feature)

    # Create a new GeoJSON structure with the selected roads
    output_geojson = {
        "type": "FeatureCollection",
        "features": roads_in_region
    }

    # Write the selected roads to the output file
    with open(output_file, 'w') as outfile:
        json.dump(output_geojson, outfile)

    return output_file
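A quick sketch of the selection box that results from the road_processor(location[1], location[0], 0.002) call in main.py; the tripling of diff mirrors the expansion in the function above, and the metric sizes are rough conversions for this latitude.

# Selection box for the call in main.py: x = longitude, y = latitude, diff = 0.002
x, y, diff = -73.66134636914408, 45.43658024628209, 0.002
diff += 2 * diff  # same expansion as in road_processor: half-width becomes 0.006 degrees

corners = [
    (x + diff, y - diff),
    (x - diff, y - diff),
    (x - diff, y + diff),
    (x + diff, y + diff),
]
print(corners)
# At this latitude, 0.006 degrees is roughly 470 m of longitude and 670 m of latitude,
# so the full box is about 0.9 km east-west by 1.3 km north-south.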

80
scripts/district_heating_network/simultinity_factor.py

@@ -0,0 +1,80 @@
import pandas as pd
import numpy as np


class DemandShiftProcessor:
    def __init__(self, city):
        self.city = city

    def random_shift(self, series):
        # Shift the hourly profile by a random offset of up to 0.5% of its length, wrapping around the year
        shift_amount = np.random.randint(0, round(0.005 * len(series)))
        return series.shift(shift_amount).fillna(series.shift(shift_amount - len(series)))

    def process_demands(self):
        heating_dfs = []
        cooling_dfs = []
        for building in self.city.buildings:
            heating_df = self.convert_building_to_dataframe(building, 'heating')
            cooling_df = self.convert_building_to_dataframe(building, 'cooling')
            heating_df.set_index('Date/Time', inplace=True)
            cooling_df.set_index('Date/Time', inplace=True)
            shifted_heating_demands = heating_df.apply(self.random_shift, axis=0)
            shifted_cooling_demands = cooling_df.apply(self.random_shift, axis=0)
            self.update_building_demands(building, shifted_heating_demands, 'heating')
            self.update_building_demands(building, shifted_cooling_demands, 'cooling')
            heating_dfs.append(shifted_heating_demands)
            cooling_dfs.append(shifted_cooling_demands)
        combined_heating_df = pd.concat(heating_dfs, axis=1)
        combined_cooling_df = pd.concat(cooling_dfs, axis=1)
        self.calculate_and_set_simultaneity_factor(combined_heating_df, 'heating')
        self.calculate_and_set_simultaneity_factor(combined_cooling_df, 'cooling')

    def convert_building_to_dataframe(self, building, demand_type):
        if demand_type == 'heating':
            data = {
                "Date/Time": self.generate_date_time_index(),
                "Heating_Demand": building.heating_demand["hour"]
            }
        else:  # cooling
            data = {
                "Date/Time": self.generate_date_time_index(),
                "Cooling_Demand": building.cooling_demand["hour"]
            }
        return pd.DataFrame(data)

    def generate_date_time_index(self):
        # Generate an hourly date/time index for a full (non-leap) year, using 2013 as the reference year
        date_range = pd.date_range(start="2013-01-01 00:00:00", end="2013-12-31 23:00:00", freq='H')
        return date_range.strftime('%m/%d %H:%M:%S').tolist()

    def update_building_demands(self, building, shifted_demands, demand_type):
        if demand_type == 'heating':
            shifted_series = shifted_demands["Heating_Demand"]
            building.heating_demand = self.calculate_new_demands(shifted_series)
        else:  # cooling
            shifted_series = shifted_demands["Cooling_Demand"]
            building.cooling_demand = self.calculate_new_demands(shifted_series)

    def calculate_new_demands(self, shifted_series):
        new_demand = {
            "hour": shifted_series.tolist(),
            "month": self.calculate_monthly_demand(shifted_series),
            "year": [shifted_series.sum()]
        }
        return new_demand

    def calculate_monthly_demand(self, series):
        series.index = pd.to_datetime(series.index, format='%m/%d %H:%M:%S')
        monthly_demand = series.resample('M').sum()
        return monthly_demand.tolist()

    def calculate_and_set_simultaneity_factor(self, combined_df, demand_type):
        # Simultaneity factor = peak of the aggregated (coincident) demand / sum of the individual building peaks
        total_demand_original = combined_df.sum(axis=1)
        peak_total_demand_original = total_demand_original.max()
        individual_peak_demands = combined_df.max(axis=0)
        sum_individual_peak_demands = individual_peak_demands.sum()
        if demand_type == 'heating':
            self.city.simultaneity_factor_heating = peak_total_demand_original / sum_individual_peak_demands
        else:  # cooling
            self.city.simultaneity_factor_cooling = peak_total_demand_original / sum_individual_peak_demands
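A small worked example of the simultaneity-factor definition used by calculate_and_set_simultaneity_factor: the peak of the aggregated demand divided by the sum of the individual building peaks. The toy profiles below are made up; only the arithmetic mirrors the method above.

import pandas as pd

# Two toy hourly profiles whose peaks do not coincide
demand = pd.DataFrame({
    "building_a": [10.0, 40.0, 20.0, 5.0],
    "building_b": [30.0, 10.0, 25.0, 15.0],
})

peak_of_total = demand.sum(axis=1).max()   # 50.0: hours are summed first, then the maximum is taken
sum_of_peaks = demand.max(axis=0).sum()    # 40.0 + 30.0 = 70.0
simultaneity_factor = peak_of_total / sum_of_peaks
print(simultaneity_factor)                 # ~0.714: the coincident peak is about 71% of the sum of building peaks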