Compare commits

...

10 Commits

12 changed files with 1586037 additions and 62 deletions

View File

@@ -2,7 +2,7 @@
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="C:\Users\marie.proville\miniconda3\envs\Hub" jdkType="Python SDK" />
<orderEntry type="jdk" jdkName="Python 3.9 (hub)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View File

@@ -3,5 +3,5 @@
<component name="Black">
<option name="sdkUUID" value="97386509-ec9b-4dd7-929b-7585219c0447" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="hub" project-jdk-type="Python SDK" />
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (hub)" project-jdk-type="Python SDK" />
</project>

View File

@@ -198,7 +198,7 @@
<equipments>
<generation_id>3</generation_id>
<distribution_id>8</distribution_id>
g </equipments>
</equipments>
</system>
<system id="5">
<name>Single zone packaged rooftop unit with electrical resistance furnace and baseboards and fuel boiler for acs</name>
@@ -240,7 +240,7 @@ g </equipments>
<demand>domestic_hot_water</demand>
</demands>
<equipments>
<generation_id>2</generation_id>
<generation_id>1</generation_id>
<distribution_id>3</distribution_id>
</equipments>
</system>
@@ -302,7 +302,7 @@ g </equipments>
</demands>
<equipments>
<generation_id>5</generation_id>
<distribution_id>6</distribution_id>
<distribution_id>4</distribution_id>
</equipments>
</system>
<system id="15">

1585013
input_files/roads.json Normal file

File diff suppressed because it is too large

89
main.py
View File

@@ -1,4 +1,5 @@
from pathlib import Path
from scripts.district_heating_network.directory_manager import DirectoryManager
import subprocess
from scripts.ep_run_enrich import energy_plus_workflow
from hub.imports.geometry_factory import GeometryFactory
@@ -23,64 +24,42 @@ from hub.exports.exports_factory import ExportsFactory
from scripts.pv_feasibility import pv_feasibility
import matplotlib.pyplot as plt
import numpy as np
# Specify the GeoJSON file path
data = {}
input_files_path = (Path(__file__).parent / 'input_files')
input_files_path.mkdir(parents=True, exist_ok=True)
# geojson_file = process_geojson(x=-73.58001358793511, y=45.496445294438715, diff=0.0001)
geojson_file_path = input_files_path / 'test_geojson1.geojson'
output_path = (Path(__file__).parent / 'out_files').resolve()
output_path.mkdir(parents=True, exist_ok=True)
energy_plus_output_path = output_path / 'energy_plus_outputs'
energy_plus_output_path.mkdir(parents=True, exist_ok=True)
simulation_results_path = (Path(__file__).parent / 'out_files' / 'simulation_results').resolve()
simulation_results_path.mkdir(parents=True, exist_ok=True)
sra_output_path = output_path / 'sra_outputs'
sra_output_path.mkdir(parents=True, exist_ok=True)
cost_analysis_output_path = output_path / 'cost_analysis'
cost_analysis_output_path.mkdir(parents=True, exist_ok=True)
base_path = Path(__file__).parent
dir_manager = DirectoryManager(base_path)
# Input files directory
input_files_path = dir_manager.create_directory('input_files')
geojson_file_path = input_files_path / 'output_buildings.geojson'
# Output files directory
output_path = dir_manager.create_directory('out_files')
# Subdirectories for output files
energy_plus_output_path = dir_manager.create_directory('out_files/energy_plus_outputs')
simulation_results_path = dir_manager.create_directory('out_files/simulation_results')
sra_output_path = dir_manager.create_directory('out_files/sra_outputs')
cost_analysis_output_path = dir_manager.create_directory('out_files/cost_analysis')
# Select city area
location = [45.53067276979674, -73.70234652694087]
process_geojson(x=location[1], y=location[0], diff=0.001)
# Create city object
city = GeometryFactory(file_type='geojson',
                       path=geojson_file_path,
                       height_field='height',
                       year_of_construction_field='year_of_construction',
                       function_field='function',
                       function_to_hub=Dictionaries().montreal_function_to_hub_function).city
# ConstructionFactory('nrcan', city).enrich()
# UsageFactory('nrcan', city).enrich()
# WeatherFactory('epw', city).enrich()
# energy_plus_workflow(city, energy_plus_output_path)
# data[f'{city.buildings[0].function}'] = city.buildings[0].heating_demand[cte.YEAR][0] / 3.6e9
# city.buildings[0].function = cte.COMMERCIAL
# ConstructionFactory('nrcan', city).enrich()
# UsageFactory('nrcan', city).enrich()
# energy_plus_workflow(city, energy_plus_output_path)
# data[f'{city.buildings[0].function}'] = city.buildings[0].heating_demand[cte.YEAR][0] / 3.6e9
# city.buildings[0].function = cte.MEDIUM_OFFICE
# ConstructionFactory('nrcan', city).enrich()
# UsageFactory('nrcan', city).enrich()
# energy_plus_workflow(city, energy_plus_output_path)
# data[f'{city.buildings[0].function}'] = city.buildings[0].heating_demand[cte.YEAR][0] / 3.6e9
# categories = list(data.keys())
# values = list(data.values())
# # Plotting
# fig, ax = plt.subplots(figsize=(10, 6), dpi=96)
# fig.suptitle('Impact of different usages on yearly heating demand', fontsize=16, weight='bold', alpha=.8)
# ax.bar(categories, values, color=['#2196f3', '#ff5a5f', '#4caf50'], width=0.6, zorder=2)
# ax.grid(which="major", axis='x', color='#DAD8D7', alpha=0.5, zorder=1)
# ax.grid(which="major", axis='y', color='#DAD8D7', alpha=0.5, zorder=1)
# ax.set_xlabel('Building Type', fontsize=12, labelpad=10)
# ax.set_ylabel('Energy Consumption (MWh)', fontsize=14, labelpad=10)
# ax.yaxis.set_major_locator(plt.MaxNLocator(integer=True))
# ax.set_xticks(np.arange(len(categories)))
# ax.set_xticklabels(categories, rotation=45, ha='right')
# ax.bar_label(ax.containers[0], padding=3, color='black', fontsize=12, rotation=0)
# ax.spines[['top', 'left', 'bottom']].set_visible(False)
# ax.spines['right'].set_linewidth(1.1)
# # Set a white background
# fig.patch.set_facecolor('white')
# # Adjust the margins around the plot area
# plt.subplots_adjust(left=0.1, right=0.9, top=0.85, bottom=0.25)
# # Save the plot
# plt.savefig('plot_nrcan.png', bbox_inches='tight')
# plt.close()
print('test')
ConstructionFactory('nrcan', city).enrich()
UsageFactory('nrcan', city).enrich()
WeatherFactory('epw', city).enrich()
# EnergyPlus workflow
energy_plus_workflow(city, energy_plus_output_path)
print('test')
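After energy_plus_workflow enriches the city, yearly demands can be read back from the buildings, much as the commented-out plotting code above did. A minimal sketch, reusing the same cte.YEAR access and J-to-MWh conversion from those comments (the loop itself is illustrative, not part of this commit):

import hub.helpers.constants as cte

# Sketch: assumes energy_plus_workflow(city, ...) populated heating_demand in joules
for building in city.buildings:
    yearly_heating_mwh = building.heating_demand[cte.YEAR][0] / 3.6e9
    print(f'{building.function}: {yearly_heating_mwh:.1f} MWh heating')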

View File

@@ -14,7 +14,7 @@ import hub.helpers.constants as cte
from hub.exports.exports_factory import ExportsFactory
from scripts.pv_sizing_and_simulation import PVSizingSimulation
# Specify the GeoJSON file path
geojson_file = process_geojson(x=-73.5681295982132, y=45.49218262677643, diff=0.0005)
geojson_file = process_geojson(x=-73.5681295982132, y=45.49218262677643, diff=0.0001)
file_path = (Path(__file__).parent / 'input_files' / 'output_buildings.geojson')
# Specify the output path for the PDF file
output_path = (Path(__file__).parent / 'out_files').resolve()
@@ -34,7 +34,7 @@ ExportsFactory('sra', city, output_path).export()
sra_path = (output_path / f'{city.name}_sra.xml').resolve()
subprocess.run(['sra', str(sra_path)])
ResultFactory('sra', city, output_path).enrich()
energy_plus_workflow(city)
energy_plus_workflow(city, output_path=output_path)
solar_angles = CitySolarAngles(city.name,
                               city.latitude,
                               city.longitude,

View File

@@ -0,0 +1,16 @@
from pathlib import Path


class DirectoryManager:
    def __init__(self, base_path):
        self.base_path = Path(base_path)
        self.directories = {}

    def create_directory(self, relative_path):
        full_path = self.base_path / relative_path
        full_path.mkdir(parents=True, exist_ok=True)
        self.directories[relative_path] = full_path
        return full_path

    def get_directory(self, relative_path):
        return self.directories.get(relative_path, None)
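A quick usage sketch of DirectoryManager, mirroring the layout main.py builds in this changeset; the assertions are illustrative only:

from pathlib import Path
from scripts.district_heating_network.directory_manager import DirectoryManager

base_path = Path(__file__).parent
dir_manager = DirectoryManager(base_path)
input_files_path = dir_manager.create_directory('input_files')          # mkdir -p semantics
sra_output_path = dir_manager.create_directory('out_files/sra_outputs')
assert dir_manager.get_directory('input_files') == input_files_path     # cached lookup
assert dir_manager.get_directory('never_created') is None               # unknown key returns None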

View File

@@ -0,0 +1,326 @@
import json
import matplotlib.pyplot as plt
from shapely.geometry import Polygon, Point, LineString
import networkx as nx
from typing import List, Tuple
from rtree import index
import math
import logging

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logging.getLogger('numexpr').setLevel(logging.ERROR)


def haversine(lon1, lat1, lon2, lat2):
    """
    Calculate the great-circle distance between two points
    on the Earth specified by their longitude and latitude.
    """
    R = 6371000  # Radius of the Earth in meters
    phi1 = math.radians(lat1)
    phi2 = math.radians(lat2)
    delta_phi = math.radians(lat2 - lat1)
    delta_lambda = math.radians(lon2 - lon1)
    a = math.sin(delta_phi / 2.0) ** 2 + \
        math.cos(phi1) * math.cos(phi2) * \
        math.sin(delta_lambda / 2.0) ** 2
    c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
    return R * c  # Output distance in meters


class DistrictHeatingNetworkCreator:
    def __init__(self, buildings_file: str, roads_file: str):
        """
        Initialize the class with paths to the buildings and roads data files.
        :param buildings_file: Path to the GeoJSON file containing building data.
        :param roads_file: Path to the GeoJSON file containing roads data.
        """
        self.buildings_file = buildings_file
        self.roads_file = roads_file

    def run(self) -> nx.Graph:
        """
        Main method to execute the district heating network creation process.
        :return: NetworkX graph with nodes and edges representing the network.
        """
        try:
            self._load_and_process_data()
            self._find_nearest_roads()
            self._find_nearest_points()
            self._break_down_roads()
            self._create_graph()
            self._create_mst()
            self._iteratively_remove_edges()
            self._add_centroids_to_mst()
            self._convert_edge_weights_to_meters()
            return self.final_mst
        except Exception as e:
            logging.error(f"Error during network creation: {e}")
            raise

    def _load_and_process_data(self):
        """
        Load and process the building and road data.
        """
        try:
            # Load building data
            with open(self.buildings_file, 'r') as file:
                city = json.load(file)
            self.centroids = []
            self.building_names = []
            buildings = city['features']
            for building in buildings:
                coordinates = building['geometry']['coordinates'][0]
                building_polygon = Polygon(coordinates)
                centroid = building_polygon.centroid
                self.centroids.append(centroid)
                self.building_names.append(str(building['id']))

            # Load road data
            with open(self.roads_file, 'r') as file:
                roads = json.load(file)
            line_features = [feature for feature in roads['features'] if feature['geometry']['type'] == 'LineString']
            self.lines = [LineString(feature['geometry']['coordinates']) for feature in line_features]
            self.cleaned_lines = [LineString([line.coords[0], line.coords[-1]]) for line in self.lines]
        except Exception as e:
            logging.error(f"Error loading and processing data: {e}")
            raise

    def _find_nearest_roads(self):
        """
        Find the nearest road for each building centroid.
        """
        try:
            self.closest_roads = []
            unique_roads_set = set()

            # Create spatial index for roads
            idx = index.Index()
            for pos, line in enumerate(self.cleaned_lines):
                idx.insert(pos, line.bounds)

            for centroid in self.centroids:
                min_distance = float('inf')
                closest_road = None
                for pos in idx.nearest(centroid.bounds, 10):
                    road = self.cleaned_lines[pos]
                    distance = road.distance(centroid)
                    if distance < min_distance:
                        min_distance = distance
                        closest_road = road
                if closest_road and closest_road.wkt not in unique_roads_set:
                    unique_roads_set.add(closest_road.wkt)
                    self.closest_roads.append(closest_road)
        except Exception as e:
            logging.error(f"Error finding nearest roads: {e}")
            raise

    def _find_nearest_points(self):
        """
        Find the nearest point on each closest road for each centroid.
        """
        def find_nearest_point_on_line(point: Point, line: LineString) -> Point:
            return line.interpolate(line.project(point))

        try:
            self.nearest_points = []
            for centroid in self.centroids:
                min_distance = float('inf')
                closest_road = None
                for road in self.closest_roads:
                    distance = centroid.distance(road)
                    if distance < min_distance:
                        min_distance = distance
                        closest_road = road
                if closest_road:
                    nearest_point = find_nearest_point_on_line(centroid, closest_road)
                    self.nearest_points.append(nearest_point)
        except Exception as e:
            logging.error(f"Error finding nearest points: {e}")
            raise

    def _break_down_roads(self):
        """
        Break down roads into segments connecting nearest points.
        """
        def break_down_roads(closest_roads: List[LineString], nearest_points_list: List[Point]) -> List[LineString]:
            new_segments = []
            for road in closest_roads:
                coords = list(road.coords)
                points_on_road = [point for point in nearest_points_list if road.distance(point) < 0.000000001]
                sorted_points = sorted(points_on_road, key=lambda point: road.project(point))
                sorted_points.insert(0, Point(coords[0]))
                sorted_points.append(Point(coords[-1]))
                for i in range(len(sorted_points) - 1):
                    segment = LineString([sorted_points[i], sorted_points[i + 1]])
                    new_segments.append(segment)
            return new_segments

        try:
            self.new_segments = break_down_roads(self.closest_roads, self.nearest_points)
            self.cleaned_lines = [line for line in self.cleaned_lines if line not in self.closest_roads]
            self.cleaned_lines.extend(self.new_segments)
        except Exception as e:
            logging.error(f"Error breaking down roads: {e}")
            raise

    def _create_graph(self):
        """
        Create a NetworkX graph from the cleaned lines.
        """
        try:
            self.G = nx.Graph()
            for line in self.cleaned_lines:
                coords = list(line.coords)
                for i in range(len(coords) - 1):
                    self.G.add_edge(coords[i], coords[i + 1], weight=Point(coords[i]).distance(Point(coords[i + 1])))
        except Exception as e:
            logging.error(f"Error creating graph: {e}")
            raise

    def _create_mst(self):
        """
        Create a Minimum Spanning Tree (MST) from the graph.
        """
        def find_paths_between_nearest_points(g: nx.Graph, nearest_points: List[Point]) -> List[Tuple]:
            edges = []
            for i, start_point in enumerate(nearest_points):
                start = (start_point.x, start_point.y)
                for end_point in nearest_points[i + 1:]:
                    end = (end_point.x, end_point.y)
                    if nx.has_path(g, start, end):
                        path = nx.shortest_path(g, source=start, target=end, weight='weight')
                        path_edges = list(zip(path[:-1], path[1:]))
                        edges.extend((u, v, g[u][v]['weight']) for u, v in path_edges)
            return edges

        try:
            edges = find_paths_between_nearest_points(self.G, self.nearest_points)
            h = nx.Graph()
            h.add_weighted_edges_from(edges)
            mst = nx.minimum_spanning_tree(h, weight='weight')
            final_edges = []
            for u, v in mst.edges():
                if nx.has_path(self.G, u, v):
                    path = nx.shortest_path(self.G, source=u, target=v, weight='weight')
                    path_edges = list(zip(path[:-1], path[1:]))
                    final_edges.extend((x, y, self.G[x][y]['weight']) for x, y in path_edges)
            self.final_mst = nx.Graph()
            self.final_mst.add_weighted_edges_from(final_edges)
        except Exception as e:
            logging.error(f"Error creating MST: {e}")
            raise

    def _iteratively_remove_edges(self):
        """
        Iteratively remove edges that do not have any nearest points and have one end with only one connection.
        Also remove nodes that don't have any connections and street nodes with only one connection.
        """
        nearest_points_tuples = [(point.x, point.y) for point in self.nearest_points]

        def find_edges_to_remove(graph: nx.Graph) -> List[Tuple]:
            edges_to_remove = []
            for u, v, d in graph.edges(data=True):
                if u not in nearest_points_tuples and v not in nearest_points_tuples:
                    if graph.degree(u) == 1 or graph.degree(v) == 1:
                        edges_to_remove.append((u, v, d))
            return edges_to_remove

        def find_nodes_to_remove(graph: nx.Graph) -> List[Tuple]:
            nodes_to_remove = []
            for node in graph.nodes():
                if graph.degree(node) == 0:
                    nodes_to_remove.append(node)
            return nodes_to_remove

        try:
            edges_to_remove = find_edges_to_remove(self.final_mst)
            self.final_mst_steps = [list(self.final_mst.edges(data=True))]

            while edges_to_remove or find_nodes_to_remove(self.final_mst):
                self.final_mst.remove_edges_from(edges_to_remove)
                nodes_to_remove = find_nodes_to_remove(self.final_mst)
                self.final_mst.remove_nodes_from(nodes_to_remove)
                edges_to_remove = find_edges_to_remove(self.final_mst)
                self.final_mst_steps.append(list(self.final_mst.edges(data=True)))

            def find_single_connection_street_nodes(graph: nx.Graph) -> List[Tuple]:
                single_connection_street_nodes = []
                for node in graph.nodes():
                    if node not in nearest_points_tuples and graph.degree(node) == 1:
                        single_connection_street_nodes.append(node)
                return single_connection_street_nodes

            single_connection_street_nodes = find_single_connection_street_nodes(self.final_mst)

            while single_connection_street_nodes:
                for node in single_connection_street_nodes:
                    neighbors = list(self.final_mst.neighbors(node))
                    self.final_mst.remove_node(node)
                    for neighbor in neighbors:
                        if self.final_mst.degree(neighbor) == 0:
                            self.final_mst.remove_node(neighbor)
                single_connection_street_nodes = find_single_connection_street_nodes(self.final_mst)
                self.final_mst_steps.append(list(self.final_mst.edges(data=True)))
        except Exception as e:
            logging.error(f"Error iteratively removing edges: {e}")
            raise

    def _add_centroids_to_mst(self):
        """
        Add centroids to the final MST graph and connect them to their associated node on the graph.
        """
        try:
            for i, centroid in enumerate(self.centroids):
                centroid_tuple = (centroid.x, centroid.y)
                building_name = self.building_names[i]
                self.final_mst.add_node(centroid_tuple, type='building', id=building_name)
                nearest_point = None
                min_distance = float('inf')
                for node in self.final_mst.nodes():
                    if self.final_mst.nodes[node].get('type') != 'building':
                        node_point = Point(node)
                        distance = centroid.distance(node_point)
                        if distance < min_distance:
                            min_distance = distance
                            nearest_point = node
                if nearest_point:
                    self.final_mst.add_edge(centroid_tuple, nearest_point, weight=min_distance)
        except Exception as e:
            logging.error(f"Error adding centroids to MST: {e}")
            raise

    def _convert_edge_weights_to_meters(self):
        """
        Convert all edge weights in the final MST graph to meters using the Haversine formula.
        """
        try:
            for u, v, data in self.final_mst.edges(data=True):
                lon1, lat1 = u
                lon2, lat2 = v
                distance = haversine(lon1, lat1, lon2, lat2)
                self.final_mst[u][v]['weight'] = distance
        except Exception as e:
            logging.error(f"Error converting edge weights to meters: {e}")
            raise

    def plot_network_graph(self):
        """
        Plot the network graph using matplotlib and networkx.
        """
        plt.figure(figsize=(15, 10))
        pos = {node: (node[0], node[1]) for node in self.final_mst.nodes()}
        nx.draw_networkx_nodes(self.final_mst, pos, node_color='blue', node_size=50)
        nx.draw_networkx_edges(self.final_mst, pos, edge_color='gray')
        plt.title('District Heating Network Graph')
        plt.axis('off')
        plt.show()
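A usage sketch for the class above; the two GeoJSON paths match what main.py and road_processor write elsewhere in this changeset, but treat them as assumptions about the working directory:

# Sketch: build and plot a network from the generated GeoJSON files (paths assumed)
creator = DistrictHeatingNetworkCreator(
    buildings_file='./input_files/output_buildings.geojson',
    roads_file='./input_files/output_roads.geojson'
)
network_graph = creator.run()   # nx.Graph with edge weights converted to meters
print(f'{network_graph.number_of_nodes()} nodes, {network_graph.number_of_edges()} edges')
creator.plot_network_graph()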

View File

@@ -0,0 +1,54 @@
import json
from shapely import LineString, Point
import networkx as nx
from pathlib import Path


def networkx_to_geojson(graph: nx.Graph) -> Path:
    """
    Convert a NetworkX graph to GeoJSON format and write it to disk.
    :param graph: A NetworkX graph.
    :return: Path to the written GeoJSON file.
    """
    features = []

    for u, v, data in graph.edges(data=True):
        line = LineString([u, v])
        feature = {
            "type": "Feature",
            "geometry": {
                "type": "LineString",
                "coordinates": list(line.coords)
            },
            "properties": {
                "weight": data.get("weight", 1.0)
            }
        }
        features.append(feature)

    for node, data in graph.nodes(data=True):
        point = Point(node)
        feature = {
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": list(point.coords)[0]
            },
            "properties": {
                "type": data.get("type", "unknown"),
                "id": data.get("id", "N/A")
            }
        }
        features.append(feature)

    geojson = {
        "type": "FeatureCollection",
        "features": features
    }

    output_geojson_file = Path('./out_files/network_graph.geojson').resolve()
    with open(output_geojson_file, 'w') as file:
        json.dump(geojson, file, indent=4)

    return output_geojson_file
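A self-contained check of the converter using a made-up two-node graph; the coordinates are arbitrary, and ./out_files must already exist because the function opens the output path directly:

import networkx as nx

g = nx.Graph()
g.add_edge((-73.5681, 45.4921), (-73.5678, 45.4925), weight=42.0)   # toy edge in (lon, lat)
g.nodes[(-73.5681, 45.4921)].update(type='building', id='b1')       # toy building node attributes
output_path = networkx_to_geojson(g)
print(f'GeoJSON written to {output_path}')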

View File

@@ -0,0 +1,56 @@
from pathlib import Path
from shapely.geometry import Polygon, Point, shape
import json


def road_processor(x, y, diff):
    """
    Processes a .JSON file to find roads that have at least one node within a specified polygon.

    Parameters:
    x (float): The x-coordinate of the center of the selection box.
    y (float): The y-coordinate of the center of the selection box.
    diff (float): The half-width of the selection box.

    Returns:
    Path: The file path of the output GeoJSON file containing the selected roads.
    """
    diff += 2 * diff

    # Define the selection polygon
    selection_box = Polygon([
        [x + diff, y - diff],
        [x - diff, y - diff],
        [x - diff, y + diff],
        [x + diff, y + diff]
    ])

    # Define input and output file paths
    geojson_file = Path("./input_files/roads.json").resolve()
    output_file = Path('./input_files/output_roads.geojson').resolve()

    # Initialize a list to store the roads in the region
    roads_in_region = []

    # Read the GeoJSON file
    with open(geojson_file, 'r') as file:
        roads = json.load(file)

    line_features = [feature for feature in roads['features'] if feature['geometry']['type'] == 'LineString']

    # Check each road feature
    for feature in line_features:
        road_shape = shape(feature['geometry'])
        # Check if any node of the road is inside the selection box
        if any(selection_box.contains(Point(coord)) for coord in road_shape.coords):
            roads_in_region.append(feature)

    # Create a new GeoJSON structure with the selected roads
    output_geojson = {
        "type": "FeatureCollection",
        "features": roads_in_region
    }

    # Write the selected roads to the output file
    with open(output_file, 'w') as outfile:
        json.dump(output_geojson, outfile)

    return output_file
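An example call, reusing the selection point and diff from main.py; the working directory is assumed to be the repository root so the relative input_files paths resolve, and roads.json (added in this changeset) must be present:

# Same selection point and diff as main.py
location = [45.53067276979674, -73.70234652694087]
roads_file = road_processor(x=location[1], y=location[0], diff=0.001)
print(f'Selected roads written to {roads_file}')   # ./input_files/output_roads.geojson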

View File

@@ -0,0 +1,80 @@
import pandas as pd
import numpy as np


class DemandShiftProcessor:
    def __init__(self, city):
        self.city = city

    def random_shift(self, series):
        shift_amount = np.random.randint(0, round(0.005 * len(series)))
        return series.shift(shift_amount).fillna(series.shift(shift_amount - len(series)))

    def process_demands(self):
        heating_dfs = []
        cooling_dfs = []
        for building in self.city.buildings:
            heating_df = self.convert_building_to_dataframe(building, 'heating')
            cooling_df = self.convert_building_to_dataframe(building, 'cooling')
            heating_df.set_index('Date/Time', inplace=True)
            cooling_df.set_index('Date/Time', inplace=True)
            shifted_heating_demands = heating_df.apply(self.random_shift, axis=0)
            shifted_cooling_demands = cooling_df.apply(self.random_shift, axis=0)
            self.update_building_demands(building, shifted_heating_demands, 'heating')
            self.update_building_demands(building, shifted_cooling_demands, 'cooling')
            heating_dfs.append(shifted_heating_demands)
            cooling_dfs.append(shifted_cooling_demands)
        combined_heating_df = pd.concat(heating_dfs, axis=1)
        combined_cooling_df = pd.concat(cooling_dfs, axis=1)
        self.calculate_and_set_simultaneity_factor(combined_heating_df, 'heating')
        self.calculate_and_set_simultaneity_factor(combined_cooling_df, 'cooling')

    def convert_building_to_dataframe(self, building, demand_type):
        if demand_type == 'heating':
            data = {
                "Date/Time": self.generate_date_time_index(),
                "Heating_Demand": building.heating_demand["hour"]
            }
        else:  # cooling
            data = {
                "Date/Time": self.generate_date_time_index(),
                "Cooling_Demand": building.cooling_demand["hour"]
            }
        return pd.DataFrame(data)

    def generate_date_time_index(self):
        # Generate an hourly date/time index for a full year (2013)
        date_range = pd.date_range(start="2013-01-01 00:00:00", end="2013-12-31 23:00:00", freq='H')
        return date_range.strftime('%m/%d %H:%M:%S').tolist()

    def update_building_demands(self, building, shifted_demands, demand_type):
        if demand_type == 'heating':
            shifted_series = shifted_demands["Heating_Demand"]
            building.heating_demand = self.calculate_new_demands(shifted_series)
        else:  # cooling
            shifted_series = shifted_demands["Cooling_Demand"]
            building.cooling_demand = self.calculate_new_demands(shifted_series)

    def calculate_new_demands(self, shifted_series):
        new_demand = {
            "hour": shifted_series.tolist(),
            "month": self.calculate_monthly_demand(shifted_series),
            "year": [shifted_series.sum()]
        }
        return new_demand

    def calculate_monthly_demand(self, series):
        series.index = pd.to_datetime(series.index, format='%m/%d %H:%M:%S')
        monthly_demand = series.resample('M').sum()
        return monthly_demand.tolist()

    def calculate_and_set_simultaneity_factor(self, combined_df, demand_type):
        total_demand_original = combined_df.sum(axis=1)
        peak_total_demand_original = total_demand_original.max()
        individual_peak_demands = combined_df.max(axis=0)
        sum_individual_peak_demands = individual_peak_demands.sum()
        if demand_type == 'heating':
            self.city.simultaneity_factor_heating = peak_total_demand_original / sum_individual_peak_demands
        else:  # cooling
            self.city.simultaneity_factor_cooling = peak_total_demand_original / sum_individual_peak_demands
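A minimal driving sketch, assuming city is a hub city whose buildings already carry hourly heating_demand['hour'] and cooling_demand['hour'] series (for example after the EnergyPlus workflow in main.py):

# Sketch: the enriched city object is an assumption, not constructed here
processor = DemandShiftProcessor(city)
processor.process_demands()   # shifts each building's profile and updates its demand dicts
print(f'Simultaneity factor (heating): {city.simultaneity_factor_heating:.2f}')
print(f'Simultaneity factor (cooling): {city.simultaneity_factor_cooling:.2f}')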

451
work_in_progress.ipynb Normal file

File diff suppressed because one or more lines are too long