Skip to content

Commit

Permalink
Fix output filenames, add a network-flow validation script, and defer the odpfc matrix update until flows converge
Browse files Browse the repository at this point in the history
  • Loading branch information
YueeeeeLi committed Jan 7, 2025
1 parent 646998a commit ee63d13
Show file tree
Hide file tree
Showing 4 changed files with 1,146 additions and 11 deletions.
10 changes: 3 additions & 7 deletions scripts/1_network_flow_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,19 +110,15 @@ def main(num_of_cpu):
columns=[
"origin_node",
"destination_node",
"path",
"flow",
"operating_cost_per_flow",
"time_cost_per_flow",
"toll_cost_per_flow",
],
)
print(f"The total simulation time: {time.time() - start_time}")
# breakpoint()
# export files
road_links.to_parquet(base_path.parent / "outputs" / "edge_flows_33p.pq")
isolation.to_parquet(base_path.parent / "outputs" / "trip_isolation_33p.pq")
odpfc.to_parquet(base_path.parent / "outputs" / "odpfc_33p.pq")
road_links.to_parquet(base_path.parent / "outputs" / "edge_flows_32p.gpq")
isolation.to_parquet(base_path.parent / "outputs" / "trip_isolation_32p.pq")
odpfc.to_parquet(base_path.parent / "outputs" / "odpfc_32p.pq")


if __name__ == "__main__":
Expand Down
133 changes: 133 additions & 0 deletions scripts/1_network_flow_model_validation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,133 @@
# %%
from pathlib import Path
import sys
import time

import pandas as pd
import geopandas as gpd # type: ignore

from nird.utils import load_config
import nird.road_validation as func

import json
import warnings

warnings.simplefilter("ignore")

# Root data directory, resolved from the project configuration file.
base_path = Path(load_config()["paths"]["base_path"])


# %%
def main(num_of_cpu):
    """Run the GB road-network flow validation model.

    Loads the model parameter dictionaries and the 2021 origin-destination
    matrix, initialises the road links, builds an igraph network, runs the
    flow simulation, and writes the resulting edge flows and trip-isolation
    records to the outputs directory.

    Args:
        num_of_cpu: Number of CPU workers passed to the flow model.
    """
    start_time = time.time()
    # model parameters
    with open(base_path / "parameters" / "flow_breakpoint_dict.json", "r") as f:
        flow_breakpoint_dict = json.load(f)
    with open(base_path / "parameters" / "flow_cap_plph_dict.json", "r") as f:
        flow_capacity_dict = json.load(f)
    with open(base_path / "parameters" / "free_flow_speed_dict.json", "r") as f:
        free_flow_speed_dict = json.load(f)
    with open(base_path / "parameters" / "min_speed_cap.json", "r") as f:
        min_speed_dict = json.load(f)
    with open(base_path / "parameters" / "urban_speed_cap.json", "r") as f:
        urban_speed_dict = json.load(f)

    # network links -> updated to links with bridges
    road_link_file = gpd.read_parquet(
        base_path / "networks" / "road" / "GB_road_links_with_bridges.gpq"
    )

    # od matrix (2021) -> updated to od with bridges
    od_node_2021 = pd.read_csv(
        base_path
        / "census_datasets"
        / "od_matrix"
        / "od_gb_oa_2021_node_with_bridges.csv"
    )
    # Doubling Car21 — presumably converts one-way trip counts to two-way
    # flows; TODO confirm against the OD-matrix preparation step.
    od_node_2021["Car21"] = od_node_2021["Car21"] * 2
    # od_node_2021 = od_node_2021.head(10)  # for debug
    print(f"total flows: {od_node_2021.Car21.sum()}")

    # initialise road links
    """ adding columns:
    - edge properties:
        free-flow speeds
        min speeds (urban/rural)
        max speeds (on flooded roads -> for disruption analysis only)
        initial speeds
    - edge variables:
        acc_speed
        acc_flow
        acc_capacity
    """
    road_links = func.edge_init(
        road_link_file,
        flow_capacity_dict,
        free_flow_speed_dict,
        urban_speed_dict,
        min_speed_dict,
        max_flow_speed_dict=None,
    )
    # create igraph network
    network = func.create_igraph_network(road_links)

    # run flow simulation (validation variant does not return the
    # origin-destination-path-flow-cost matrix)
    (
        road_links,
        isolation,
    ) = func.network_flow_model(
        road_links,
        network,
        od_node_2021,
        flow_breakpoint_dict,
        num_of_cpu,
    )

    # trips that could not be assigned to the network
    isolation = pd.DataFrame(
        isolation,
        columns=[
            "origin_node",
            "destination_node",
            "flow",
        ],
    )
    print(f"The total simulation time: {time.time() - start_time}")
    # export files
    road_links.to_parquet(base_path.parent / "outputs" / "edge_flows_validation.gpq")
    isolation.to_parquet(base_path.parent / "outputs" / "trip_isolation_validation.pq")


if __name__ == "__main__":
    try:
        # IndexError: no CLI argument given; ValueError: argument not an int.
        # (The original `except IndexError or NameError:` evaluated to
        # `except IndexError:` only and missed the ValueError case entirely.)
        num_of_cpu = int(sys.argv[1])
        main(num_of_cpu)
    except (IndexError, ValueError):
        print("Please enter the required number of CPUs!")
17 changes: 13 additions & 4 deletions src/nird/road_revised.py
Original file line number Diff line number Diff line change
Expand Up @@ -811,9 +811,6 @@ def network_flow_model(
isolated_flow_matrix,
) = update_od_matrix(temp_flow_matrix, remain_od)

# update the origin-destination-path-cost matrix
odpfc.extend(temp_flow_matrix.to_numpy().tolist())

# update the isolated flows
isolation.extend(isolated_flow_matrix.to_numpy().tolist())

Expand Down Expand Up @@ -860,6 +857,9 @@ def network_flow_model(
print(f"The maximum amount of edge overflow: {max_overflow}")

if max_overflow <= 0:
# update the origin-destination-path-cost matrix
odpfc.extend(temp_flow_matrix.to_numpy().tolist())

# add/update edge key variables: flow/speed/capacity
temp_edge_flow["acc_flow"] = (
temp_edge_flow["flow"] + temp_edge_flow["acc_flow"]
Expand Down Expand Up @@ -923,8 +923,16 @@ def network_flow_model(
break
print(f"r = {r}")

# update the origin-destination-path-cost matrix
odpfc.extend(
(temp_flow_matrix.assign(flow=temp_flow_matrix["flow"] * r))
.to_numpy()
.tolist()
)

# add/update edge key variables: flows/speeds/capacities
temp_edge_flow["adjusted_flow"] = temp_edge_flow["flow"] * r

temp_edge_flow["acc_flow"] = (
temp_edge_flow.acc_flow + temp_edge_flow.adjusted_flow
)
Expand Down Expand Up @@ -964,7 +972,8 @@ def network_flow_model(
# total cost
total_cost += time_equiv_cost + operating_cost + toll_cost

# if remaining supply < 0.5 -> 0
# update remaining od flows
remain_od["Car21"] = remain_od["Car21"] * (1 - r)
remain_od.loc[remain_od.Car21 < 0.5, "Car21"] = 0
total_remain = remain_od.Car21.sum()
print(f"The total remaining supply (after flow adjustment) is: {total_remain}")
Expand Down
Loading

0 comments on commit ee63d13

Please sign in to comment.