 from calitp_data_analysis import utils
 from segment_speed_utils import time_series_utils
-from merge_data import merge_in_standardized_route_names, exclude_private_datasets
+from shared_utils import publish_utils
+from merge_data import merge_in_standardized_route_names
 from update_vars import GTFS_DATA_DICT, SCHED_GCS, RT_SCHED_GCS

 sort_cols = ["schedule_gtfs_dataset_key", "service_date"]
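For context, this change swaps the local `merge_data.exclude_private_datasets` import for the shared `publish_utils.exclude_private_datasets` in `shared_utils`. Below is a minimal sketch of what that helper plausibly does, with the signature inferred from the `.pipe()` call sites in this diff; the real implementation lives in `shared_utils` and may differ.

```python
import pandas as pd

# Hypothetical sketch only -- signature inferred from the .pipe() call sites below.
def exclude_private_datasets(
    df: pd.DataFrame,
    col: str = "schedule_gtfs_dataset_key",
    public_gtfs_dataset_keys: list = [],
) -> pd.DataFrame:
    """Keep only rows whose feed key appears in the list of public feed keys."""
    return df[df[col].isin(public_gtfs_dataset_keys)].reset_index(drop=True)
```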
@@ -154,7 +155,7 @@ def operator_category_counts_by_date() -> pd.DataFrame:
     # Drop duplicates created after merging
     op_profiles_df2 = (op_profiles_df1
         .pipe(
-            exclude_private_datasets,
+            publish_utils.exclude_private_datasets,
             col = "schedule_gtfs_dataset_key",
             public_gtfs_dataset_keys = public_feeds
         ).drop_duplicates(subset = list(op_profiles_df1.columns))
@@ -169,7 +170,7 @@ def operator_category_counts_by_date() -> pd.DataFrame:
     ).pipe(
         merge_in_standardized_route_names
     ).pipe(
-        exclude_private_datasets,
+        publish_utils.exclude_private_datasets,
         col = "schedule_gtfs_dataset_key",
         public_gtfs_dataset_keys = public_feeds
     )
@@ -181,7 +182,7 @@ def operator_category_counts_by_date() -> pd.DataFrame:
     )

     operator_category_counts = operator_category_counts_by_date().pipe(
-        exclude_private_datasets,
+        publish_utils.exclude_private_datasets,
         col = "schedule_gtfs_dataset_key",
         public_gtfs_dataset_keys = public_feeds
     )
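For illustration only (not part of this change): `DataFrame.pipe(func, **kwargs)` calls `func(df, **kwargs)`, so each call site above is equivalent to invoking the helper directly. A toy example with made-up feed keys, assuming the import added above; `public_feeds` stands in for the list loaded elsewhere in the script.

```python
import pandas as pd
from shared_utils import publish_utils  # import added in this change

# Made-up feed keys for demonstration purposes only.
df = pd.DataFrame({
    "schedule_gtfs_dataset_key": ["abc123", "def456", "ghi789"],
    "service_date": ["2024-01-17"] * 3,
})
public_feeds = ["abc123", "ghi789"]

filtered = df.pipe(
    publish_utils.exclude_private_datasets,
    col = "schedule_gtfs_dataset_key",
    public_gtfs_dataset_keys = public_feeds,
)
# Equivalent to calling the helper directly:
# publish_utils.exclude_private_datasets(
#     df,
#     col="schedule_gtfs_dataset_key",
#     public_gtfs_dataset_keys=public_feeds,
# )
```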