From 29ff602c2f5aa1a909db7ef04a2879c2274d532b Mon Sep 17 00:00:00 2001
From: ZahraGhahremani
Date: Sat, 28 Dec 2024 00:07:11 +0000
Subject: [PATCH 1/9] Sierra fix

---
 tools/rating_curve_comparison.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tools/rating_curve_comparison.py b/tools/rating_curve_comparison.py
index 277375fb..ca263d0f 100755
--- a/tools/rating_curve_comparison.py
+++ b/tools/rating_curve_comparison.py
@@ -949,7 +949,7 @@ def get_reccur_intervals(site_rc, usgs_crosswalk, nwm_recurr_intervals):
     if nwm_ids > 0:
         try:
             nwm_recurr_intervals = nwm_recurr_intervals.copy().loc[
-                nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().item()
+                nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().loc[0]
             ]
             nwm_recurr_intervals['pred_elev'] = np.interp(
                 nwm_recurr_intervals.discharge_cfs.values,
@@ -1299,6 +1299,7 @@ def evaluate_results(sierra_results=[], labels=[], save_location=''):
     )  # using WARNING level to avoid benign? info messages ("Failed to auto identify EPSG: 7")
     format = ' %(message)s'
     log_dt_string = start_time.strftime("%Y_%m_%d-%H_%M_%S")
+    os.makedirs(output_dir, exist_ok=True)
     handlers = [
         logging.FileHandler(os.path.join(output_dir, f'rating_curve_comparison_{log_dt_string}.log')),
         logging.StreamHandler(),
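A note on the first hunk above: `Series.item()` succeeds only when the gage's `location_id` crosswalks to a single NWM `feature_id`, so sites that map to more than one raised a ValueError and dropped out of the Sierra test. The sketch below is illustrative only (toy data, not repository code); later patches in this series move to positional `.iloc` indexing for the same reason.

```python
import pandas as pd

# Toy crosswalk result: one gage location mapped to two NWM feature_ids (values are made up)
usgs_site = pd.DataFrame(
    {"location_id": ["07144100"] * 4, "feature_id": [9731286, 9731286, 9731454, 9731454]}
)

unique_ids = usgs_site.feature_id.drop_duplicates()
# unique_ids.item() -> ValueError: can only convert an array of size 1 to a Python scalar
first_id = unique_ids.iloc[0]  # positional selection works no matter how many ids remain
print(first_id)  # 9731286
```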
From 2dbc95093422390533075a44c0f4fa850dbc3356 Mon Sep 17 00:00:00 2001
From: ZahraGhahremani
Date: Tue, 31 Dec 2024 23:10:59 +0000
Subject: [PATCH 2/9] modified script to work based on ratio

---
 tools/rating_curve_comparison.py | 57 +++++++++++++++++++++++++++++---
 1 file changed, 53 insertions(+), 4 deletions(-)

diff --git a/tools/rating_curve_comparison.py b/tools/rating_curve_comparison.py
index ca263d0f..a732e8a7 100755
--- a/tools/rating_curve_comparison.py
+++ b/tools/rating_curve_comparison.py
@@ -255,7 +255,7 @@ def generate_rating_curve_metrics(args):
         str_order = np.unique(usgs_rc.order_).item()
         feature_id = str(gage.feature_id)
 
-        usgs_pred_elev = get_reccur_intervals(usgs_rc, usgs_crosswalk, nwm_recurr_intervals_all)
+        usgs_pred_elev, feature_index = get_reccur_intervals(usgs_rc, usgs_crosswalk, nwm_recurr_intervals_all)
 
         # Handle sites missing data
         if len(usgs_pred_elev) < 1:
@@ -282,7 +282,8 @@ def generate_rating_curve_metrics(args):
            )
            continue
 
-        fim_pred_elev = get_reccur_intervals(fim_rc, usgs_crosswalk, nwm_recurr_intervals_all)
+        if feature_index is not None:
+            fim_pred_elev = get_reccur_intervals_fim(fim_rc, usgs_crosswalk, nwm_recurr_intervals_all, feature_index)
 
         # Handle sites missing data
         if len(fim_pred_elev) < 1:
@@ -942,14 +943,14 @@ def generate_rc_and_rem_plots(rc, plot_filename, recurr_data_table, branches_fol
     plt.close()
 
 
-def get_reccur_intervals(site_rc, usgs_crosswalk, nwm_recurr_intervals):
+def get_reccur_intervals_fim(site_rc, usgs_crosswalk, nwm_recurr_intervals, feature_index):
     usgs_site = site_rc.merge(usgs_crosswalk, on="location_id")
     nwm_ids = len(usgs_site.feature_id.drop_duplicates())
 
     if nwm_ids > 0:
         try:
             nwm_recurr_intervals = nwm_recurr_intervals.copy().loc[
-                nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().loc[0]
+                nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().loc[feature_index]
             ]
             nwm_recurr_intervals['pred_elev'] = np.interp(
                 nwm_recurr_intervals.discharge_cfs.values,
@@ -970,6 +971,54 @@ def get_reccur_intervals(site_rc, usgs_crosswalk, nwm_recurr_intervals):
 
     else:
         return []
 
+
+def get_reccur_intervals(site_rc, usgs_crosswalk, nwm_recurr_intervals):
+    usgs_site = site_rc.merge(usgs_crosswalk, on="location_id")
+    nwm_ids = len(usgs_site.feature_id.drop_duplicates())
+    min_discharge = site_rc.loc[(site_rc.source == 'USGS')].discharge_cfs.min()
+    max_discharge = site_rc.loc[(site_rc.source == 'USGS')].discharge_cfs.max()
+    discharge_range = max_discharge - min_discharge
+
+    if nwm_ids > 0:
+        try:
+            filtered = nwm_recurr_intervals.copy().loc[
+                nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().iloc[0]
+                ]
+            min_q_recurr = filtered.discharge_cfs.min()
+            max_q_recurr = filtered.discharge_cfs.max()
+            spread_q = max_q_recurr - min_q_recurr
+            ratio = spread_q / discharge_range
+            # If there is only one feature_id for each location_id
+            if nwm_ids == 1:
+                nwm_recurr_intervals = filtered
+                feature_index = 0
+
+            # If there is more one feature_id for each location_id
+            else:
+                if ratio > 0.1:
+                    nwm_recurr_intervals = filtered
+                    feature_index = 0
+                else:
+                    nwm_recurr_intervals = nwm_recurr_intervals.copy().loc[
+                        nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().iloc[1]
+                        ]
+                    feature_index = 1
+            nwm_recurr_intervals['pred_elev'] = np.interp(
+                nwm_recurr_intervals.discharge_cfs.values,
+                usgs_site['discharge_cfs'],
+                usgs_site['elevation_ft'],
+                left=np.nan,
+                right=np.nan,
+            )
+
+            return nwm_recurr_intervals, feature_index
+        except Exception as ex:
+            summary = traceback.StackSummary.extract(traceback.walk_stack(None))
+            print(summary, repr(ex))
+            return []
+
+    else:
+        return []
 
 
 def calculate_rc_stats_elev(rc, stat_groups=None):
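PATCH 2/9 picks which crosswalked NWM `feature_id` to keep by comparing the spread of that feature's recurrence-interval discharges against the range of the gage's observed USGS rating curve; if the spread covers less than 10% of the observed range, the second `feature_id` is used instead. A stand-alone sketch of that ratio test with toy numbers (the helper name below is illustrative, not the repository's API):

```python
import pandas as pd


def pick_feature_index(usgs_rc: pd.DataFrame, recurr_first: pd.DataFrame, n_feature_ids: int) -> int:
    """Return 0 to keep the first crosswalked feature_id, 1 to fall back to the second."""
    discharge_range = usgs_rc.discharge_cfs.max() - usgs_rc.discharge_cfs.min()
    spread_q = recurr_first.discharge_cfs.max() - recurr_first.discharge_cfs.min()
    ratio = spread_q / discharge_range
    # Keep the first feature_id if it is the only one, or if its recurrence-flow
    # spread covers more than 10% of the observed USGS discharge range
    if n_feature_ids == 1 or ratio > 0.1:
        return 0
    return 1


usgs_rc = pd.DataFrame({"discharge_cfs": [50.0, 400.0, 2200.0]})       # toy USGS rating curve flows
recurr_first = pd.DataFrame({"discharge_cfs": [100.0, 120.0, 150.0]})  # toy recurrence flows, first feature_id
print(pick_feature_index(usgs_rc, recurr_first, n_feature_ids=2))      # -> 1 (spread is too narrow)
```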

From 75c76e45fe847fc266521a59e42bf076091810fd Mon Sep 17 00:00:00 2001
From: ZahraGhahremani
Date: Thu, 2 Jan 2025 16:31:53 +0000
Subject: [PATCH 3/9] updating CHANGELOG

---
 docs/CHANGELOG.md | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 4e2da186..e167059b 100755
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -1,6 +1,17 @@
 All notable changes to this project will be documented in this file.
 We follow the [Semantic Versioning 2.0.0](http://semver.org/) format.
 
+## v4.5.x.x - 2025-01-02 - [PR#1388]https://github.com/NOAA-OWP/inundation-mapping/pull/1388)
+
+Fixed Sierra test bugs to draw the vertical lines.
+
+### Changes
+
+- `tools/rating_curve_comparison.py`
+
+
+
+
 ## v4.5.13.1 - 2024-12-13 - [PR#1361](https://github.com/NOAA-OWP/inundation-mapping/pull/1361)
 
 This PR was triggered by two dep-bot PR's. One for Tornado, one for aiohttp. Upon further research, these two exist only as dependencies for Jupyter and Jupyterlab which were very out of date. Upgrading Jupyter/JupyterLab took care of the other two.

From 5a641958c69af7484b0e6819c77fdbc2f4083c3f Mon Sep 17 00:00:00 2001
From: Zahra Ghahremani <1632979513113305@mil>
Date: Tue, 7 Jan 2025 11:53:54 -0700
Subject: [PATCH 4/9] applied linting

---
 tools/rating_curve_comparison.py | 19 ++++++++++++-------
 1 file changed, 12 insertions(+), 7 deletions(-)

diff --git a/tools/rating_curve_comparison.py b/tools/rating_curve_comparison.py
index a732e8a7..02e25768 100755
--- a/tools/rating_curve_comparison.py
+++ b/tools/rating_curve_comparison.py
@@ -255,7 +255,9 @@ def generate_rating_curve_metrics(args):
         str_order = np.unique(usgs_rc.order_).item()
         feature_id = str(gage.feature_id)
 
-        usgs_pred_elev, feature_index = get_reccur_intervals(usgs_rc, usgs_crosswalk, nwm_recurr_intervals_all)
+        usgs_pred_elev, feature_index = get_reccur_intervals(
+            usgs_rc, usgs_crosswalk, nwm_recurr_intervals_all
+        )
 
         # Handle sites missing data
         if len(usgs_pred_elev) < 1:
@@ -283,7 +285,9 @@ def generate_rating_curve_metrics(args):
            continue
 
         if feature_index is not None:
-            fim_pred_elev = get_reccur_intervals_fim(fim_rc, usgs_crosswalk, nwm_recurr_intervals_all, feature_index)
+            fim_pred_elev = get_reccur_intervals_fim(
+                fim_rc, usgs_crosswalk, nwm_recurr_intervals_all, feature_index
+            )
 
         # Handle sites missing data
         if len(fim_pred_elev) < 1:
@@ -971,7 +975,8 @@ def get_reccur_intervals_fim(site_rc, usgs_crosswalk, nwm_recurr_intervals, feat
 
     else:
         return []
-
+
+
 def get_reccur_intervals(site_rc, usgs_crosswalk, nwm_recurr_intervals):
     usgs_site = site_rc.merge(usgs_crosswalk, on="location_id")
     nwm_ids = len(usgs_site.feature_id.drop_duplicates())
@@ -982,8 +987,8 @@ def get_reccur_intervals(site_rc, usgs_crosswalk, nwm_recurr_intervals):
     if nwm_ids > 0:
         try:
             filtered = nwm_recurr_intervals.copy().loc[
-                nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().iloc[0]
-                ]
+                nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().iloc[0]
+            ]
             min_q_recurr = filtered.discharge_cfs.min()
             max_q_recurr = filtered.discharge_cfs.max()
             spread_q = max_q_recurr - min_q_recurr
@@ -992,7 +997,7 @@ def get_reccur_intervals(site_rc, usgs_crosswalk, nwm_recurr_intervals):
             if nwm_ids == 1:
                 nwm_recurr_intervals = filtered
                 feature_index = 0
-
+
             # If there is more one feature_id for each location_id
             else:
                 if ratio > 0.1:
@@ -1001,7 +1006,7 @@ def get_reccur_intervals(site_rc, usgs_crosswalk, nwm_recurr_intervals):
                 else:
                     nwm_recurr_intervals = nwm_recurr_intervals.copy().loc[
                         nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().iloc[1]
-                        ]
+                    ]
                     feature_index = 1
             nwm_recurr_intervals['pred_elev'] = np.interp(
                 nwm_recurr_intervals.discharge_cfs.values,

From 7dc22d382e29907b5b17e2dd10887595338c67f3 Mon Sep 17 00:00:00 2001
From: ZahraGhahremani
Date: Tue, 14 Jan 2025 16:27:54 +0000
Subject: [PATCH 5/9] addressed reviewer comments

---
 tools/rating_curve_comparison.py | 129 +++++++++++++++++--------------
 1 file changed, 73 insertions(+), 56 deletions(-)

diff --git a/tools/rating_curve_comparison.py b/tools/rating_curve_comparison.py
index 02e25768..0bc1d36a 100755
--- a/tools/rating_curve_comparison.py
+++ b/tools/rating_curve_comparison.py
@@ -255,7 +255,7 @@ def generate_rating_curve_metrics(args):
         str_order = np.unique(usgs_rc.order_).item()
         feature_id = str(gage.feature_id)
 
-        usgs_pred_elev, feature_index = get_reccur_intervals(
+        usgs_pred_elev, feature_index = get_recurr_intervals(
             usgs_rc, usgs_crosswalk, nwm_recurr_intervals_all
         )
 
@@ -285,7 +285,7 @@ def generate_rating_curve_metrics(args):
            continue
 
         if feature_index is not None:
-            fim_pred_elev = get_reccur_intervals_fim(
+            fim_pred_elev, feature_index = get_recurr_intervals(
                 fim_rc, usgs_crosswalk, nwm_recurr_intervals_all, feature_index
             )
 
@@ -599,6 +599,7 @@ def generate_single_plot(rc, plot_filename, recurr_data_table):
 
 def generate_facet_plot(rc, plot_filename, recurr_data_table):
     # Filter FIM elevation based on USGS data
+    gage_max_q = {}
     for gage in rc.location_id.unique():
         # print(recurr_data_table.head)
         try:
@@ -606,6 +607,7 @@ def generate_facet_plot(rc, plot_filename, recurr_data_table):
             max_elev = rc.loc[(rc.location_id == gage) & (rc.source == 'USGS')].elevation_ft.max()
             min_q = rc.loc[(rc.location_id == gage) & (rc.source == 'USGS')].discharge_cfs.min()
             max_q = rc.loc[(rc.location_id == gage) & (rc.source == 'USGS')].discharge_cfs.max()
+            gage_max_q[gage] = max_q
             ri100 = recurr_data_table[
                 (recurr_data_table.location_id == gage) & (recurr_data_table.source == 'FIM')
             ].discharge_cfs.max()
@@ -715,6 +717,7 @@ def generate_facet_plot(rc, plot_filename, recurr_data_table):
 
     ## Plot recurrence intervals
     axes = g.axes_dict
+    recurr_data_max = {}
     for gage in axes:
         ax = axes[gage]
         plt.sca(ax)
@@ -722,6 +725,8 @@ def generate_facet_plot(rc, plot_filename, recurr_data_table):
             recurr_data = recurr_data_table[
                 (recurr_data_table.location_id == gage) & (recurr_data_table.source == 'FIM')
             ].filter(items=['recurr_interval', 'discharge_cfs'])
+            recurr_q_max = recurr_data['discharge_cfs'].max()
+            recurr_data_max[gage] = recurr_q_max
             for i, r in recurr_data.iterrows():
                 if not r.recurr_interval.isnumeric():
                     continue  # skip catfim flows
@@ -741,6 +746,22 @@ def generate_facet_plot(rc, plot_filename, recurr_data_table):
                 logging.info("WARNING: Could not plot recurrence intervals...")
                 logging.info(f'Summary: {summary} \n Exception: \n {repr(ex)}')
 
+    padding = 0.05
+    for gage in g.axes_dict:
+        ax = g.axes_dict[gage]
+        max_q = gage_max_q.get(gage, None)
+        recurr_q_max = recurr_data_max.get(gage, None)
+        if max_q is not None and not np.isnan(max_q):
+            if max_q > recurr_q_max:
+                max_x = max_q
+            else:
+                max_x = recurr_q_max + (0.001 * recurr_q_max)  # To make sure vertical lines are displayed in the plot
+        # For gages without USGS rating curve data
+        else:
+            max_x = rc.discharge_cfs.max()
+        padding_value = max_x * padding
+        ax.set_xlim(0 - padding_value ,max_x)
+
     # Adjust the arrangement of the plots
     g.fig.tight_layout(w_pad=1)
     g.add_legend()
@@ -947,67 +968,63 @@ def generate_rc_and_rem_plots(rc, plot_filename, recurr_data_table, branches_fol
     plt.close()
 
 
-def get_reccur_intervals_fim(site_rc, usgs_crosswalk, nwm_recurr_intervals, feature_index):
+# def get_recurr_intervals_fim(site_rc, usgs_crosswalk, nwm_recurr_intervals, feature_index):
+#     usgs_site = site_rc.merge(usgs_crosswalk, on="location_id")
+#     nwm_ids = len(usgs_site.feature_id.drop_duplicates())
+
+#     if nwm_ids > 0:
+#         try:
+#             nwm_recurr_intervals = nwm_recurr_intervals.copy().loc[
+#                 nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().loc[feature_index]
+#             ]
+#             nwm_recurr_intervals['pred_elev'] = np.interp(
+#                 nwm_recurr_intervals.discharge_cfs.values,
+#                 usgs_site['discharge_cfs'],
+#                 usgs_site['elevation_ft'],
+#                 left=np.nan,
+#                 right=np.nan,
+#             )

+#             return nwm_recurr_intervals
+#         except Exception as ex:
+#             summary = traceback.StackSummary.extract(traceback.walk_stack(None))
+#             # logging.info("WARNING: get_recurr_intervals failed for some reason....")
+#             # logging.info(f"*** {ex}")
+#             # logging.info(''.join(summary.format()))
+#             print(summary, repr(ex))
+#             return []

+#     else:
+#         return []
+
+
+def get_recurr_intervals(site_rc, usgs_crosswalk, nwm_recurr_intervals, feature_index=None):
     usgs_site = site_rc.merge(usgs_crosswalk, on="location_id")
     nwm_ids = len(usgs_site.feature_id.drop_duplicates())
+
 
     if nwm_ids > 0:
         try:
-            nwm_recurr_intervals = nwm_recurr_intervals.copy().loc[
-                nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().loc[feature_index]
-            ]
-            nwm_recurr_intervals['pred_elev'] = np.interp(
-                nwm_recurr_intervals.discharge_cfs.values,
-                usgs_site['discharge_cfs'],
-                usgs_site['elevation_ft'],
-                left=np.nan,
-                right=np.nan,
-            )
-
-            return nwm_recurr_intervals
-        except Exception as ex:
-            summary = traceback.StackSummary.extract(traceback.walk_stack(None))
-            # logging.info("WARNING: get_recurr_intervals failed for some reason....")
-            # logging.info(f"*** {ex}")
-            # logging.info(''.join(summary.format()))
-            print(summary, repr(ex))
-            return []
-
-    else:
-        return []
-
-
-def get_reccur_intervals(site_rc, usgs_crosswalk, nwm_recurr_intervals):
-    usgs_site = site_rc.merge(usgs_crosswalk, on="location_id")
-    nwm_ids = len(usgs_site.feature_id.drop_duplicates())
-    min_discharge = site_rc.loc[(site_rc.source == 'USGS')].discharge_cfs.min()
-    max_discharge = site_rc.loc[(site_rc.source == 'USGS')].discharge_cfs.max()
-    discharge_range = max_discharge - min_discharge
-
-    if nwm_ids > 0:
-        try:
-            filtered = nwm_recurr_intervals.copy().loc[
-                nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().iloc[0]
-                ]
-            min_q_recurr = filtered.discharge_cfs.min()
-            max_q_recurr = filtered.discharge_cfs.max()
-            spread_q = max_q_recurr - min_q_recurr
-            ratio = spread_q / discharge_range
-            # If there is only one feature_id for each location_id
-            if nwm_ids == 1:
-                nwm_recurr_intervals = filtered
-                feature_index = 0
-
-            # If there is more one feature_id for each location_id
-            else:
-                if ratio > 0.1:
-                    nwm_recurr_intervals = filtered
-                    feature_index = 0
-                else:
-                    nwm_recurr_intervals = nwm_recurr_intervals.copy().loc[
-                        nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().iloc[1]
-                        ]
-                    feature_index = 1
+            if feature_index is None:
+                min_discharge = site_rc.loc[(site_rc.source == 'USGS')].discharge_cfs.min()
+                max_discharge = site_rc.loc[(site_rc.source == 'USGS')].discharge_cfs.max()
+                discharge_range = max_discharge - min_discharge
+                filtered = nwm_recurr_intervals.copy().loc[
+                    nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().iloc[0]
+                ]
+                min_q_recurr = filtered.discharge_cfs.min()
+                max_q_recurr = filtered.discharge_cfs.max()
+                spread_q = max_q_recurr - min_q_recurr
+                ratio = spread_q / discharge_range
+                # If there is only one feature_id for each location_id or the ratio is large enough
+                if nwm_ids == 1 or ratio > 0.1:
+                    feature_index = 0
+                # If there is more one feature_id for each location_id and the ratio is not large enough
+                else:
+                    feature_index = 1
+            nwm_recurr_intervals = nwm_recurr_intervals.copy().loc[
+                nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().iloc[feature_index]
+                ]
             nwm_recurr_intervals['pred_elev'] = np.interp(
                 nwm_recurr_intervals.discharge_cfs.values,
                 usgs_site['discharge_cfs'],
@@ -1020,10 +1037,10 @@ def get_reccur_intervals(site_rc, usgs_crosswalk, nwm_recurr_intervals):
         except Exception as ex:
             summary = traceback.StackSummary.extract(traceback.walk_stack(None))
             print(summary, repr(ex))
-            return []
+            return [], None
 
     else:
-        return []
+        return [], None
 
 
 def calculate_rc_stats_elev(rc, stat_groups=None):
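For context on the consolidated helper above: the retained interpolation step maps NWM recurrence-interval discharges onto the site's rating curve with `np.interp(..., left=np.nan, right=np.nan)`, so any recurrence flow outside the observed curve comes back as NaN and produces no predicted elevation (and no vertical line). A small self-contained sketch with toy numbers:

```python
import numpy as np

# Toy rating curve (discharge in cfs, elevation in ft) and toy recurrence-interval flows
rc_discharge_cfs = np.array([100.0, 500.0, 1500.0, 4000.0])
rc_elevation_ft = np.array([520.0, 523.5, 527.0, 531.0])
recurr_discharge_cfs = np.array([900.0, 2500.0, 6000.0])

pred_elev = np.interp(
    recurr_discharge_cfs, rc_discharge_cfs, rc_elevation_ft, left=np.nan, right=np.nan
)
print(pred_elev)  # [524.9 528.6   nan] -> the largest flow exceeds the curve, so no elevation
```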
From 40fa68f98fb227db4996006819a824904758683e Mon Sep 17 00:00:00 2001
From: ZahraGhahremani
Date: Tue, 14 Jan 2025 16:31:37 +0000
Subject: [PATCH 6/9] linting applied

---
 tools/rating_curve_comparison.py | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/tools/rating_curve_comparison.py b/tools/rating_curve_comparison.py
index 0bc1d36a..22afccb8 100755
--- a/tools/rating_curve_comparison.py
+++ b/tools/rating_curve_comparison.py
@@ -752,16 +752,18 @@ def generate_facet_plot(rc, plot_filename, recurr_data_table):
         max_q = gage_max_q.get(gage, None)
         recurr_q_max = recurr_data_max.get(gage, None)
         if max_q is not None and not np.isnan(max_q):
-            if max_q > recurr_q_max: 
+            if max_q > recurr_q_max:
                 max_x = max_q
             else:
-                max_x = recurr_q_max + (0.001 * recurr_q_max)  # To make sure vertical lines are displayed in the plot
+                max_x = recurr_q_max + (
+                    0.001 * recurr_q_max
+                )  # To make sure vertical lines are displayed in the plot
         # For gages without USGS rating curve data
         else:
            max_x = rc.discharge_cfs.max()
         padding_value = max_x * padding
-        ax.set_xlim(0 - padding_value ,max_x)
-
+        ax.set_xlim(0 - padding_value, max_x)
+
     # Adjust the arrangement of the plots
     g.fig.tight_layout(w_pad=1)
     g.add_legend()
@@ -1002,7 +1004,6 @@ def get_recurr_intervals(site_rc, usgs_crosswalk, nwm_recurr_intervals, feature_
     usgs_site = site_rc.merge(usgs_crosswalk, on="location_id")
     nwm_ids = len(usgs_site.feature_id.drop_duplicates())
-
 
     if nwm_ids > 0:
         try:
             if feature_index is None:
@@ -1024,7 +1025,7 @@ def get_recurr_intervals(site_rc, usgs_crosswalk, nwm_recurr_intervals, feature_
                     feature_index = 1
             nwm_recurr_intervals = nwm_recurr_intervals.copy().loc[
                 nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().iloc[feature_index]
-                ]
+            ]
             nwm_recurr_intervals['pred_elev'] = np.interp(
                 nwm_recurr_intervals.discharge_cfs.values,
                 usgs_site['discharge_cfs'],

From cf5eee824d109779b4e881d7d216d9f1e7af41ef Mon Sep 17 00:00:00 2001
From: ZahraGhahremani
Date: Tue, 14 Jan 2025 19:18:01 +0000
Subject: [PATCH 7/9] minor changes

---
 tools/rating_curve_comparison.py | 52 --------------------------------
 1 file changed, 52 deletions(-)

diff --git a/tools/rating_curve_comparison.py b/tools/rating_curve_comparison.py
index 22afccb8..e5a83a15 100755
--- a/tools/rating_curve_comparison.py
+++ b/tools/rating_curve_comparison.py
@@ -599,7 +599,6 @@ def generate_single_plot(rc, plot_filename, recurr_data_table):
 
 def generate_facet_plot(rc, plot_filename, recurr_data_table):
     # Filter FIM elevation based on USGS data
-    gage_max_q = {}
     for gage in rc.location_id.unique():
         # print(recurr_data_table.head)
         try:
@@ -607,7 +606,6 @@ def generate_facet_plot(rc, plot_filename, recurr_data_table):
             max_elev = rc.loc[(rc.location_id == gage) & (rc.source == 'USGS')].elevation_ft.max()
             min_q = rc.loc[(rc.location_id == gage) & (rc.source == 'USGS')].discharge_cfs.min()
             max_q = rc.loc[(rc.location_id == gage) & (rc.source == 'USGS')].discharge_cfs.max()
-            gage_max_q[gage] = max_q
             ri100 = recurr_data_table[
                 (recurr_data_table.location_id == gage) & (recurr_data_table.source == 'FIM')
             ].discharge_cfs.max()
@@ -715,7 +713,6 @@ def generate_facet_plot(rc, plot_filename, recurr_data_table):
 
     ## Plot recurrence intervals
     axes = g.axes_dict
-    recurr_data_max = {}
     for gage in axes:
         ax = axes[gage]
         plt.sca(ax)
@@ -724,7 +721,6 @@ def generate_facet_plot(rc, plot_filename, recurr_data_table):
                 (recurr_data_table.location_id == gage) & (recurr_data_table.source == 'FIM')
             ].filter(items=['recurr_interval', 'discharge_cfs'])
             recurr_q_max = recurr_data['discharge_cfs'].max()
-            recurr_data_max[gage] = recurr_q_max
             for i, r in recurr_data.iterrows():
                 if not r.recurr_interval.isnumeric():
                     continue  # skip catfim flows
@@ -744,24 +740,6 @@ def generate_facet_plot(rc, plot_filename, recurr_data_table):
                 logging.info("WARNING: Could not plot recurrence intervals...")
                 logging.info(f'Summary: {summary} \n Exception: \n {repr(ex)}')
 
-    padding = 0.05
-    for gage in g.axes_dict:
-        ax = g.axes_dict[gage]
-        max_q = gage_max_q.get(gage, None)
-        recurr_q_max = recurr_data_max.get(gage, None)
-        if max_q is not None and not np.isnan(max_q):
-            if max_q > recurr_q_max:
-                max_x = max_q
-            else:
-                max_x = recurr_q_max + (
-                    0.001 * recurr_q_max
-                )  # To make sure vertical lines are displayed in the plot
-        # For gages without USGS rating curve data
-        else:
-            max_x = rc.discharge_cfs.max()
-        padding_value = max_x * padding
-        ax.set_xlim(0 - padding_value, max_x)
-
     # Adjust the arrangement of the plots
     g.fig.tight_layout(w_pad=1)
     g.add_legend()
@@ -970,36 +948,6 @@ def generate_rc_and_rem_plots(rc, plot_filename, recurr_data_table, branches_fol
     plt.close()
 
 
-# def get_recurr_intervals_fim(site_rc, usgs_crosswalk, nwm_recurr_intervals, feature_index):
-#     usgs_site = site_rc.merge(usgs_crosswalk, on="location_id")
-#     nwm_ids = len(usgs_site.feature_id.drop_duplicates())
-
-#     if nwm_ids > 0:
-#         try:
-#             nwm_recurr_intervals = nwm_recurr_intervals.copy().loc[
-#                 nwm_recurr_intervals.feature_id == usgs_site.feature_id.drop_duplicates().loc[feature_index]
-#             ]
-#             nwm_recurr_intervals['pred_elev'] = np.interp(
-#                 nwm_recurr_intervals.discharge_cfs.values,
-#                 usgs_site['discharge_cfs'],
-#                 usgs_site['elevation_ft'],
-#                 left=np.nan,
-#                 right=np.nan,
-#             )

-#             return nwm_recurr_intervals
-#         except Exception as ex:
-#             summary = traceback.StackSummary.extract(traceback.walk_stack(None))
-#             # logging.info("WARNING: get_recurr_intervals failed for some reason....")
-#             # logging.info(f"*** {ex}")
-#             # logging.info(''.join(summary.format()))
-#             print(summary, repr(ex))
-#             return []

-#     else:
-#         return []
-
-
 def get_recurr_intervals(site_rc, usgs_crosswalk, nwm_recurr_intervals, feature_index=None):
     usgs_site = site_rc.merge(usgs_crosswalk, on="location_id")
     nwm_ids = len(usgs_site.feature_id.drop_duplicates())
From ba5c730e343e17692fbee4dfe2d10c3f53f28d11 Mon Sep 17 00:00:00 2001
From: ZahraGhahremani
Date: Tue, 14 Jan 2025 19:29:42 +0000
Subject: [PATCH 8/9] changed changelog

---
 docs/CHANGELOG.md | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 51ecc86e..8e1fd9cc 100755
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -7,7 +7,10 @@ Fixed Sierra test bugs to draw the vertical lines.
 
 ### Changes
 
-- `tools/rating_curve_comparison.py`
+- `tools/rating_curve_comparison.py`: Modified the script to make sure vertical lines are displayed
+
+
+
 
 ## v4.5.13.7 - 2025-01-10 - [PR#1379](https://github.com/NOAA-OWP/inundation-mapping/pull/1379)
 
 There are many sites in non-CONUS regions (AK, PR, HI) where we would like to run CatFIM but they are being excluded because they are not NWM forecast points. This update brings back the double API pull and adds in some code to filter out duplicate (and NULL) lids from the metadata lists.

From 639a3de893871ffa730401bedbb450c5587c67ae Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E2=80=9CRobHanna-NOAA=E2=80=9D?= <“Robert.Hanna@NOAA.gov”>
Date: Fri, 24 Jan 2025 20:13:57 +0000
Subject: [PATCH 9/9] update changelog

---
 docs/CHANGELOG.md | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index cc025a6e..e44c4dcf 100755
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -1,13 +1,17 @@
 All notable changes to this project will be documented in this file.
 We follow the [Semantic Versioning 2.0.0](http://semver.org/) format.
 
-## v4.5.x.x - 2025-01-02 - [PR#1388]https://github.com/NOAA-OWP/inundation-mapping/pull/1388)
+## v4.5.3.10 - 2025-01-24 - [PR#1388]https://github.com/NOAA-OWP/inundation-mapping/pull/1388)
 
 Fixed Sierra test bugs to draw the vertical lines.
 
 ### Changes
 
 - `tools/rating_curve_comparison.py`: Modified the script to make sure vertical lines are displayed
+
+
+
+
 ## v4.5.13.9 - 2025-01-24 - [PR#1399](https://github.com/NOAA-OWP/inundation-mapping/pull/1399)
 
 This update improves stage-based CatFIM by detecting and correcting instances where the stage value provided in the WRDS database is actually stage + elevation (which is actually water surface elevation and, uncaught, causes overflooding).