Skip to content

Commit 649faec

Browse files
authored
Merge pull request #1067 from MetOffice/1030_collapse_by_validity_time
Collapse by validity time
2 parents a54f24c + de1d671 commit 649faec

File tree

2 files changed

+115
-0
lines changed

2 files changed

+115
-0
lines changed

src/CSET/operators/collapse.py

Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -183,6 +183,80 @@ def collapse_by_hour_of_day(
183183
return collapsed_cube
184184

185185

186+
def collapse_by_validity_time(
    cube: iris.cube.Cube | iris.cube.CubeList,
    method: str,
    additional_percent: float | None = None,
    **kwargs,
) -> iris.cube.Cube:
    """Collapse a cube around validity time for multiple cases.

    First checks if the data can be aggregated easily. Then creates a new cube
    by slicing over the time dimensions, removing the time dimensions,
    re-merging the data, and creating a new time coordinate. It then collapses
    by the new time coordinate for a specified method using the collapse
    function.

    Arguments
    ---------
    cube: iris.cube.Cube | iris.cube.CubeList
        Cube to collapse by validity time or CubeList that will be converted
        to a cube before collapsing by validity time.
    method: str
        Type of collapse i.e. method: 'MEAN', 'MAX', 'MIN', 'MEDIAN',
        'PERCENTILE'. For 'PERCENTILE' the additional_percent must be
        specified.
    additional_percent: float | None, optional
        Percentile value(s) to compute; required when method is 'PERCENTILE'.
    **kwargs
        Accepted for interface compatibility; currently unused.

    Returns
    -------
    cube: iris.cube.Cube
        Single variable collapsed by validity time based on chosen method.

    Raises
    ------
    ValueError
        If additional_percent wasn't supplied while using PERCENTILE method.
    """
    if method == "PERCENTILE" and additional_percent is None:
        raise ValueError("Must specify additional_percent")
    # Ensure the cube can be aggregated over multiple times.
    cube_to_collapse = ensure_aggregatable_across_cases(cube)
    # Convert to a cube that is split by validity time.
    # Slice over both time dimensions so each sub-cube holds a single
    # (forecast_period, forecast_reference_time) pair.
    new_cubelist = iris.cube.CubeList(
        cube_to_collapse.slices_over(["forecast_period", "forecast_reference_time"])
    )
    # Remove forecast_period and forecast_reference_time coordinates so that
    # sub-cubes sharing a validity time become merge-compatible.
    for sub_cube in new_cubelist:
        sub_cube.remove_coord("forecast_period")
        sub_cube.remove_coord("forecast_reference_time")
    # Create new CubeList by merging with unique=False to produce a validity
    # time cube (one cube per distinct validity time).
    merged_list_1 = new_cubelist.merge(unique=False)
    # Create a new "fake" coordinate with a distinct value per cube and apply
    # it to each remaining cube to allow final merging into a single cube.
    equalised_validity_time = iris.coords.AuxCoord(
        points=0, long_name="equalised_validity_time", units="1"
    )
    for eq_valid_time, sub_cube in enumerate(merged_list_1):
        sub_cube.add_aux_coord(equalised_validity_time.copy(points=eq_valid_time))

    # Merge CubeList to create final cube.
    final_cube = merged_list_1.merge_cube()
    # Collapse over the fake coordinate to represent collapsing over validity
    # time.
    if method == "PERCENTILE":
        collapsed_cube = collapse(
            final_cube,
            "equalised_validity_time",
            method,
            additional_percent=additional_percent,
        )
    else:
        collapsed_cube = collapse(final_cube, "equalised_validity_time", method)
    return collapsed_cube
258+
259+
186260
# TODO
187261
# Collapse function that calculates means, medians etc across members of an
188262
# ensemble or stratified groups. Need to allow collapse over realisation

tests/operators/test_collapse.py

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -160,3 +160,44 @@ def test_collapse_by_lead_time_cube_list_percentile(
160160
rtol=1e-06,
161161
atol=1e-02,
162162
)
163+
164+
165+
def test_collapse_by_validity_time(long_forecast_multi_day):
    """Reduce a dimension of a cube by validity time."""
    result = collapse.collapse_by_validity_time(long_forecast_multi_day, "MEAN")
    # The collapsed cube should retain only the time and grid dimensions.
    assert repr(result) == (
        "<iris 'Cube' of air_temperature / (K) "
        "(time: 145; grid_latitude: 3; grid_longitude: 3)>"
    )
170+
171+
172+
def test_collapse_by_validity_time_cubelist(long_forecast_many_cubes):
    """Convert to cube and reduce a dimension by validity time."""
    result = collapse.collapse_by_validity_time(long_forecast_many_cubes, "MEAN")
    # A CubeList input should collapse to the same single-cube shape.
    assert repr(result) == (
        "<iris 'Cube' of air_temperature / (K) "
        "(time: 145; grid_latitude: 3; grid_longitude: 3)>"
    )
179+
180+
181+
def test_collapse_by_validity_time_percentile(long_forecast_multi_day):
    """Reduce by validity time with percentiles."""
    # Test successful collapsing by validity time; two percentile levels
    # should appear as a new leading dimension.
    result = collapse.collapse_by_validity_time(
        long_forecast_multi_day, "PERCENTILE", additional_percent=[25, 75]
    )
    assert repr(result) == (
        "<iris 'Cube' of air_temperature / (K) "
        "(percentile_over_equalised_validity_time: 2; "
        "time: 145; grid_latitude: 3; grid_longitude: 3)>"
    )
189+
190+
191+
def test_collapse_by_validity_time_percentile_fail(long_forecast_multi_day):
    """Test not specifying additional percent fails."""
    # Pin the error message so an unrelated ValueError raised deeper in the
    # call stack cannot make this test pass spuriously.
    with pytest.raises(ValueError, match="Must specify additional_percent"):
        collapse.collapse_by_validity_time(long_forecast_multi_day, "PERCENTILE")
195+
196+
197+
def test_collapse_by_validity_time_cubelist_percentile(long_forecast_many_cubes):
    """Convert to cube and reduce by validity time with percentiles."""
    result = collapse.collapse_by_validity_time(
        long_forecast_many_cubes, "PERCENTILE", additional_percent=[25, 75]
    )
    # CubeList input plus percentiles: expect a percentile dimension of
    # length 2 in front of the usual time/grid dimensions.
    assert repr(result) == (
        "<iris 'Cube' of air_temperature / (K) "
        "(percentile_over_equalised_validity_time: 2; "
        "time: 145; grid_latitude: 3; grid_longitude: 3)>"
    )

0 commit comments

Comments
 (0)