Fix LightningTimeCompositor failing when data outside of the time range is passed #3057

Open · wants to merge 4 commits into main
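To illustrate the behaviour this PR changes, here is a minimal sketch based on the updated test below (the constructor arguments and attributes mirror the test; the data values are illustrative, not taken from real LI files):

import datetime

import dask.array as da
import xarray as xr

from satpy.composites.lightning import LightningTimeCompositor

comp = LightningTimeCompositor("flash_age", prerequisites=["flash_time"],
                               standard_name="lightning_time",
                               time_range=60,  # minutes
                               reference_time="end_time")

# Every flash happened before end_time - time_range (i.e. before 10:00).
flash_age = xr.DataArray(
    da.array(["2024-08-01T09:00:00"], dtype="datetime64[ns]"),
    dims=["y"],
    coords={"crs": "8B +proj=longlat +ellps=WGS84 +type=crs"},
    attrs={"variable_name": "flash_time", "name": "flash_time",
           "start_time": datetime.datetime(2024, 8, 1, 10, 0, 0),
           "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),
           "reader": "li_l2_nc"},
    name="flash_time")

# Before this change: ValueError ("data size is zero").
# After this change (per the updated test): an all-NaN composite and a logged warning.
res = comp([flash_age])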
32 changes: 15 additions & 17 deletions satpy/composites/lightning.py
@@ -28,11 +28,11 @@


class LightningTimeCompositor(CompositeBase):
"""Class used to create the flash_age compositor usefull for lighting event visualisation.
"""Compositor class for lightning visualisation based on time.

The datas used are dates related to the lightning event that should be normalised between
0 and 1. The value 1 corresponds to the latest lightning event and the value 0 corresponds
to the latest lightning event - time_range. The time_range is defined in the satpy/etc/composites/li.yaml
The compositor normalises the lightning event times between 0 and 1.
The value 1 corresponds to the latest lightning event and the value 0 corresponds
to the latest lightning event - time_range. The time_range is defined in the composite recipe
and is in minutes.
"""
def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwargs):
@@ -49,7 +49,8 @@ def _normalize_time(self, data:xr.DataArray, attrs:dict) -> xr.DataArray:

The range of the normalised data is between 0 and 1 where 0 corresponds to the date end_time - time_range
and 1 to the end_time. Where end_times represent the latest lightning event and time_range is the range of
time you want to see the event.The dates that are earlier to end_time - time_range are removed.
time in minutes visualised in the composite.
The dates that are earlier than end_time - time_range are set to NaN.

Args:
data (xr.DataArray): datas containing dates to be normalised
@@ -62,16 +63,14 @@ def _normalize_time(self, data:xr.DataArray, attrs:dict) -> xr.DataArray:
end_time = np.array(np.datetime64(data.attrs[self.reference_time_attr]))
# Compute the minimum time value based on the time range
begin_time = end_time - np.timedelta64(self.time_range, "m")
# Drop values that are bellow begin_time
# Invalidate values that are before begin_time
condition_time = data >= begin_time
condition_time_computed = condition_time.compute()
data = data.where(condition_time_computed, drop=True)
# exit if data is empty afer filtering
if data.size == 0 :
LOG.error(f"All the flash_age events happened before {begin_time}")
raise ValueError(f"Invalid data: data size is zero. All flash_age "
f"events occurred before the specified start time ({begin_time})."
)
data = data.where(condition_time)

# log a warning if data is empty after filtering
if np.all(np.isnan(data)) :
LOG.warning(f"All the flash_age events happened before {begin_time}, the composite will be empty.")

# Normalize the time values
normalized_data = (data - begin_time) / (end_time - begin_time)
# Ensure the result is still an xarray.DataArray
@@ -91,15 +90,14 @@ def _redefine_metadata(self,attrs:dict)->dict:
attrs (dict): data's attributes

Returns:
dict: atualised attributes
dict: updated attributes
"""
attrs["name"] = self.standard_name
attrs["standard_name"] = self.standard_name
# Attributes to describe the values range
return attrs


def __call__(self,projectables, nonprojectables=None, **attrs):
def __call__(self, projectables, nonprojectables=None, **attrs):
"""Normalise the dates."""
data = projectables[0]
new_attrs = data.attrs.copy()
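As a worked example of the masking-and-normalisation step introduced above, here is a standalone sketch using plain xarray/numpy, with end_time and time_range chosen to match the test data below. Events outside the window are masked rather than dropped, so the output keeps its shape, and the masked entries come out as NaN after the division.

import numpy as np
import xarray as xr

end_time = np.datetime64("2024-08-01T11:00:00")
time_range = 60  # minutes, as defined in the composite recipe
begin_time = end_time - np.timedelta64(time_range, "m")

flash_time = xr.DataArray(
    np.array(["2024-08-01T09:00:00", "2024-08-01T10:00:00",
              "2024-08-01T10:30:00", "2024-08-01T11:00:00"],
             dtype="datetime64[ns]"),
    dims=["y"])

# Masking instead of dropping keeps the array shape; out-of-range times become NaT.
masked = flash_time.where(flash_time >= begin_time)

# begin_time maps to 0.0 and end_time to 1.0; NaT entries become NaN.
normalized = (masked - begin_time) / (end_time - begin_time)
print(normalized.values)  # [nan 0.  0.5 1. ]

# An all-empty composite can be detected on the float output,
# e.g. with normalized.isnull().all().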
57 changes: 25 additions & 32 deletions satpy/tests/compositor_tests/test_lightning.py
@@ -23,7 +23,6 @@

import dask.array as da
import numpy as np
import pytest
import xarray as xr

from satpy.composites.lightning import LightningTimeCompositor
@@ -32,7 +31,7 @@
def test_flash_age_compositor():
"""Test the flash_age compsitor by comparing two xarrays object."""
comp = LightningTimeCompositor("flash_age",prerequisites=["flash_time"],
standard_name="ligtning_time",
standard_name="lightning_time",
time_range=60,
reference_time="end_time")
attrs_flash_age = {"variable_name": "flash_time","name": "flash_time",
@@ -41,53 +40,47 @@ def test_flash_age_compositor():
flash_age_value = da.array(["2024-08-01T09:00:00",
"2024-08-01T10:00:00", "2024-08-01T10:30:00","2024-08-01T11:00:00"], dtype="datetime64[ns]")
flash_age = xr.DataArray(
flash_age_value,
dims=["y"],
coords={
"crs": "8B +proj=longlat +ellps=WGS84 +type=crs"
},attrs = attrs_flash_age,name="flash_time")
flash_age_value,
dims=["y"],
coords={
"crs": "8B +proj=longlat +ellps=WGS84 +type=crs"},
attrs = attrs_flash_age,
name="flash_time")
res = comp([flash_age])
expected_attrs = {"variable_name": "flash_time","name": "lightning_time",
"start_time": datetime.datetime(2024, 8, 1, 10, 50, 0),
"end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),"reader": "li_l2_nc",
"standard_name": "ligtning_time"
"standard_name": "lightning_time"
}
expected_array = xr.DataArray(
da.array([0.0,0.5,1.0]),
dims=["y"],
coords={
"crs": "8B +proj=longlat +ellps=WGS84 +type=crs"
},attrs = expected_attrs,name="flash_time")
expected_array = xr.DataArray(da.array([np.nan, 0.0,0.5,1.0]),
dims=["y"],
coords={
"crs": "8B +proj=longlat +ellps=WGS84 +type=crs"},
attrs = expected_attrs,
name="flash_time")
xr.testing.assert_equal(res,expected_array)

def test_empty_array_error(caplog):
"""Test when the filtered array is empty."""
comp = LightningTimeCompositor("flash_age",prerequisites=["flash_time"],
standard_name="ligtning_time",
standard_name="lightning_time",
time_range=60,
reference_time="end_time")
attrs_flash_age = {"variable_name": "flash_time","name": "flash_time",
"start_time": np.datetime64(datetime.datetime(2024, 8, 1, 10, 0, 0)),
"end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),
"reader": "li_l2_nc"}
flash_age_value = da.array(["2024-08-01T09:00:00"], dtype="datetime64[ns]")
flash_age = xr.DataArray(
flash_age_value,
dims=["y"],
coords={
"crs": "8B +proj=longlat +ellps=WGS84 +type=crs"
},attrs = attrs_flash_age,name="flash_time")
with caplog.at_level(logging.ERROR):
# Simulate the operation that raises the exception
with pytest.raises(ValueError, match="data size is zero") as excinfo:
_ = comp([flash_age])

# Assert the exception message
assert str(excinfo.value) == (
f"Invalid data: data size is zero. All flash_age events occurred before "
f"the specified start time ({attrs_flash_age['start_time']})."
)
assert "All the flash_age events happened before 2024-08-01T10:00:00" in caplog.text
flash_age = xr.DataArray(flash_age_value,
dims=["y"],
coords={
"crs": "8B +proj=longlat +ellps=WGS84 +type=crs"},
attrs = attrs_flash_age,
name="flash_time")
with caplog.at_level(logging.WARNING):
_ = comp([flash_age])
# Assert that the log contains the expected warning message
assert "All the flash_age events happened before" in caplog.text

def test_update_missing_metadata():
"""Test the _update_missing_metadata method."""
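For completeness, a hypothetical end-to-end usage sketch (file paths are placeholders; the composite and reader names are taken from the docstring and tests, i.e. the flash_age composite of the li_l2_nc reader): after this change, loading should no longer fail when every flash in the input is older than the configured time_range; the composite is simply empty and a warning is logged.

from glob import glob

from satpy import Scene

# Placeholder paths to MTG LI L2 files.
files = glob("/path/to/li_l2/*.nc")

scn = Scene(filenames=files, reader="li_l2_nc")
scn.load(["flash_age"])
# If every flash in the files predates end_time - time_range, the "flash_age"
# composite is now all NaN and a warning is logged, instead of a ValueError
# aborting the load.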