diff --git a/.github/workflows/subsurface.yml b/.github/workflows/subsurface.yml
index cd1e60c9e..01f478696 100644
--- a/.github/workflows/subsurface.yml
+++ b/.github/workflows/subsurface.yml
@@ -57,7 +57,6 @@ jobs:
# Testing against our latest release (including pre-releases)
pip install --pre --upgrade webviz-config webviz-core-components webviz-subsurface-components
-
- name: 📦 Install test dependencies
run: |
pip install .[tests]
@@ -101,7 +100,7 @@ jobs:
git clone --depth 1 --branch $TESTDATA_REPO_BRANCH https://github.com/$TESTDATA_REPO_OWNER/webviz-subsurface-testdata.git
# Copy any clientside script to the test folder before running tests
mkdir ./tests/assets && cp ./webviz_subsurface/_assets/js/* ./tests/assets
- pytest ./tests --headless --forked --testdata-folder ./webviz-subsurface-testdata
+ pytest ./tests --headless --forked -s --testdata-folder ./webviz-subsurface-testdata
rm -rf ./tests/assets
- name: 🐳 Build Docker example image
diff --git a/README.md b/README.md
index 5951c71d4..e0765a33f 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,13 @@
+
+This package will be deprecated - we are instead moving all collaboration focus to the reusable React and Dash components in:
+
+ - https://github.com/equinor/webviz-subsurface-components
+ - https://github.com/equinor/webviz-core-components
+
+and the FMU use case to https://github.com/equinor/webviz
+
+
+
+
[](https://badge.fury.io/py/webviz-subsurface)
[](https://github.com/equinor/webviz-subsurface/actions?query=branch%3Amaster)
[](https://www.python.org/)
diff --git a/setup.py b/setup.py
index 9688ea0b4..980d66393 100644
--- a/setup.py
+++ b/setup.py
@@ -107,9 +107,8 @@
"statsmodels>=0.12.1", # indirect dependency through https://plotly.com/python/linear-fits/
"xtgeo>=2.20.0",
"vtk>=9.2.2",
- "webviz-config",
- "webviz-core-components>=0.6",
- "webviz-subsurface-components==1.0.2",
+ "webviz-config>=0.6.5",
+ "webviz-subsurface-components>=1.0.3",
],
extras_require={"tests": TESTS_REQUIRE},
setup_requires=["setuptools_scm~=3.2"],
diff --git a/tests/integration_tests/plugin_tests/__init__.py b/tests/integration_tests/plugin_tests/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/tests/integration_tests/plugin_tests/test_bhp_qc.py b/tests/integration_tests/plugin_tests/test_bhp_qc.py
deleted file mode 100644
index 5840f0626..000000000
--- a/tests/integration_tests/plugin_tests/test_bhp_qc.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import BhpQc
-from webviz_config.testing import WebvizComposite
-
-
-def test_bhp_qc(_webviz_duo: WebvizComposite, shared_settings: dict) -> None:
- plugin = BhpQc(
- shared_settings["HM_SETTINGS"], ensembles=shared_settings["HM_ENSEMBLES"]
- )
-
- _webviz_duo.start_server(plugin)
- assert not _webviz_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_history_match.py b/tests/integration_tests/plugin_tests/test_history_match.py
deleted file mode 100644
index 52b3be711..000000000
--- a/tests/integration_tests/plugin_tests/test_history_match.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import HistoryMatch
-
-
-def test_history_match(dash_duo, app, testdata_folder, shared_settings) -> None:
- plugin = HistoryMatch(
- shared_settings["HM_SETTINGS"],
- ensembles=shared_settings["HM_ENSEMBLES"],
- observation_file=testdata_folder
- / "01_drogon_ahm"
- / "share"
- / "observations"
- / "tables"
- / "ert_observations.yml",
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- assert not dash_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_line_plotter_fmu.py b/tests/integration_tests/plugin_tests/test_line_plotter_fmu.py
deleted file mode 100644
index a5c9ba697..000000000
--- a/tests/integration_tests/plugin_tests/test_line_plotter_fmu.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import LinePlotterFMU
-
-
-def test_line_plotter_fmu(dash_duo, app, testdata_folder, shared_settings) -> None:
- plugin = LinePlotterFMU(
- app,
- shared_settings["HM_SETTINGS"],
- aggregated_csvfile=testdata_folder
- / "reek_test_data"
- / "aggregated_data"
- / "smry_hm.csv",
- aggregated_parameterfile=testdata_folder
- / "reek_test_data"
- / "aggregated_data"
- / "parameters_hm.csv",
- observation_file=testdata_folder / "reek_test_data" / "observations.yml",
- observation_group="smry",
- remap_observation_values={"DATE": "date"},
- initial_data={
- "x": "DATE",
- "y": "FOPR",
- "ensembles": ["iter-0", "iter-3"],
- "colors": {"iter-0": "red", "iter-3": "blue"},
- },
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- assert not dash_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_parameter_analysis.py b/tests/integration_tests/plugin_tests/test_parameter_analysis.py
deleted file mode 100644
index db20565c5..000000000
--- a/tests/integration_tests/plugin_tests/test_parameter_analysis.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import warnings
-
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import ParameterAnalysis
-from webviz_config.testing import WebvizComposite
-
-
-def test_parameter_analysis(
- _webviz_duo: WebvizComposite, shared_settings: dict
-) -> None:
- plugin = ParameterAnalysis(
- shared_settings["HM_SETTINGS"],
- ensembles=shared_settings["HM_ENSEMBLES"],
- column_keys=["WWCT:*"],
- time_index="monthly",
- drop_constants=True,
- )
- _webviz_duo.start_server(plugin)
-
- logs = []
- for log in _webviz_duo.get_logs() or []:
- if "dash_renderer" in log.get("message"):
- warnings.warn(log.get("message"))
- else:
- logs.append(log)
- assert not logs
diff --git a/tests/integration_tests/plugin_tests/test_parameter_correlation.py b/tests/integration_tests/plugin_tests/test_parameter_correlation.py
deleted file mode 100644
index 4b75d2b69..000000000
--- a/tests/integration_tests/plugin_tests/test_parameter_correlation.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import ParameterCorrelation
-from webviz_config.testing import WebvizComposite
-
-
-def test_parameter_correlation(_webviz_duo: WebvizComposite, shared_settings) -> None:
- parameter_correlation = ParameterCorrelation(
- shared_settings["HM_SETTINGS"],
- ensembles=shared_settings["HM_ENSEMBLES"],
- )
-
- _webviz_duo.start_server(parameter_correlation)
-
- _webviz_duo.toggle_webviz_settings_drawer()
- _webviz_duo.toggle_webviz_settings_group(
- parameter_correlation.view("paracorr").settings_group_unique_id("settings")
- )
- # Using str literals directly, not IDs from the plugin as intended because
- # the run test did not accept the imports
-
- my_component_id = _webviz_duo.view_settings_group_unique_component_id(
- "paracorr", "settings", "shared-ensemble"
- )
- _webviz_duo.wait_for_contains_text(my_component_id, "iter-0")
- assert not _webviz_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_parameter_distribution.py b/tests/integration_tests/plugin_tests/test_parameter_distribution.py
deleted file mode 100644
index d653d6fff..000000000
--- a/tests/integration_tests/plugin_tests/test_parameter_distribution.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import ParameterDistribution
-
-
-def test_parameter_distribution(dash_duo, app, shared_settings) -> None:
- plugin = ParameterDistribution(
- app,
- shared_settings["HM_SETTINGS"],
- ensembles=shared_settings["HM_ENSEMBLES"],
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- assert not dash_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_parameter_parallel_coordinates.py b/tests/integration_tests/plugin_tests/test_parameter_parallel_coordinates.py
deleted file mode 100644
index f0597122d..000000000
--- a/tests/integration_tests/plugin_tests/test_parameter_parallel_coordinates.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import ParameterParallelCoordinates
-
-
-def test_parameter_parallel_coordinates(dash_duo, app, shared_settings) -> None:
- plugin = ParameterParallelCoordinates(
- app,
- shared_settings["HM_SETTINGS"],
- ensembles=shared_settings["HM_ENSEMBLES"],
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- assert not dash_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_parameter_response_correlation.py b/tests/integration_tests/plugin_tests/test_parameter_response_correlation.py
deleted file mode 100644
index 97d8d61b5..000000000
--- a/tests/integration_tests/plugin_tests/test_parameter_response_correlation.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import ParameterResponseCorrelation
-
-
-def test_parameter_response_correlation(dash_duo, app, shared_settings) -> None:
- plugin = ParameterResponseCorrelation(
- app,
- shared_settings["HM_SETTINGS"],
- ensembles=shared_settings["HM_ENSEMBLES"],
- response_file="share/results/volumes/geogrid--vol.csv",
- response_filters={"ZONE": "multi", "REGION": "multi"},
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- assert not dash_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_property_statistics.py b/tests/integration_tests/plugin_tests/test_property_statistics.py
deleted file mode 100644
index 4fc314ade..000000000
--- a/tests/integration_tests/plugin_tests/test_property_statistics.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import warnings
-
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import PropertyStatistics
-
-
-def test_property_statistics(dash_duo, app, shared_settings) -> None:
- plugin = PropertyStatistics(
- app,
- shared_settings["HM_SETTINGS"],
- ensembles=shared_settings["HM_ENSEMBLES"],
- statistics_file="share/results/tables/grid_property_statistics_geogrid.csv",
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- logs = []
- for log in dash_duo.get_logs() or []:
- if "dash_renderer" in log.get("message"):
- warnings.warn(log.get("message"))
- else:
- logs.append(log)
- assert not logs
diff --git a/tests/integration_tests/plugin_tests/test_pvt_plot.py b/tests/integration_tests/plugin_tests/test_pvt_plot.py
deleted file mode 100644
index 29a73b5ed..000000000
--- a/tests/integration_tests/plugin_tests/test_pvt_plot.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import PvtPlot
-from webviz_config.testing import WebvizComposite
-
-
-def test_pvt_plot(_webviz_duo: WebvizComposite, shared_settings: dict) -> None:
- plugin = PvtPlot(
- shared_settings["HM_SETTINGS"],
- ensembles=shared_settings["HM_ENSEMBLES"],
- pvt_relative_file_path="share/results/tables/pvt.csv",
- )
-
- _webviz_duo.start_server(plugin)
-
- assert not _webviz_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_relative_permeability.py b/tests/integration_tests/plugin_tests/test_relative_permeability.py
deleted file mode 100644
index 2dddda5cb..000000000
--- a/tests/integration_tests/plugin_tests/test_relative_permeability.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import RelativePermeability
-
-
-def test_relative_permeability(dash_duo, app, shared_settings) -> None:
- plugin = RelativePermeability(
- app,
- shared_settings["HM_SETTINGS"],
- ensembles=shared_settings["HM_ENSEMBLES"],
- relpermfile="share/results/tables/relperm.csv",
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- assert not dash_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries.py b/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries.py
deleted file mode 100644
index 3ff1481b2..000000000
--- a/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import warnings
-
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import ReservoirSimulationTimeSeries
-
-
-def test_reservoir_simulation_timeseries(
- dash_duo, app, shared_settings, testdata_folder
-) -> None:
- plugin = ReservoirSimulationTimeSeries(
- app,
- shared_settings["HM_SETTINGS"],
- ensembles=shared_settings["HM_ENSEMBLES"],
- obsfile=testdata_folder
- / "01_drogon_ahm"
- / "share"
- / "observations"
- / "tables"
- / "ert_observations.yml",
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
-
- logs = []
- for log in dash_duo.get_logs() or []:
- if "dash_renderer" in log.get("message"):
- warnings.warn(log.get("message"))
- else:
- logs.append(log)
- assert not logs
diff --git a/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries_onebyone.py b/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries_onebyone.py
deleted file mode 100644
index 698a205f7..000000000
--- a/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries_onebyone.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import ReservoirSimulationTimeSeriesOneByOne
-
-
-def test_reservoir_simulation_timeseries_onebyone(
- dash_duo, app, shared_settings
-) -> None:
- plugin = ReservoirSimulationTimeSeriesOneByOne(
- app,
- shared_settings["SENS_SETTINGS"],
- ensembles=shared_settings["SENS_ENSEMBLES"],
- initial_vector="FOPT",
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- assert not dash_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries_regional.py b/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries_regional.py
deleted file mode 100644
index f4d740fda..000000000
--- a/tests/integration_tests/plugin_tests/test_reservoir_simulation_timeseries_regional.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import ReservoirSimulationTimeSeriesRegional
-
-
-def test_reservoir_simulation_timeseries_regional(
- dash_duo, app, shared_settings, testdata_folder
-) -> None:
- plugin = ReservoirSimulationTimeSeriesRegional(
- app,
- shared_settings["HM_SETTINGS"],
- ensembles=shared_settings["HM_ENSEMBLES"],
- fipfile=testdata_folder
- / "01_drogon_ahm"
- / "realization-0"
- / "iter-0"
- / "share"
- / "results"
- / "tables"
- / "fip.yml",
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- assert not dash_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_rft_plotter.py b/tests/integration_tests/plugin_tests/test_rft_plotter.py
deleted file mode 100644
index 6019ce698..000000000
--- a/tests/integration_tests/plugin_tests/test_rft_plotter.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import RftPlotter
-from webviz_config.testing import WebvizComposite
-
-
-def test_rft_plotter(
- _webviz_duo: WebvizComposite, shared_settings, testdata_folder
-) -> None:
- plugin = RftPlotter(
- shared_settings["HM_SETTINGS"],
- ensembles=shared_settings["HM_ENSEMBLES"],
- formations=testdata_folder
- / "01_drogon_ahm"
- / "realization-0"
- / "iter-0"
- / "share"
- / "results"
- / "tables"
- / "formations_res_only.csv",
- faultlines=testdata_folder
- / "01_drogon_ahm"
- / "realization-0"
- / "iter-0"
- / "share"
- / "results"
- / "polygons"
- / "toptherys--gl_faultlines_extract_postprocess.csv",
- )
-
- _webviz_duo.start_server(plugin)
-
- _webviz_duo.toggle_webviz_settings_drawer()
- _webviz_duo.toggle_webviz_settings_group(
- plugin.view("map-view").settings_group_unique_id("map-settings")
- )
- # Using str literals directly, not IDs from the plugin as intended because
- # the run test did not accept the imports
-
- my_component_id = _webviz_duo.view_settings_group_unique_component_id(
- "map-view", "map-settings", "map-ensemble"
- )
- _webviz_duo.wait_for_contains_text(my_component_id, "iter-0")
- assert not _webviz_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_segy_viewer.py b/tests/integration_tests/plugin_tests/test_segy_viewer.py
deleted file mode 100644
index d867ac5e0..000000000
--- a/tests/integration_tests/plugin_tests/test_segy_viewer.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import SegyViewer
-
-
-def test_segy_viewer(dash_duo, app, shared_settings, testdata_folder) -> None:
- plugin = SegyViewer(
- app,
- shared_settings["HM_SETTINGS"],
- segyfiles=[
- testdata_folder
- / "01_drogon_ahm"
- / "realization-0"
- / "iter-0"
- / "share"
- / "results"
- / "seismic"
- / "seismic--amplitude_depth--20180701_20180101.segy"
- ],
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- assert not dash_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_simulation_timeseries_onebyone.py b/tests/integration_tests/plugin_tests/test_simulation_timeseries_onebyone.py
deleted file mode 100644
index 644f972da..000000000
--- a/tests/integration_tests/plugin_tests/test_simulation_timeseries_onebyone.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import warnings
-
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import SimulationTimeSeriesOneByOne
-from webviz_config.testing import WebvizComposite
-
-
-def test_simulation_timeseries_onebyone(
- _webviz_duo: WebvizComposite, shared_settings: dict
-) -> None:
- plugin = SimulationTimeSeriesOneByOne(
- webviz_settings=shared_settings["SENS_SETTINGS"],
- ensembles=shared_settings["SENS_ENSEMBLES"],
- initial_vector="FOPT",
- )
- _webviz_duo.start_server(plugin)
- logs = []
- for log in _webviz_duo.get_logs() or []:
- if "dash_renderer" in log.get("message"):
- warnings.warn(log.get("message"))
- else:
- logs.append(log)
- assert not logs
diff --git a/tests/integration_tests/plugin_tests/test_structural_uncertainty.py b/tests/integration_tests/plugin_tests/test_structural_uncertainty.py
deleted file mode 100644
index 0b10f5546..000000000
--- a/tests/integration_tests/plugin_tests/test_structural_uncertainty.py
+++ /dev/null
@@ -1,244 +0,0 @@
-import json
-
-from dash import Input, Output, State, html
-from webviz_config import WebvizSettings
-
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import StructuralUncertainty
-from webviz_config.themes import default_theme
-
-# pylint: enable=no-name-in-module
-
-
-def stringify_object_id(uuid) -> str:
- """Object ids must be sorted and converted to
- css strings to be recognized as dom elements"""
- sorted_uuid_obj = json.loads(
- json.dumps(
- uuid,
- sort_keys=True,
- separators=(",", ":"),
- )
- )
- string = ["{"]
- for idx, (key, value) in enumerate(sorted_uuid_obj.items()):
- string.append(f'\\"{key}\\"\\:\\"{value}\\"\\')
- if idx == len(sorted_uuid_obj) - 1:
- string.append("}")
- else:
- string.append(",")
- return ("").join(string)
-
-
-# pylint: disable=too-many-locals
-def test_default_configuration(dash_duo, app, testdata_folder) -> None:
- webviz_settings = WebvizSettings(
- shared_settings={
- "scratch_ensembles": {
- "iter-0": str(testdata_folder / "01_drogon_ahm/realization-*/iter-0")
- }
- },
- theme=default_theme,
- )
- plugin = StructuralUncertainty(
- app,
- webviz_settings,
- ensembles=["iter-0"],
- surface_attributes=["ds_extract_postprocess"],
- surface_name_filter=[
- "topvolon",
- "toptherys",
- "topvolantis",
- "basevolantis",
- ],
- wellsuffix=".rmswell",
- wellfolder=testdata_folder / "observed_data" / "wells",
- )
-
- app.layout = plugin.layout
- dash_duo.start_server(app)
-
- intersection_data_id = plugin.uuid("intersection-data")
- dialog_id = plugin.uuid("dialog")
- # Check some initialization
- # Check dropdowns
- for element, return_val in zip(
- ["well", "surface_attribute"], ["55_33-1", "ds_extract_postprocess"]
- ):
- uuid = stringify_object_id(
- uuid={"element": element, "id": intersection_data_id}
- )
- assert dash_duo.wait_for_element(f"#\\{uuid} .Select-value").text == return_val
-
- # Check Selects
- for element, return_val in zip(
- ["surface_names"],
- [["topvolon", "toptherys", "topvolantis", "basevolantis"]],
- ):
- uuid = stringify_object_id(
- uuid={"element": element, "id": intersection_data_id}
- )
- assert (
- dash_duo.wait_for_element(f"#\\{uuid} select").text.splitlines()
- == return_val
- )
-
- # Check Calculation checkbox
- uuid = stringify_object_id(
- uuid={"element": "calculation", "id": intersection_data_id}
- )
- calculation_element = dash_duo.driver.find_elements_by_css_selector(
- f"#\\{uuid} > label > input"
- )
- assert len(calculation_element) == len(
- ["Min", "Max", "Mean", "Realizations", "Uncertainty envelope"]
- )
- for checkbox, selected in zip(
- calculation_element,
- ["true", "true", "true", None, None],
- ):
- assert checkbox.get_attribute("selected") == selected
-
- # Check realizations
- real_filter_btn_uuid = stringify_object_id(
- {
- "id": dialog_id,
- "dialog_id": "realization-filter",
- "element": "button-open",
- }
- )
- real_uuid = stringify_object_id(
- uuid={"element": "realizations", "id": intersection_data_id}
- )
-
- ### Open realization filter and check realizations
- dash_duo.wait_for_element_by_id(real_filter_btn_uuid).click()
- real_selector = dash_duo.wait_for_element_by_id(real_uuid)
- assert real_selector.text.splitlines() == ["0", "1"]
-
- assert not dash_duo.get_logs(), "browser console should contain no error"
-
-
-def test_full_configuration(dash_duo, app, testdata_folder) -> None:
- webviz_settings = WebvizSettings(
- shared_settings={
- "scratch_ensembles": {
- "iter-0": str(testdata_folder / "01_drogon_ahm/realization-*/iter-0"),
- }
- },
- theme=default_theme,
- )
- plugin = StructuralUncertainty(
- app,
- webviz_settings,
- ensembles=["iter-0"],
- surface_attributes=["ds_extract_postprocess"],
- surface_name_filter=["topvolon", "toptherys", "topvolantis", "basevolantis"],
- wellfolder=testdata_folder / "observed_data" / "wells",
- wellsuffix=".rmswell",
- zonelog="Zone",
- initial_settings={
- "intersection_data": {
- "surface_names": ["topvolon", "toptherys", "topvolantis"],
- "surface_attribute": "ds_extract_postprocess",
- "ensembles": [
- "iter-0",
- ],
- "calculation": ["Mean", "Min", "Max"],
- # - Uncertainty envelope
- "well": "55_33-1",
- "realizations": [0, 1],
- "colors": {
- "topvolon": {"iter-0": "#2C82C9"},
- "toptherys": {
- "iter-0": "#512E34",
- },
- "topvolantis": {
- "iter-0": "#EEE657",
- },
- },
- },
- "intersection_layout": {
- "yaxis": {
- "range": [1700, 1550],
- "title": "True vertical depth [m]",
- },
- "xaxis": {"title": "Lateral distance [m]"},
- },
- },
- )
-
- app.layout = plugin.layout
-
- # Injecting a div that will be updated when the plot data stores are
- # changed. Since the plot data are stored in LocalStorage and Selenium
- # has no functionality to wait for LocalStorage to equal some value we
- # instead populate this injected div with some data before we check the content
- # of Localstorage.
- @app.callback(
- Output(plugin.uuid("layout"), "children"),
- Input(plugin.uuid("intersection-graph-layout"), "data"),
- State(plugin.uuid("layout"), "children"),
- )
- def _add_or_update_div(data, children):
- plot_is_updated = html.Div(
- id=plugin.uuid("plot_is_updated"), children=data.get("title")
- )
- if len(children) == 6:
- children[5] = plot_is_updated
- else:
- children.append(plot_is_updated)
-
- return children
-
- dash_duo.start_server(app)
-
- intersection_data_id = plugin.uuid("intersection-data")
-
- # Check some initialization
- # Check dropdowns
- for element, return_val in zip(
- ["well", "surface_attribute"], ["55_33-1", "ds_extract_postprocess"]
- ):
- uuid = stringify_object_id(
- uuid={"element": element, "id": intersection_data_id}
- )
- assert dash_duo.wait_for_text_to_equal(f"#\\{uuid} .Select-value", return_val)
-
- # Wait for the callbacks to execute
- dash_duo.wait_for_text_to_equal(
- f'#{plugin.uuid("plot_is_updated")}',
- "Intersection along well: 55_33-1",
- timeout=30,
- )
-
- # Check that graph data is stored
- graph_data = dash_duo.get_session_storage(plugin.uuid("intersection-graph-data"))
- assert len(graph_data) == 14
- graph_layout = dash_duo.get_session_storage(
- plugin.uuid("intersection-graph-layout")
- )
- assert isinstance(graph_layout, dict)
- assert graph_layout.get("title") == "Intersection along well: 55_33-1"
-
- ### Change well and check graph
- well_uuid = stringify_object_id(
- uuid={"element": "well", "id": intersection_data_id}
- )
-
- apply_btn = dash_duo.wait_for_element_by_id(
- plugin.uuid("apply-intersection-data-selections")
- )
- well_dropdown = dash_duo.wait_for_element_by_id(well_uuid)
- dash_duo.select_dcc_dropdown(well_dropdown, value="55_33-2")
- apply_btn.click()
-
- # dash_duo.wait_for_text_to_equal(
- # f'#{plugin.uuid("plot_is_updated")}',
- # "Intersection along well: 55_33-1",
- # timeout=100,
- # )
- graph_layout = dash_duo.get_session_storage(
- plugin.uuid("intersection-graph-layout")
- )
- # assert graph_layout.get("title") == "Intersection along well: 55_33-2"
diff --git a/tests/integration_tests/plugin_tests/test_surface_viewer_fmu.py b/tests/integration_tests/plugin_tests/test_surface_viewer_fmu.py
deleted file mode 100644
index 1876fe95f..000000000
--- a/tests/integration_tests/plugin_tests/test_surface_viewer_fmu.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import SurfaceViewerFMU
-
-
-def test_surface_viewer_fmu(dash_duo, app, shared_settings, testdata_folder) -> None:
- plugin = SurfaceViewerFMU(
- app,
- shared_settings["HM_SETTINGS"],
- ensembles=shared_settings["HM_ENSEMBLES"],
- wellsuffix=".rmswell",
- wellfolder=testdata_folder
- / "01_drogon_ahm"
- / "realization-0"
- / "iter-0"
- / "share"
- / "results"
- / "wells",
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- assert not dash_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_surface_with_grid_crossection.py b/tests/integration_tests/plugin_tests/test_surface_with_grid_crossection.py
deleted file mode 100644
index efe038bc3..000000000
--- a/tests/integration_tests/plugin_tests/test_surface_with_grid_crossection.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import SurfaceWithGridCrossSection
-
-
-def test_surface_with_grid_crosssection(
- dash_duo, app, shared_settings, testdata_folder
-) -> None:
- plugin = SurfaceWithGridCrossSection(
- app,
- shared_settings["HM_SETTINGS"],
- gridfile=(
- testdata_folder
- / "01_drogon_ahm"
- / "realization-0"
- / "iter-0"
- / "share"
- / "results"
- / "grids"
- / "geogrid.roff"
- ),
- gridparameterfiles=[
- testdata_folder
- / "01_drogon_ahm"
- / "realization-0"
- / "iter-0"
- / "share"
- / "results"
- / "grids"
- / "geogrid--phit.roff"
- ],
- surfacefiles=[
- testdata_folder
- / "01_drogon_ahm"
- / "realization-0"
- / "iter-0"
- / "share"
- / "results"
- / "maps"
- / "topvolon--ds_extract_geogrid.gri"
- ],
- surfacenames=["Top Volon"],
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- assert not dash_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_surface_with_seismic_crossection.py b/tests/integration_tests/plugin_tests/test_surface_with_seismic_crossection.py
deleted file mode 100644
index befd67368..000000000
--- a/tests/integration_tests/plugin_tests/test_surface_with_seismic_crossection.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import SurfaceWithSeismicCrossSection
-
-
-def test_surface_with_seismic_crosssection(
- dash_duo, app, shared_settings, testdata_folder
-) -> None:
- plugin = SurfaceWithSeismicCrossSection(
- app,
- shared_settings["HM_SETTINGS"],
- segyfiles=[
- testdata_folder
- / "01_drogon_ahm"
- / "realization-0"
- / "iter-0"
- / "share"
- / "results"
- / "seismic"
- / "seismic--amplitude_depth--20180701_20180101.segy"
- ],
- surfacefiles=[
- testdata_folder
- / "01_drogon_ahm"
- / "realization-0"
- / "iter-0"
- / "share"
- / "results"
- / "maps"
- / "topvolon--ds_extract_geogrid.gri"
- ],
- surfacenames=["Top Volon"],
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- assert not dash_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_tornado_plotter_fmu.py b/tests/integration_tests/plugin_tests/test_tornado_plotter_fmu.py
deleted file mode 100644
index d7c0ff84b..000000000
--- a/tests/integration_tests/plugin_tests/test_tornado_plotter_fmu.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import TornadoPlotterFMU
-
-
-def test_tornado_plotter_fmu(dash_duo, app, shared_settings) -> None:
- plugin = TornadoPlotterFMU(
- shared_settings["SENS_SETTINGS"],
- ensemble=shared_settings["SENS_ENSEMBLES"][0],
- csvfile="share/results/volumes/geogrid--vol.csv",
- multi_value_selectors=["REGION", "ZONE"],
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- assert not dash_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_vfp_analysis.py b/tests/integration_tests/plugin_tests/test_vfp_analysis.py
deleted file mode 100644
index b1f2377a8..000000000
--- a/tests/integration_tests/plugin_tests/test_vfp_analysis.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import VfpAnalysis
-from webviz_config.testing import WebvizComposite
-
-
-def test_vfp_analysis(_webviz_duo: WebvizComposite, shared_settings: dict) -> None:
- plugin = VfpAnalysis(
- shared_settings["HM_SETTINGS"], vfp_file_pattern="tests/data/vfp.arrow"
- )
-
- _webviz_duo.start_server(plugin)
-
- assert not _webviz_duo.get_logs()
diff --git a/tests/integration_tests/plugin_tests/test_volumetric_analysis.py b/tests/integration_tests/plugin_tests/test_volumetric_analysis.py
deleted file mode 100644
index 6846bc6d6..000000000
--- a/tests/integration_tests/plugin_tests/test_volumetric_analysis.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import warnings
-
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import VolumetricAnalysis
-
-
-def test_volumetrics_no_sens(dash_duo, app, shared_settings) -> None:
- plugin = VolumetricAnalysis(
- shared_settings["HM_SETTINGS"],
- ensembles=shared_settings["HM_ENSEMBLES"],
- volfiles={"geogrid": "geogrid--vol.csv", "simgrid": "simgrid--vol.csv"},
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- logs = []
- for log in dash_duo.get_logs() or []:
- if "dash_renderer" in log.get("message"):
- warnings.warn(log.get("message"))
- else:
- logs.append(log)
- assert not logs
-
-
-def test_volumetrics_sens(dash_duo, app, shared_settings) -> None:
- plugin = VolumetricAnalysis(
- shared_settings["SENS_SETTINGS"],
- ensembles=shared_settings["SENS_ENSEMBLES"],
- volfiles={"geogrid": "geogrid--vol.csv", "simgrid": "simgrid--vol.csv"},
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- logs = []
- for log in dash_duo.get_logs() or []:
- if "dash_renderer" in log.get("message"):
- warnings.warn(log.get("message"))
- else:
- logs.append(log)
- assert not logs
diff --git a/tests/integration_tests/plugin_tests/test_well_log_viewer.py b/tests/integration_tests/plugin_tests/test_well_log_viewer.py
deleted file mode 100644
index 74c510770..000000000
--- a/tests/integration_tests/plugin_tests/test_well_log_viewer.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# pylint: disable=no-name-in-module
-from webviz_config.plugins import WellLogViewer
-
-
-def test_well_log_viewer(dash_duo, app, testdata_folder) -> None:
- wellfolder = testdata_folder / "observed_data" / "wells/"
- plugin = WellLogViewer(
- app,
- wellfolder=wellfolder,
- wellsuffix=".rmswell",
- mdlog="MDepth",
- logtemplates=[f"{testdata_folder}/webviz_examples/all_logs_template.yml"],
- )
- app.layout = plugin.layout
- dash_duo.start_server(app)
- assert not dash_duo.get_logs()
diff --git a/webviz_subsurface/_models/parameter_model.py b/webviz_subsurface/_models/parameter_model.py
index aa7f927e1..b4900ef89 100644
--- a/webviz_subsurface/_models/parameter_model.py
+++ b/webviz_subsurface/_models/parameter_model.py
@@ -151,6 +151,7 @@ def _check_if_sensitivity_run(self) -> bool:
# if mix of gen_kw and sensitivity ensembles add
# dummy sensitivity columns to gen_kw ensembles
gen_kw_mask = self._dataframe["SENSNAME"].isnull()
+ self._dataframe["SENSNAME"] = self._dataframe["SENSNAME"].astype(str)
self._dataframe.loc[gen_kw_mask, "SENSNAME"] = "🎲"
self._dataframe.loc[gen_kw_mask, "SENSCASE"] = "p10_p90"
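The new astype(str) line forces SENSNAME to a string column before the "🎲" marker is written into the gen_kw rows, presumably to avoid pandas' incompatible-dtype warning when those rows are float NaN. A minimal sketch of the pattern on a toy dataframe with invented values:

    import numpy as np
    import pandas as pd

    # Toy stand-in for the parameter dataframe: SENSNAME/SENSCASE are NaN
    # for the gen_kw ensemble and set for the sensitivity ensemble.
    df = pd.DataFrame(
        {
            "ENSEMBLE": ["iter-0", "sens-0"],
            "SENSNAME": [np.nan, "faults"],
            "SENSCASE": [np.nan, "low_high"],
        }
    )

    gen_kw_mask = df["SENSNAME"].isnull()
    # Cast first so the assignment below writes strings into a string column.
    df["SENSNAME"] = df["SENSNAME"].astype(str)
    df.loc[gen_kw_mask, "SENSNAME"] = "🎲"
    df.loc[gen_kw_mask, "SENSCASE"] = "p10_p90"
    print(df)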
diff --git a/webviz_subsurface/_providers/ensemble_grid_provider/_xtgeo_to_vtk_explicit_structured_grid.py b/webviz_subsurface/_providers/ensemble_grid_provider/_xtgeo_to_vtk_explicit_structured_grid.py
index b723de6d3..1e9144580 100644
--- a/webviz_subsurface/_providers/ensemble_grid_provider/_xtgeo_to_vtk_explicit_structured_grid.py
+++ b/webviz_subsurface/_providers/ensemble_grid_provider/_xtgeo_to_vtk_explicit_structured_grid.py
@@ -75,7 +75,7 @@ def _create_vtk_esgrid_from_verts_and_conn(
vtk_cell_array.SetData(8, conn_idarr)
vtk_esgrid = vtkExplicitStructuredGrid()
- vtk_esgrid.SetDimensions(point_dims.tolist())
+ vtk_esgrid.SetDimensions(point_dims.tolist()) # type: ignore
vtk_esgrid.SetPoints(vtk_points)
vtk_esgrid.SetCells(vtk_cell_array)
diff --git a/webviz_subsurface/_providers/ensemble_grid_provider/grid_viz_service.py b/webviz_subsurface/_providers/ensemble_grid_provider/grid_viz_service.py
index cbe75d2c2..a7cda5172 100644
--- a/webviz_subsurface/_providers/ensemble_grid_provider/grid_viz_service.py
+++ b/webviz_subsurface/_providers/ensemble_grid_provider/grid_viz_service.py
@@ -365,15 +365,15 @@ def cut_along_polyline(
plane = vtkPlane()
plane.SetOrigin([x_0, y_0, 0])
- plane.SetNormal(right_vec.tolist())
+ plane.SetNormal(right_vec.tolist()) # type: ignore
plane_0 = vtkPlane()
plane_0.SetOrigin([x_0, y_0, 0])
- plane_0.SetNormal(fwd_vec.tolist())
+ plane_0.SetNormal(fwd_vec.tolist()) # type: ignore
plane_1 = vtkPlane()
plane_1.SetOrigin([x_1, y_1, 0])
- plane_1.SetNormal((-fwd_vec).tolist())
+ plane_1.SetNormal((-fwd_vec).tolist()) # type: ignore
cutter_alg.SetPlane(plane)
cutter_alg.Update()
@@ -478,7 +478,9 @@ def ray_pick(
i_ref = reference(0)
j_ref = reference(0)
k_ref = reference(0)
- grid.ComputeCellStructuredCoords(cell_id, i_ref, j_ref, k_ref, True) # type: ignore[arg-type]
+ grid.ComputeCellStructuredCoords(
+ cell_id, i_ref, j_ref, k_ref, True # type: ignore[arg-type]
+ )
cell_property_val: Optional[np.ndarray] = None
if property_spec:
diff --git a/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_lazy.py b/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_lazy.py
index 377a671f3..c19a22249 100644
--- a/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_lazy.py
+++ b/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_lazy.py
@@ -56,7 +56,7 @@ def _find_first_non_increasing_date_pair(
) -> Tuple[Optional[np.datetime64], Optional[np.datetime64]]:
dates_np = table.column("DATE").to_numpy()
offending_indices = np.asarray(np.diff(dates_np) <= np.timedelta64(0)).nonzero()[0]
- if not offending_indices:
+ if len(offending_indices) == 0:
return (None, None)
return (dates_np[offending_indices[0]], dates_np[offending_indices[0] + 1])
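`offending_indices` is a NumPy index array, so the emptiness test is now spelled with len(): calling bool() on an ndarray is only defined for single-element arrays, raises ValueError for longer ones, and is deprecated for empty ones. A small self-contained illustration with made-up dates:

    import numpy as np

    dates = np.array(
        ["2020-01-01", "2020-02-01", "2020-02-01", "2020-03-01"],
        dtype="datetime64[ms]",
    )
    offending = np.asarray(np.diff(dates) <= np.timedelta64(0)).nonzero()[0]

    if len(offending) == 0:          # robust for 0, 1 or many hits
        print("dates are strictly increasing")
    else:
        # First non-increasing pair: 2020-02-01 followed by 2020-02-01.
        print(dates[offending[0]], dates[offending[0] + 1])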
@@ -313,7 +313,7 @@ def dates(
f"find_unique={et_find_unique_ms}ms)"
)
- return intersected_dates.astype(datetime.datetime).tolist()
+ return intersected_dates.astype(datetime.datetime).tolist() # type: ignore
def get_vectors_df(
self,
@@ -377,7 +377,7 @@ def get_vectors_for_date_df(
table = table.filter(real_mask)
et_filter_ms = timer.lap_ms()
- np_lookup_date = np.datetime64(date, "ms")
+ np_lookup_date = np.datetime64(date).astype("M8[ms]")
table = sample_segmented_multi_real_table_at_date(table, np_lookup_date)
et_resample_ms = timer.lap_ms()
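The lookup date is now normalised to millisecond resolution via astype rather than by passing the unit to the np.datetime64 constructor; for a datetime.datetime input both spellings yield the same value:

    import datetime
    import numpy as np

    date = datetime.datetime(2021, 3, 1, 12, 30)

    print(np.datetime64(date, "ms"))             # 2021-03-01T12:30:00.000
    print(np.datetime64(date).astype("M8[ms]"))  # 2021-03-01T12:30:00.000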
diff --git a/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_presampled.py b/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_presampled.py
index 1caf0211d..0177b673c 100644
--- a/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_presampled.py
+++ b/webviz_subsurface/_providers/ensemble_summary_provider/_provider_impl_arrow_presampled.py
@@ -374,7 +374,7 @@ def dates(
f"find_unique={et_find_unique_ms}ms)"
)
- return intersected_dates.astype(datetime.datetime).tolist()
+ return intersected_dates.astype(datetime.datetime).tolist() # type: ignore
def get_vectors_df(
self,
diff --git a/webviz_subsurface/_providers/ensemble_summary_provider/_resampling.py b/webviz_subsurface/_providers/ensemble_summary_provider/_resampling.py
index d06cbdf7f..364fa6a07 100644
--- a/webviz_subsurface/_providers/ensemble_summary_provider/_resampling.py
+++ b/webviz_subsurface/_providers/ensemble_summary_provider/_resampling.py
@@ -17,7 +17,7 @@ def _truncate_day_to_monday(datetime_day: np.datetime64) -> np.datetime64:
def _quarter_start_month(datetime_day: np.datetime64) -> np.datetime64:
# A bit hackish, utilizes the fact that datetime64 is relative to epoch
# 1970-01-01 which is the first day in Q1.
- datetime_month = np.datetime64(datetime_day, "M")
+ datetime_month = datetime_day.astype("M8[M]")
return datetime_month - (datetime_month.astype(int) % 3)
@@ -30,44 +30,52 @@ def generate_normalized_sample_dates(
"""
if freq == Frequency.DAILY:
- start = np.datetime64(min_date, "D")
- stop = np.datetime64(max_date, "D")
+ start = min_date.astype("M8[D]")
+ stop = max_date.astype("M8[D]")
if stop < max_date:
- stop += 1
- sampledates = np.arange(start, stop + 1)
+ stop += np.timedelta64(1, "D")
+ sampledates = np.arange(start, stop + np.timedelta64(1, "D"))
+
elif freq == Frequency.WEEKLY:
- start = _truncate_day_to_monday(np.datetime64(min_date, "D"))
- stop = _truncate_day_to_monday(np.datetime64(max_date, "D"))
+ start = _truncate_day_to_monday(min_date.astype("M8[D]"))
+ stop = _truncate_day_to_monday(max_date.astype("M8[D]"))
if start > min_date:
- start -= 7
+ start -= np.timedelta64(7, "D")
if stop < max_date:
- stop += 7
- sampledates = np.arange(start, stop + 1, 7)
+ stop += np.timedelta64(7, "D")
+ sampledates = np.arange(
+ start, stop + np.timedelta64(1, "D"), np.timedelta64(7, "D")
+ )
+
elif freq == Frequency.MONTHLY:
- start = np.datetime64(min_date, "M")
- stop = np.datetime64(max_date, "M")
+ start = min_date.astype("M8[M]")
+ stop = max_date.astype("M8[M]")
if stop < max_date:
- stop += 1
- sampledates = np.arange(start, stop + 1)
+ stop += np.timedelta64(1, "M")
+ sampledates = np.arange(start, stop + np.timedelta64(1, "M"))
+
elif freq == Frequency.QUARTERLY:
start = _quarter_start_month(min_date)
stop = _quarter_start_month(max_date)
if stop < max_date:
- stop += 3
- sampledates = np.arange(start, stop + 1, 3)
+ stop += np.timedelta64(3, "M")
+ sampledates = np.arange(
+ start, stop + np.timedelta64(1, "M"), np.timedelta64(3, "M")
+ )
+
elif freq == Frequency.YEARLY:
- start = np.datetime64(min_date, "Y")
- stop = np.datetime64(max_date, "Y")
+ start = min_date.astype("M8[Y]")
+ stop = max_date.astype("M8[Y]")
if stop < max_date:
- stop += 1
- sampledates = np.arange(start, stop + 1)
+ stop += np.timedelta64(1, "Y")
+ sampledates = np.arange(start, stop + np.timedelta64(1, "Y"))
+
else:
raise NotImplementedError(
f"Currently not supporting resampling to frequency {freq}."
)
- sampledates = sampledates.astype("datetime64[ms]")
-
+ sampledates = sampledates.astype("M8[ms]")
return sampledates
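The resampling helpers now state datetime64/timedelta64 units explicitly instead of adding bare integers. A short sketch of what the monthly branch computes for an invented date range:

    import numpy as np

    min_date = np.datetime64("2020-01-15", "ms")
    max_date = np.datetime64("2020-04-02", "ms")

    start = min_date.astype("M8[M]")           # 2020-01
    stop = max_date.astype("M8[M]")            # 2020-04
    if stop < max_date:                        # max_date is past the month start
        stop += np.timedelta64(1, "M")         # 2020-05
    # np.arange steps in whole months for a datetime64[M] array.
    sampledates = np.arange(start, stop + np.timedelta64(1, "M"))
    print(sampledates)                         # ['2020-01' ... '2020-05']
    print(sampledates.astype("M8[ms]")[0])     # 2020-01-01T00:00:00.000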
diff --git a/webviz_subsurface/_providers/ensemble_summary_provider/dev_resampling_perf_testing.py b/webviz_subsurface/_providers/ensemble_summary_provider/dev_resampling_perf_testing.py
deleted file mode 100644
index cdb8b5b8c..000000000
--- a/webviz_subsurface/_providers/ensemble_summary_provider/dev_resampling_perf_testing.py
+++ /dev/null
@@ -1,112 +0,0 @@
-import logging
-import time
-
-import numpy as np
-import pyarrow as pa
-
-from webviz_subsurface._providers.ensemble_summary_provider._resampling import (
- sample_segmented_multi_real_table_at_date,
-)
-
-
-def _create_table(
- num_reals: int, start_date: np.datetime64, end_date: np.datetime64, num_columns: int
-) -> pa.Table:
- date_arr_np = np.empty(0, np.datetime64)
- real_arr_np = np.empty(0, np.int32)
-
- for real in range(0, num_reals):
- dates_for_this_real = np.arange(start_date, end_date + 1)
- dates_for_this_real = dates_for_this_real.astype("datetime64[ms]")
- real_arr_np = np.concatenate(
- (real_arr_np, np.full(len(dates_for_this_real), real))
- )
- date_arr_np = np.concatenate((date_arr_np, dates_for_this_real))
-
- print(
- f"real_arr_np (num unique={len(np.unique(real_arr_np))} len={len(real_arr_np)}):"
- )
- print(real_arr_np)
- print(
- f"date_arr_np (num unique={len(np.unique(date_arr_np))} len={len(date_arr_np)}):"
- )
- print(date_arr_np)
-
- field_list = []
- columndata_list = []
- field_list.append(pa.field("DATE", pa.timestamp("ms")))
- field_list.append(pa.field("REAL", pa.int64()))
- columndata_list.append(pa.array(date_arr_np))
- columndata_list.append(pa.array(real_arr_np))
-
- num_rows = len(real_arr_np)
-
- for colnum in range(0, num_columns):
- if (colnum % 2) == 0:
- metadata = {b"is_rate": b'{"is_rate": False}'}
- else:
- metadata = {b"is_rate": b'{"is_rate": True}'}
-
- field_list.append(pa.field(f"c_{colnum}", pa.float32(), metadata=metadata))
-
- valarr = np.linspace(colnum, colnum + num_rows, num_rows)
- columndata_list.append(pa.array(valarr))
-
- schema = pa.schema(field_list)
- return pa.table(columndata_list, schema=schema)
-
-
-def main() -> None:
- print()
- print("## Running resampling performance tests")
- print("## =================================================")
-
- logging.basicConfig(
- level=logging.WARNING,
- format="%(asctime)s %(levelname)-3s [%(name)s]: %(message)s",
- )
- logging.getLogger("webviz_subsurface").setLevel(level=logging.INFO)
- logging.getLogger("webviz_subsurface").setLevel(level=logging.DEBUG)
-
- # table = _create_table(
- # num_reals=3,
- # start_date=np.datetime64("2020-12-30"),
- # end_date=np.datetime64("2021-01-05"),
- # num_columns=4,
- # )
-
- table = _create_table(
- num_reals=100,
- start_date=np.datetime64("2000-01-01", "M"),
- end_date=np.datetime64("2099-12-31", "M"),
- num_columns=10000,
- )
-
- print("## table shape (rows,columns):", table.shape)
- # print(table.to_pandas())
-
- start_tim = time.perf_counter()
-
- res = sample_segmented_multi_real_table_at_date(
- table, np.datetime64("2098-01-03", "ms")
- )
-
- # res = sample_segmented_multi_real_table_at_date(
- # table, np.datetime64("2098-01-01", "ms")
- # )
-
- elapsed_time_ms = int(1000 * (time.perf_counter() - start_tim))
-
- # print(res)
- # print(res.to_pandas())
-
- print("## res shape:", res.shape)
-
- print(f"## sample at date took: {elapsed_time_ms}ms")
-
-
-# Running:
-# python -m webviz_subsurface._providers.ensemble_summary_provider.dev_resampling_perf_testing
-# -------------------------------------------------------------------------
-if __name__ == "__main__":
- main()
diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/_provider_impl_file.py b/webviz_subsurface/_providers/ensemble_surface_provider/_provider_impl_file.py
index ae8d3ef2c..f9d0b685e 100644
--- a/webviz_subsurface/_providers/ensemble_surface_provider/_provider_impl_file.py
+++ b/webviz_subsurface/_providers/ensemble_surface_provider/_provider_impl_file.py
@@ -11,7 +11,6 @@
from webviz_subsurface._utils.enum_shim import StrEnum
from webviz_subsurface._utils.perf_timer import PerfTimer
-from ._stat_surf_cache import StatSurfCache
from ._surface_discovery import SurfaceFileInfo
from .ensemble_surface_provider import (
EnsembleSurfaceProvider,
@@ -26,7 +25,6 @@
REL_SIM_DIR = "sim"
REL_OBS_DIR = "obs"
-REL_STAT_CACHE_DIR = "stat_cache"
# pylint: disable=too-few-public-methods
@@ -53,8 +51,6 @@ def __init__(
self._provider_dir = provider_dir
self._inventory_df = surface_inventory_df
- self._stat_surf_cache = StatSurfCache(self._provider_dir / REL_STAT_CACHE_DIR)
-
@staticmethod
# pylint: disable=too-many-locals
def write_backing_store(
@@ -237,22 +233,10 @@ def _get_or_create_statistical_surface(
) -> Optional[xtgeo.RegularSurface]:
timer = PerfTimer()
- surf = self._stat_surf_cache.fetch(address)
- if surf:
- LOGGER.debug(
- f"Fetched statistical surface from cache in: {timer.elapsed_s():.2f}s"
- )
- return surf
-
surf = self._create_statistical_surface(address)
- et_create_s = timer.lap_s()
-
- self._stat_surf_cache.store(address, surf)
- et_write_cache_s = timer.lap_s()
LOGGER.debug(
- f"Created and wrote statistical surface to cache in: {timer.elapsed_s():.2f}s ("
- f"create={et_create_s:.2f}s, store={et_write_cache_s:.2f}s), "
+ f"Created statistical surface in: {timer.elapsed_s():.2f}s ("
f"[stat={address.statistic}, "
f"attr={address.attribute}, name={address.name}, date={address.datestr}]"
)
diff --git a/webviz_subsurface/_providers/ensemble_surface_provider/_surface_to_float32_array.py b/webviz_subsurface/_providers/ensemble_surface_provider/_surface_to_float32_array.py
index eb90b8b49..9c0631faf 100644
--- a/webviz_subsurface/_providers/ensemble_surface_provider/_surface_to_float32_array.py
+++ b/webviz_subsurface/_providers/ensemble_surface_provider/_surface_to_float32_array.py
@@ -6,7 +6,7 @@
def surface_to_float32_array(surface: xtgeo.RegularSurface) -> io.BytesIO:
values = surface.values.astype(np.float32)
- values.fill_value = np.NaN
+ values.fill_value = np.nan
values = np.ma.filled(values)
# Rotate 90 deg left.
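`np.NaN` is an alias that NumPy 2.0 removed, hence the switch to lower-case `np.nan`; the masked-array fill itself is unchanged. A minimal sketch with a synthetic stand-in for the xtgeo surface values:

    import numpy as np

    values = np.ma.masked_array(
        data=[[1.0, 2.0], [3.0, 4.0]],
        mask=[[False, True], [False, False]],
    ).astype(np.float32)

    values.fill_value = np.nan       # np.NaN no longer exists in NumPy >= 2.0
    filled = np.ma.filled(values)    # masked cell becomes nan
    print(filled)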
diff --git a/webviz_subsurface/_utils/parameter_response.py b/webviz_subsurface/_utils/parameter_response.py
index c741eda62..b9054d32d 100644
--- a/webviz_subsurface/_utils/parameter_response.py
+++ b/webviz_subsurface/_utils/parameter_response.py
@@ -37,7 +37,9 @@ def filter_and_sum_responses(
if aggregation == "sum":
return df.groupby("REAL").sum().reset_index()[["REAL", response]]
if aggregation == "mean":
- return df.groupby("REAL").mean().reset_index()[["REAL", response]]
+ return (
+ df.groupby("REAL").mean(numeric_only=True).reset_index()[["REAL", response]]
+ )
raise ValueError(f"Unknown aggregation '{aggregation}'.")
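`numeric_only=True` is needed because pandas 2.x no longer silently drops non-numeric columns in `GroupBy.mean()` and raises a TypeError instead. A toy illustration with invented column values (the real dataframe also carries filter columns such as ZONE/REGION):

    import pandas as pd

    df = pd.DataFrame(
        {
            "REAL": [0, 0, 1, 1],
            "ZONE": ["A", "B", "A", "B"],   # non-numeric filter column
            "FOPT": [10.0, 20.0, 30.0, 50.0],
        }
    )

    # Without numeric_only=True, pandas >= 2.0 fails trying to average "ZONE".
    mean_df = df.groupby("REAL").mean(numeric_only=True).reset_index()[["REAL", "FOPT"]]
    print(mean_df)
    #    REAL  FOPT
    # 0     0  15.0
    # 1     1  40.0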
diff --git a/webviz_subsurface/plugins/_bhp_qc/views/_view_functions.py b/webviz_subsurface/plugins/_bhp_qc/views/_view_functions.py
index b29a6c1fc..214d29c6b 100644
--- a/webviz_subsurface/plugins/_bhp_qc/views/_view_functions.py
+++ b/webviz_subsurface/plugins/_bhp_qc/views/_view_functions.py
@@ -20,7 +20,7 @@ def filter_df(df: pd.DataFrame, ensemble: str, wells: List[str]) -> pd.DataFrame
in statistics.
"""
columns = ["ENSEMBLE"] + [f"WBHP:{well}" for well in wells]
- return df.loc[df["ENSEMBLE"] == ensemble][columns].replace(0, np.NaN)
+ return df.loc[df["ENSEMBLE"] == ensemble][columns].replace(0, np.nan)
def calc_statistics(df: pd.DataFrame) -> pd.DataFrame:
diff --git a/webviz_subsurface/plugins/_co2_leakage/_utilities/plume_extent.py b/webviz_subsurface/plugins/_co2_leakage/_utilities/plume_extent.py
index 7fa00d276..fb9e8cadd 100644
--- a/webviz_subsurface/plugins/_co2_leakage/_utilities/plume_extent.py
+++ b/webviz_subsurface/plugins/_co2_leakage/_utilities/plume_extent.py
@@ -105,4 +105,4 @@ def _find_contours(
def _simplify(poly: np.ndarray, simplify_dist: float) -> List[List[float]]:
simplified = shapely.geometry.LineString(poly).simplify(simplify_dist)
- return np.array(simplified.coords).tolist()
+ return np.array(simplified.coords).tolist() # type: ignore
diff --git a/webviz_subsurface/plugins/_co2_leakage/views/mainview/mainview.py b/webviz_subsurface/plugins/_co2_leakage/views/mainview/mainview.py
index 61e72b40a..5d7411ddb 100644
--- a/webviz_subsurface/plugins/_co2_leakage/views/mainview/mainview.py
+++ b/webviz_subsurface/plugins/_co2_leakage/views/mainview/mainview.py
@@ -6,7 +6,7 @@
from dash.development.base_component import Component
from webviz_config.utils import StrEnum
from webviz_config.webviz_plugin_subclasses import ViewABC, ViewElementABC
-from webviz_subsurface_components import DashSubsurfaceViewer
+from webviz_subsurface_components import SubsurfaceViewer
class MainView(ViewABC):
@@ -52,7 +52,7 @@ def inner_layout(self) -> Component:
children=[
html.Div(
[
- DashSubsurfaceViewer(
+ SubsurfaceViewer(
id=self.register_component_unique_id(
self.Ids.DECKGL_MAP
),
diff --git a/webviz_subsurface/plugins/_grid_viewer_fmu/views/view_3d/view_elements/_vtk_view_3d_element.py b/webviz_subsurface/plugins/_grid_viewer_fmu/views/view_3d/view_elements/_vtk_view_3d_element.py
index 4fecd762b..435fe3473 100644
--- a/webviz_subsurface/plugins/_grid_viewer_fmu/views/view_3d/view_elements/_vtk_view_3d_element.py
+++ b/webviz_subsurface/plugins/_grid_viewer_fmu/views/view_3d/view_elements/_vtk_view_3d_element.py
@@ -2,7 +2,7 @@
from dash.development.base_component import Component
from webviz_config.utils import StrEnum
from webviz_config.webviz_plugin_subclasses import ViewElementABC
-from webviz_subsurface_components import DashSubsurfaceViewer
+from webviz_subsurface_components import SubsurfaceViewer
class VTKView3D(ViewElementABC):
@@ -45,7 +45,7 @@ def inner_layout(self) -> Component:
html.Div(
style={"position": "absolute", "width": "100%", "height": "90%"},
children=[
- DashSubsurfaceViewer(
+ SubsurfaceViewer(
id=self.register_component_unique_id(VTKView3D.Ids.VIEW),
layers=[
{
diff --git a/webviz_subsurface/plugins/_history_match.py b/webviz_subsurface/plugins/_history_match.py
index 43b5b2c34..8c1f9d5c2 100644
--- a/webviz_subsurface/plugins/_history_match.py
+++ b/webviz_subsurface/plugins/_history_match.py
@@ -155,7 +155,7 @@ def _get_sorted_edges(number_observation_groups: int) -> Dict[str, list]:
np.random.chisquare(df=1, size=number_observation_groups)
)
- sorted_values = np.flip(sorted_values, 0)
+ sorted_values = np.flip(sorted_values, 0) # type: ignore
p10 = np.percentile(sorted_values, 90, axis=1)
p90 = np.percentile(sorted_values, 10, axis=1)
diff --git a/webviz_subsurface/plugins/_map_viewer_fmu/_tmp_well_pick_provider.py b/webviz_subsurface/plugins/_map_viewer_fmu/_tmp_well_pick_provider.py
index 0ed96028b..6b5aa9e77 100644
--- a/webviz_subsurface/plugins/_map_viewer_fmu/_tmp_well_pick_provider.py
+++ b/webviz_subsurface/plugins/_map_viewer_fmu/_tmp_well_pick_provider.py
@@ -3,6 +3,7 @@
import geojson
import pandas as pd
+from webviz_subsurface._utils.colors import hex_to_rgb
from webviz_subsurface._utils.enum_shim import StrEnum
@@ -64,10 +65,11 @@ def get_geojson(
point = geojson.Point(coordinates=coords, validate=validate_geometry)
geocoll = geojson.GeometryCollection(geometries=[point])
-
properties = {
"name": row[WellPickTableColumns.WELL],
"attribute": str(row[attribute]),
+ "point_color": hex_to_rgb(row.get("point_color", "#000")),
+ "text_color": hex_to_rgb(row.get("text_color", "#000")),
}
feature = geojson.Feature(
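The new point_color/text_color properties are written into each GeoJSON feature as RGB triplets so the DeckGL accessors added in callbacks.py ("@@=properties.point_color", etc.) can pick them up. A rough sketch of the idea with made-up well coordinates and a local stand-in for the imported hex_to_rgb helper:

    from typing import List

    import geojson


    def _hex_to_rgb(hex_color: str) -> List[int]:
        """Local stand-in for webviz_subsurface._utils.colors.hex_to_rgb."""
        hex_color = hex_color.lstrip("#")
        if len(hex_color) == 3:                   # allow short form such as "#000"
            hex_color = "".join(c * 2 for c in hex_color)
        return [int(hex_color[i : i + 2], 16) for i in (0, 2, 4)]


    point = geojson.Point(coordinates=[463500.0, 5930000.0, -1650.0])
    feature = geojson.Feature(
        geometry=geojson.GeometryCollection(geometries=[point]),
        properties={
            "name": "55_33-1",
            "attribute": "topvolon",
            "point_color": _hex_to_rgb("#f00"),     # [255, 0, 0]
            "text_color": _hex_to_rgb("#000000"),   # [0, 0, 0]
        },
    )
    print(feature["properties"]["point_color"])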
diff --git a/webviz_subsurface/plugins/_map_viewer_fmu/_types.py b/webviz_subsurface/plugins/_map_viewer_fmu/_types.py
index 7f786e743..3535d47f9 100644
--- a/webviz_subsurface/plugins/_map_viewer_fmu/_types.py
+++ b/webviz_subsurface/plugins/_map_viewer_fmu/_types.py
@@ -9,6 +9,7 @@ class LayerTypes(StrEnum):
WELLTOPSLAYER = "GeoJsonLayer"
DRAWING = "DrawingLayer"
FAULTPOLYGONS = "FaultPolygonsLayer"
+ FIELD_OUTLINE = "GeoJsonLayer"
GEOJSON = "GeoJsonLayer"
diff --git a/webviz_subsurface/plugins/_map_viewer_fmu/_utils.py b/webviz_subsurface/plugins/_map_viewer_fmu/_utils.py
index 6e5d29d0a..6fe552194 100644
--- a/webviz_subsurface/plugins/_map_viewer_fmu/_utils.py
+++ b/webviz_subsurface/plugins/_map_viewer_fmu/_utils.py
@@ -1,8 +1,10 @@
import base64
import io
import math
-from typing import List
+from typing import Dict, List
+import geojson
+import xtgeo
from PIL import Image, ImageDraw
@@ -39,3 +41,18 @@ def create_colormap_image_string(
draw.rectangle([(x_0, 0), (x_1, height)], fill=rgb_to_hex(color))
return f"data:image/png;base64,{image_to_base64(img)}"
+
+
+def xtgeo_polygons_to_geojson(polygons: xtgeo.Polygons) -> Dict:
+ feature_arr = []
+ for name, polygon in polygons.dataframe.groupby("POLY_ID"):
+ coords = [list(zip(polygon.X_UTME, polygon.Y_UTMN))]
+ feature = geojson.Feature(
+ geometry=geojson.Polygon(coords),
+ properties={
+ "name": f"id:{name}",
+ "color": [200, 200, 200],
+ },
+ )
+ feature_arr.append(feature)
+ return geojson.FeatureCollection(features=feature_arr)
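A usage sketch for the new helper, imported from the module added in this diff and assuming xtgeo.Polygons accepts a plain dataframe with its default column names (X_UTME, Y_UTMN, Z_TVDSS, POLY_ID); the coordinates are made up:

    import pandas as pd
    import xtgeo

    from webviz_subsurface.plugins._map_viewer_fmu._utils import xtgeo_polygons_to_geojson

    # Two small polygons with invented UTM coordinates.
    poly_df = pd.DataFrame(
        {
            "X_UTME": [0.0, 100.0, 100.0, 0.0, 500.0, 600.0, 550.0],
            "Y_UTMN": [0.0, 0.0, 100.0, 100.0, 500.0, 500.0, 600.0],
            "Z_TVDSS": [0.0] * 7,
            "POLY_ID": [0, 0, 0, 0, 1, 1, 1],
        }
    )
    polygons = xtgeo.Polygons(poly_df)

    collection = xtgeo_polygons_to_geojson(polygons)
    print(len(collection["features"]))                      # 2
    print(collection["features"][0]["properties"]["name"])  # id:0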
diff --git a/webviz_subsurface/plugins/_map_viewer_fmu/callbacks.py b/webviz_subsurface/plugins/_map_viewer_fmu/callbacks.py
index 85f08e7a2..8f8e61d0d 100644
--- a/webviz_subsurface/plugins/_map_viewer_fmu/callbacks.py
+++ b/webviz_subsurface/plugins/_map_viewer_fmu/callbacks.py
@@ -9,6 +9,7 @@
import numpy as np
import webviz_subsurface_components as wsc
+import xtgeo
from dash import ALL, MATCH, Input, Output, State, callback, callback_context, no_update
from dash.exceptions import PreventUpdate
from webviz_config import EncodedFile
@@ -32,7 +33,7 @@
from ._layer_model import DeckGLMapLayersModel
from ._tmp_well_pick_provider import WellPickProvider
from ._types import LayerTypes, SurfaceMode
-from ._utils import round_to_significant
+from ._utils import round_to_significant, xtgeo_polygons_to_geojson
from .layout import (
DefaultSettings,
LayoutElements,
@@ -51,6 +52,8 @@ def plugin_callbacks(
surface_server: Union[SurfaceArrayServer, SurfaceImageServer],
ensemble_fault_polygons_providers: Dict[str, EnsembleFaultPolygonsProvider],
fault_polygons_server: FaultPolygonsServer,
+ field_outline_polygons: xtgeo.Polygons,
+ field_outline_color: Tuple[float, float, float],
map_surface_names_to_fault_polygons: Dict[str, str],
well_picks_provider: Optional[WellPickProvider],
fault_polygon_attribute: Optional[str],
@@ -518,9 +521,27 @@ def _update_map(
layer_data={
"data": well_picks_provider.get_geojson(
selected_wells, horizon_name
- )
+ ),
+ "getLineColor": "@@=properties.point_color",
+ "getFillColor": "@@=properties.point_color",
+ "getTextColor": "@@=properties.text_color",
+ },
+ )
+ if (
+ LayoutLabels.SHOW_FIELD_OUTLINE in options
+ and field_outline_polygons is not None
+ ):
+ layer_model.update_layer_by_id(
+ layer_id=f"{LayoutElements.FIELD_OUTLINE_LAYER}-{idx}",
+ layer_data={
+ "data": xtgeo_polygons_to_geojson(field_outline_polygons),
+ "filled": False,
+ "depthTest": False,
+ "lineWidthMinPixels": 2,
+ "getLineColor": field_outline_color,
},
)
+
viewports = []
view_annotations = []
for idx, data in enumerate(surface_elements):
@@ -550,10 +571,13 @@ def _update_map(
"show3D": False,
"isSync": True,
"layerIds": [
- f"{LayoutElements.MAP3D_LAYER}-{idx}"
- if isinstance(surface_server, SurfaceArrayServer)
- else f"{LayoutElements.COLORMAP_LAYER}-{idx}",
+ (
+ f"{LayoutElements.MAP3D_LAYER}-{idx}"
+ if isinstance(surface_server, SurfaceArrayServer)
+ else f"{LayoutElements.COLORMAP_LAYER}-{idx}"
+ ),
f"{LayoutElements.FAULTPOLYGONS_LAYER}-{idx}",
+ f"{LayoutElements.FIELD_OUTLINE_LAYER}-{idx}",
f"{LayoutElements.WELLS_LAYER}-{idx}",
],
"name": make_viewport_label(data, tab_name, multi),
@@ -851,13 +875,15 @@ def _update_color_component_properties(
"colormap": {"value": colormap, "options": colormaps},
"color_range": {
"value": color_range,
- "step": calculate_slider_step(
- min_value=value_range[0],
- max_value=value_range[1],
- steps=100,
- )
- if value_range[0] != value_range[1]
- else 0,
+ "step": (
+ calculate_slider_step(
+ min_value=value_range[0],
+ max_value=value_range[1],
+ steps=100,
+ )
+ if value_range[0] != value_range[1]
+ else 0
+ ),
"range": value_range,
},
}
diff --git a/webviz_subsurface/plugins/_map_viewer_fmu/layout.py b/webviz_subsurface/plugins/_map_viewer_fmu/layout.py
index 492b55499..ca90c6450 100644
--- a/webviz_subsurface/plugins/_map_viewer_fmu/layout.py
+++ b/webviz_subsurface/plugins/_map_viewer_fmu/layout.py
@@ -3,7 +3,7 @@
import webviz_core_components as wcc
from dash import dcc, html
-from webviz_subsurface_components import DashSubsurfaceViewer # type: ignore
+from webviz_subsurface_components import SubsurfaceViewer # type: ignore
from webviz_subsurface._utils.enum_shim import StrEnum
@@ -37,15 +37,17 @@ class LayoutElements(StrEnum):
RANGE_RESET = "color-range-reset-button"
RESET_BUTTOM_CLICK = "color-range-reset-stored-state"
FAULTPOLYGONS = "fault-polygon-toggle"
+ FIELD_OUTLINE_TOGGLE = "field-outline-toggle"
WRAPPER = "wrapper-for-selector-component"
COLORWRAPPER = "wrapper-for-color-selector-component"
OPTIONS = "options"
COLORMAP_LAYER = "deckglcolormaplayer"
- HILLSHADING_LAYER = "deckglhillshadinglayer"
+
WELLS_LAYER = "deckglwelllayer"
MAP3D_LAYER = "deckglmap3dlayer"
FAULTPOLYGONS_LAYER = "deckglfaultpolygonslayer"
+ FIELD_OUTLINE_LAYER = "deckglfieldoutlinelayer"
REALIZATIONS_FILTER = "realization-filter-selector"
OPTIONS_DIALOG = "options-dialog"
@@ -69,8 +71,8 @@ class LayoutLabels(StrEnum):
LINK = "🔗 Link"
FAULTPOLYGONS = "Fault polygons"
SHOW_FAULTPOLYGONS = "Show fault polygons"
+ SHOW_FIELD_OUTLINE = "Show field outline"
SHOW_WELLS = "Show wells"
- SHOW_HILLSHADING = "Hillshading"
COMMON_SELECTIONS = "Options and global filters"
REAL_FILTER = "Realization filter"
WELL_FILTER = "Well filter"
@@ -183,7 +185,7 @@ def main_layout(
realizations: List[int],
color_tables: List[Dict],
show_fault_polygons: bool = True,
- hillshading_enabled: bool = True,
+ show_field_outline: bool = False,
render_surfaces_as_images: bool = True,
) -> html.Div:
return html.Div(
@@ -240,9 +242,9 @@ def main_layout(
DialogLayout(
get_uuid,
show_fault_polygons,
+ show_field_outline,
well_names,
realizations,
- hillshading_enabled,
),
]
)
@@ -269,7 +271,7 @@ def __init__(
) -> None:
super().__init__(
children=html.Div(
- DashSubsurfaceViewer(
+ SubsurfaceViewer(
id={"id": get_uuid(LayoutElements.DECKGLMAP), "tab": tab},
layers=update_map_layers(1, render_surfaces_as_images),
colorTables=color_tables,
@@ -304,18 +306,20 @@ def __init__(
self,
get_uuid: Callable,
show_fault_polygons: bool,
+ show_field_outline: bool,
well_names: List[str],
realizations: List[int],
- hillshading_enabled: bool = True,
) -> None:
- checklist_options = [LayoutLabels.SHOW_HILLSHADING]
- checklist_values = (
- [LayoutLabels.SHOW_HILLSHADING] if hillshading_enabled else []
- )
+ checklist_options = []
+ checklist_values = []
if show_fault_polygons:
checklist_options.append(LayoutLabels.SHOW_FAULTPOLYGONS)
checklist_values.append(LayoutLabels.SHOW_FAULTPOLYGONS)
+ if show_field_outline:
+ checklist_options.append(LayoutLabels.SHOW_FIELD_OUTLINE)
+ checklist_values.append(LayoutLabels.SHOW_FIELD_OUTLINE)
+
if well_names:
checklist_options.append(LayoutLabels.SHOW_WELLS)
checklist_values.append(LayoutLabels.SHOW_FAULTPOLYGONS)
@@ -358,9 +362,11 @@ def __init__(self, tab: Tabs, get_uuid: Callable, selector: str) -> None:
clicked = selector in DefaultSettings.LINKED_SELECTORS.get(tab, [])
super().__init__(
id={
- "id": get_uuid(LayoutElements.LINK)
- if selector not in ["color_range", "colormap"]
- else get_uuid(LayoutElements.COLORLINK),
+ "id": (
+ get_uuid(LayoutElements.LINK)
+ if selector not in ["color_range", "colormap"]
+ else get_uuid(LayoutElements.COLORLINK)
+ ),
"tab": tab,
"selector": selector,
},
@@ -570,9 +576,11 @@ def __init__(
) -> None:
super().__init__(
style={
- "display": "none"
- if tab == Tabs.STATS and selector == MapSelector.MODE
- else "block"
+ "display": (
+ "none"
+ if tab == Tabs.STATS and selector == MapSelector.MODE
+ else "block"
+ )
},
children=wcc.Selectors(
label=label,
@@ -805,7 +813,13 @@ def update_map_layers(
"parameters": {"depthTest": False},
}
)
-
+ layers.append(
+ {
+ "@@type": LayerTypes.FIELD_OUTLINE,
+ "id": f"{LayoutElements.FIELD_OUTLINE_LAYER}-{idx}",
+ "data": {"type": "FeatureCollection", "features": []},
+ }
+ )
if include_well_layer:
layers.append(
{
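For reference, the entry appended by `update_map_layers` above starts out as an empty GeoJSON layer and is only populated later by the callback. A sketch of the resulting spec for viewport 0, assuming `LayerTypes.FIELD_OUTLINE` resolves to deck.gl's `GeoJsonLayer` (the enum value itself is not part of this diff):

```python
# Assumed shape of the field-outline layer spec for idx == 0.
field_outline_layer = {
    "@@type": "GeoJsonLayer",  # assumption: value of LayerTypes.FIELD_OUTLINE
    "id": "deckglfieldoutlinelayer-0",  # f"{LayoutElements.FIELD_OUTLINE_LAYER}-0"
    "data": {"type": "FeatureCollection", "features": []},
}
```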
diff --git a/webviz_subsurface/plugins/_map_viewer_fmu/map_viewer_fmu.py b/webviz_subsurface/plugins/_map_viewer_fmu/map_viewer_fmu.py
index 21b386343..5116f929b 100644
--- a/webviz_subsurface/plugins/_map_viewer_fmu/map_viewer_fmu.py
+++ b/webviz_subsurface/plugins/_map_viewer_fmu/map_viewer_fmu.py
@@ -2,6 +2,7 @@
from pathlib import Path
from typing import Callable, Dict, List, Optional, Tuple, Union
+import xtgeo
from dash import Dash, html
from webviz_config import WebvizPluginABC, WebvizSettings
@@ -18,7 +19,8 @@
from webviz_subsurface._providers.ensemble_surface_provider.surface_image_server import (
SurfaceImageServer,
)
-from webviz_subsurface._utils.webvizstore_functions import read_csv
+from webviz_subsurface._utils.colors import hex_to_rgb
+from webviz_subsurface._utils.webvizstore_functions import get_path, read_csv
from ._tmp_well_pick_provider import WellPickProvider
from .callbacks import plugin_callbacks
@@ -39,6 +41,8 @@ class MapViewerFMU(WebvizPluginABC):
Default value is 'share/results/maps'.
* **`well_pick_file`:** A csv file with well picks. See data input.
* **`fault_polygon_attribute`:** Which set of fault polygons to use.
+* **`field_outline_polygons_file_path`:** Full filepath to a field outline polygons file.
+* **`field_outline_color`:** Color of the field outline polygons, given as a hex color code.
* **`map_surface_names_to_well_pick_names`:** Allows mapping of file surface names
to the relevant well pick name
* **`map_surface_names_to_fault_polygons`:** Allows mapping of file surface names
@@ -69,7 +73,11 @@ class MapViewerFMU(WebvizPluginABC):
01_drogon_ahm/realization-0/iter-0/share/results/polygons/\
toptherys--gl_faultlines_extract_postprocess.pol) for an example.
+Field outline polygons have the same format as fault polygons.
+
Well picks are provided as a csv file with columns `X_UTME,Y_UTMN,Z_TVDSS,MD,WELL,HORIZON`.
+Additionally, the columns `point_color` and `text_color` can be used to specify the colors of
+the well pick point and its text label, respectively. Use hex color codes (e.g. `#ffffff`).
See [wellpicks.csv](https://github.com/equinor/webviz-subsurface-testdata/tree/master/\
observed_data/drogon_well_picks/wellpicks.csv) for an example.
Well picks can be exported from RMS using this script: [extract_well_picks_from_rms.py]\
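A minimal sketch of how the optional color columns described above could be added to an existing well picks file (the file names and the well-name prefix are purely illustrative):

```python
import pandas as pd

# Well picks file with the documented columns:
# X_UTME, Y_UTMN, Z_TVDSS, MD, WELL, HORIZON.
picks = pd.read_csv("wellpicks.csv")

# One fixed marker color, and a text color that depends on the well name.
picks["point_color"] = "#ffffff"
picks["text_color"] = [
    "#ff0000" if well.startswith("55/33-A") else "#000000" for well in picks["WELL"]
]

picks.to_csv("wellpicks_with_colors.csv", index=False)
```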
@@ -91,6 +99,8 @@ def __init__(
attributes: list = None,
well_pick_file: Path = None,
fault_polygon_attribute: Optional[str] = None,
+ field_outline_polygons_file_path: Path = None,
+ field_outline_color: str = "#e51000",
map_surface_names_to_fault_polygons: Dict[str, str] = None,
map_surface_names_to_well_pick_names: Dict[str, str] = None,
rel_surface_folder: str = "share/results/maps",
@@ -155,7 +165,16 @@ def __init__(
self._fault_polygons_server = FaultPolygonsServer.instance(app)
for fault_polygons_provider in self._ensemble_fault_polygons_providers.values():
self._fault_polygons_server.add_provider(fault_polygons_provider)
-
+ self.field_outline_polygons = None
+ self.field_outline_polygons_file_path = field_outline_polygons_file_path
+ if self.field_outline_polygons_file_path is not None:
+ try:
+ self.field_outline_polygons = xtgeo.polygons_from_file(
+ get_path(self.field_outline_polygons_file_path)
+ )
+ except ValueError:
+ print("Error reading field outline polygons file")
+ self.field_outline_color = hex_to_rgb(field_outline_color)
self.map_surface_names_to_fault_polygons = (
map_surface_names_to_fault_polygons
if map_surface_names_to_fault_polygons is not None
@@ -175,10 +194,13 @@ def layout(self) -> html.Div:
reals.extend([x for x in provider.realizations() if x not in reals])
return main_layout(
get_uuid=self.uuid,
- well_names=self.well_pick_provider.well_names()
- if self.well_pick_provider is not None
- else [],
+ well_names=(
+ self.well_pick_provider.well_names()
+ if self.well_pick_provider is not None
+ else []
+ ),
realizations=reals,
+ show_field_outline=self.field_outline_polygons is not None,
color_tables=self.color_tables,
render_surfaces_as_images=self.render_surfaces_as_images,
)
@@ -191,6 +213,8 @@ def set_callbacks(self) -> None:
ensemble_fault_polygons_providers=self._ensemble_fault_polygons_providers,
fault_polygon_attribute=self.fault_polygon_attribute,
fault_polygons_server=self._fault_polygons_server,
+ field_outline_polygons=self.field_outline_polygons,
+ field_outline_color=self.field_outline_color,
map_surface_names_to_fault_polygons=self.map_surface_names_to_fault_polygons,
well_picks_provider=self.well_pick_provider,
color_tables=self.color_tables,
@@ -202,4 +226,8 @@ def add_webvizstore(self) -> List[Tuple[Callable, list]]:
store_functions = []
if self.well_pick_file is not None:
store_functions.append((read_csv, [{"csv_file": self.well_pick_file}]))
+ if self.field_outline_polygons_file_path is not None:
+ store_functions.append(
+ (get_path, [{"path": self.field_outline_polygons_file_path}])
+ )
return store_functions
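The `add_webvizstore` registration above follows the standard webviz-config portable pattern: `get_path` from `webviz_subsurface._utils.webvizstore_functions` is a `@webvizstore`-decorated function, so listing it together with the polygons path makes the file part of a portable build. A sketch of what such a helper typically looks like (the actual implementation is not shown in this diff):

```python
from pathlib import Path

from webviz_config.webviz_store import webvizstore


@webvizstore
def get_path_sketch(path: Path) -> Path:
    # webviz-config copies the returned file into the portable store when the
    # app is built with `webviz build`, and rewrites the path at runtime.
    return Path(path)
```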
diff --git a/webviz_subsurface/plugins/_relative_permeability.py b/webviz_subsurface/plugins/_relative_permeability.py
index 72582150a..d84ceef84 100644
--- a/webviz_subsurface/plugins/_relative_permeability.py
+++ b/webviz_subsurface/plugins/_relative_permeability.py
@@ -971,7 +971,7 @@ def p90(x):
traces = []
for ens_no, (ens, ens_df) in enumerate(
- df[["ENSEMBLE", "REAL", "SATNUM", sataxis] + curves].groupby(["ENSEMBLE"])
+ df[["ENSEMBLE", "REAL", "SATNUM", sataxis] + curves].groupby("ENSEMBLE")
):
for satnum_no, (satnum, satnum_df) in enumerate(
ens_df[["REAL", "SATNUM", sataxis] + curves].groupby("SATNUM")
diff --git a/webviz_subsurface/plugins/_rft_plotter/_plugin.py b/webviz_subsurface/plugins/_rft_plotter/_plugin.py
index f73df557c..a77fd53d3 100644
--- a/webviz_subsurface/plugins/_rft_plotter/_plugin.py
+++ b/webviz_subsurface/plugins/_rft_plotter/_plugin.py
@@ -102,6 +102,8 @@ class Ids(StrEnum):
def __init__(
self,
webviz_settings: WebvizSettings,
+ rft: str = "share/results/tables/rft.csv",
+ rft_ert: str = "share/results/tables/rft_ert.csv",
csvfile_rft: Path = None,
csvfile_rft_ert: Path = None,
ensembles: Optional[List[str]] = None,
@@ -109,11 +111,14 @@ def __init__(
obsdata: Path = None,
faultlines: Path = None,
) -> None:
+ # pylint: disable = too-many-arguments
super().__init__()
self._datamodel = RftPlotterDataModel(
webviz_settings,
ensembles,
+ rft,
+ rft_ert,
formations,
faultlines,
obsdata,
diff --git a/webviz_subsurface/plugins/_rft_plotter/_utils/_rft_plotter_data_model.py b/webviz_subsurface/plugins/_rft_plotter/_utils/_rft_plotter_data_model.py
index 53e6c4b3d..efaf9139d 100644
--- a/webviz_subsurface/plugins/_rft_plotter/_utils/_rft_plotter_data_model.py
+++ b/webviz_subsurface/plugins/_rft_plotter/_utils/_rft_plotter_data_model.py
@@ -27,12 +27,15 @@ def __init__(
self,
webviz_settings: WebvizSettings,
ensembles: Optional[List[str]],
+ rft: str,
+ rft_ert: str,
formations: Path = None,
faultlines: Path = None,
obsdata: Path = None,
csvfile_rft: Path = None,
csvfile_rft_ert: Path = None,
):
+ # pylint: disable = too-many-arguments
self.formations = formations
self.faultlines = faultlines
self.obsdata = obsdata
@@ -67,12 +70,12 @@ def __init__(
self.ertdatadf = create_csvfile_providerset_from_paths(
ens_paths,
- "share/results/tables/rft_ert.csv",
+ rft_ert,
).get_aggregated_dataframe()
try:
self.simdf = create_csvfile_providerset_from_paths(
- ens_paths, "share/results/tables/rft.csv"
+ ens_paths, rft
).get_aggregated_dataframe()
except ValueError as err:
@@ -116,6 +119,9 @@ def __init__(
self.ertdatadf_inactive = filter_frame(self.ertdatadf, {"ACTIVE": 0})
self.ertdatadf = filter_frame(self.ertdatadf, {"ACTIVE": 1})
+ if self.ertdatadf.empty:
+ raise ValueError("There are no active RFT points in the input data.")
+
self.ertdatadf["STDDEV"] = self.ertdatadf.groupby(
["WELL", "DATE", "ZONE", "ENSEMBLE", "TVD"]
)["SIMULATED"].transform("std")
diff --git a/webviz_subsurface/plugins/_rft_plotter/_views/_sim_vs_obs_view/_utils/_crossplot_figure.py b/webviz_subsurface/plugins/_rft_plotter/_views/_sim_vs_obs_view/_utils/_crossplot_figure.py
index 85d29e21c..fbcd0795a 100644
--- a/webviz_subsurface/plugins/_rft_plotter/_views/_sim_vs_obs_view/_utils/_crossplot_figure.py
+++ b/webviz_subsurface/plugins/_rft_plotter/_views/_sim_vs_obs_view/_utils/_crossplot_figure.py
@@ -17,7 +17,10 @@ def update_crossplot(
for _ens, ensdf in df.groupby("ENSEMBLE"):
dframe = (
- ensdf.groupby(["WELL", "DATE", "ZONE", "TVD"]).mean().reset_index().copy()
+ ensdf.groupby(["WELL", "DATE", "ZONE", "TVD"])
+ .mean(numeric_only=True)
+ .reset_index()
+ .copy()
)
trace = {
"x": dframe["OBSERVED"],
diff --git a/webviz_subsurface/plugins/_running_time_analysis_fmu.py b/webviz_subsurface/plugins/_running_time_analysis_fmu.py
index 644a225b5..06457a79b 100644
--- a/webviz_subsurface/plugins/_running_time_analysis_fmu.py
+++ b/webviz_subsurface/plugins/_running_time_analysis_fmu.py
@@ -557,9 +557,9 @@ def ensemble_post_processing() -> list:
if len(set(range(min(reals), max(reals) + 1))) > len(set(reals)):
missing_df = ens_dfs[0].copy()
missing_df["STATUS"] = "Realization not started"
- missing_df["RUNTIME"] = np.NaN
- missing_df["JOB_SCALED_RUNTIME"] = np.NaN
- missing_df["ENS_SCALED_RUNTIME"] = np.NaN
+ missing_df["RUNTIME"] = np.nan
+ missing_df["JOB_SCALED_RUNTIME"] = np.nan
+ missing_df["ENS_SCALED_RUNTIME"] = np.nan
for missing_real in set(range(min(reals), max(reals) + 1)).difference(
set(reals)
):
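The rename above matters because NumPy 2.0 removed the `np.NaN` alias; only the lowercase `np.nan` remains, and it works on both old and new NumPy versions:

```python
import numpy as np

missing = np.nan           # np.NaN raises AttributeError on NumPy >= 2.0
assert missing != missing  # NaN never compares equal to itself
```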
diff --git a/webviz_subsurface/plugins/_segy_viewer.py b/webviz_subsurface/plugins/_segy_viewer.py
index 681110c40..1486ca337 100644
--- a/webviz_subsurface/plugins/_segy_viewer.py
+++ b/webviz_subsurface/plugins/_segy_viewer.py
@@ -49,7 +49,7 @@ def __init__(
self.zunit = zunit
self.segyfiles: List[str] = [str(segy) for segy in segyfiles]
- self.initial_colors = (
+ self.colors = (
colors
if colors
else [
@@ -68,7 +68,6 @@ def __init__(
]
)
self.init_state = self.update_state(self.segyfiles[0])
- self.init_state.get("colorscale", self.initial_colors)
self.init_state.get("uirevision", str(uuid4()))
self.plotly_theme = webviz_settings.theme.plotly_theme
@@ -86,7 +85,6 @@ def update_state(self, cubepath: str, **kwargs: Any) -> Dict[str, Any]:
"color_min_value": float(f"{round(cube.values.min(), 2):2f}"),
"color_max_value": float(f"{round(cube.values.max(), 2):2f}"),
"uirevision": str(uuid4()),
- "colorscale": self.initial_colors,
}
if kwargs:
for key, value in kwargs.items():
@@ -121,10 +119,6 @@ def tour_steps(self) -> List[dict]:
"id": self.uuid("zslice"),
"content": "Selected zslice for the seismic cube.",
},
- {
- "id": self.uuid("color-scale"),
- "content": "Click this button to change colorscale",
- },
{
"id": self.uuid("color-values"),
"content": "Drag either node of slider to truncate color ranges.",
@@ -160,18 +154,6 @@ def settings_layout(self) -> wcc.FlexBox:
clearable=False,
),
),
- html.Div(
- children=[
- wcc.Label(
- children="Set colorscale",
- ),
- wcc.ColorScales(
- id=self.uuid("color-scale"),
- colorscale=self.initial_colors,
- nSwatches=12,
- ),
- ],
- ),
html.Div(
children=[
wcc.RangeSlider(
@@ -183,7 +165,8 @@ def settings_layout(self) -> wcc.FlexBox:
self.init_state["min_value"],
self.init_state["max_value"],
],
- tooltip={"placement": "bottom"},
+ marks=None,
+ tooltip={"placement": "bottom", "always_visible": True},
step=calculate_slider_step(
min_value=self.init_state["min_value"],
max_value=self.init_state["max_value"],
@@ -257,7 +240,6 @@ def set_callbacks(self, app: Dash) -> None:
Input(self.uuid("xline"), "clickData"),
Input(self.uuid("zslice"), "clickData"),
Input(self.uuid("color-values"), "value"),
- Input(self.uuid("color-scale"), "colorscale"),
Input(self.uuid("zoom"), "n_clicks"),
Input(self.uuid("color-reset"), "n_clicks"),
],
@@ -272,7 +254,6 @@ def _update_state(
xcd: Union[dict, None],
zcd: Union[dict, None],
color_values: List[float],
- colorscale: List[float],
_zoom_btn: Union[int, None],
_reset_range_btn: Union[int, None],
zfig: Union[dict, None],
@@ -311,7 +292,6 @@ def _update_state(
else:
store["color_min_value"] = color_values[0]
store["color_max_value"] = color_values[1]
- store["colorscale"] = colorscale
return json.dumps(store)
@app.callback(
@@ -358,7 +338,7 @@ def _set_zslice(state_data_str: Union[str, None]) -> dict:
reverse_y=False,
zmin=state["color_min_value"],
zmax=state["color_max_value"],
- colorscale=state["colorscale"],
+ colorscale=self.colors,
uirevision=state["uirevision"],
)
fig["layout"]["shapes"] = shapes
@@ -406,7 +386,7 @@ def _set_iline(state_data_str: Union[str, None]) -> dict:
yaxis_title=self.zunit,
zmin=state["color_min_value"],
zmax=state["color_max_value"],
- colorscale=state["colorscale"],
+ colorscale=self.colors,
uirevision=state["uirevision"],
)
fig["layout"]["shapes"] = shapes
@@ -451,7 +431,7 @@ def _set_xline(state_data_str: Union[str, None]) -> dict:
yaxis_title=self.zunit,
zmin=state["color_min_value"],
zmax=state["color_max_value"],
- colorscale=state["colorscale"],
+ colorscale=self.colors,
uirevision=state["uirevision"],
)
fig["layout"]["shapes"] = shapes
diff --git a/webviz_subsurface/plugins/_surface_with_grid_cross_section.py b/webviz_subsurface/plugins/_surface_with_grid_cross_section.py
index cf48dcc30..3c1a23739 100644
--- a/webviz_subsurface/plugins/_surface_with_grid_cross_section.py
+++ b/webviz_subsurface/plugins/_surface_with_grid_cross_section.py
@@ -93,7 +93,7 @@ def __init__(
Path(gridfile).stem for gridfile in gridparameterfiles
]
self.plotly_theme = webviz_settings.theme.plotly_theme
- self.initial_colors = (
+ self.colors = (
colors
if colors is not None
else [
@@ -154,10 +154,6 @@ def tour_steps(self):
"id": self.ids("gridparameter"),
"content": "The visualized grid parameter.",
},
- {
- "id": self.ids("color-scale"),
- "content": ("Click this button to change colorscale"),
- },
{
"id": self.ids("color-values"),
"content": ("Drag either node of slider to truncate color ranges"),
@@ -224,14 +220,6 @@ def layout(self):
value=self.gridparafiles[0],
clearable=False,
),
- wcc.Label(
- children="Set colorscale",
- ),
- wcc.ColorScales(
- id=self.ids("color-scale"),
- colorscale=self.initial_colors,
- nSwatches=12,
- ),
wcc.RangeSlider(
label="Set color range",
id=self.ids("color-values"),
@@ -335,10 +323,9 @@ def _render_surface(
Input(self.ids("gridparameter"), "value"),
Input(self.ids("surface"), "value"),
Input(self.ids("color-values"), "value"),
- Input(self.ids("color-scale"), "colorscale"),
],
)
- def _render_fence(coords, gridparameter, surfacepath, color_values, colorscale):
+ def _render_fence(coords, gridparameter, surfacepath, color_values):
if not coords:
raise PreventUpdate
grid = load_grid(get_path(self.gridfile))
@@ -369,7 +356,7 @@ def _render_fence(coords, gridparameter, surfacepath, color_values, colorscale):
s_arr=s_arr,
theme=self.plotly_theme,
s_name=self.surfacenames[self.surfacefiles.index(surfacepath)],
- colorscale=colorscale,
+ colorscale=self.colors,
xmin=hmin,
xmax=hmax,
ymin=vmin,
diff --git a/webviz_subsurface/plugins/_surface_with_seismic_cross_section.py b/webviz_subsurface/plugins/_surface_with_seismic_cross_section.py
index eb9fb67b5..5b458251d 100644
--- a/webviz_subsurface/plugins/_surface_with_seismic_cross_section.py
+++ b/webviz_subsurface/plugins/_surface_with_seismic_cross_section.py
@@ -85,7 +85,7 @@ def __init__(
else:
self.segynames = [Path(segyfile).stem for segyfile in segyfiles]
self.plotly_theme = webviz_settings.theme.plotly_theme
- self.initial_colors = (
+ self.colors = (
colors
if colors is not None
else [
@@ -148,10 +148,6 @@ def tour_steps(self):
"id": self.ids("cube"),
"content": "The visualized cube.",
},
- {
- "id": self.ids("color-scale"),
- "content": ("Click this button to change colorscale"),
- },
{
"id": self.ids("color-values"),
"content": ("Drag either node of slider to truncate color ranges"),
@@ -216,14 +212,6 @@ def layout(self):
value=self.segyfiles[0],
clearable=False,
),
- wcc.Label(
- children="Set colorscale",
- ),
- wcc.ColorScales(
- id=self.ids("color-scale"),
- colorscale=self.initial_colors,
- nSwatches=12,
- ),
wcc.RangeSlider(
label="Set color range",
id=self.ids("color-values"),
@@ -320,10 +308,9 @@ def _render_surface(
Input(self.ids("cube"), "value"),
Input(self.ids("surface"), "value"),
Input(self.ids("color-values"), "value"),
- Input(self.ids("color-scale"), "colorscale"),
],
)
- def _render_fence(coords, cubepath, surfacepath, color_values, colorscale):
+ def _render_fence(coords, cubepath, surfacepath, color_values):
if not coords:
raise PreventUpdate
cube = load_cube_data(get_path(cubepath))
@@ -337,7 +324,7 @@ def _render_fence(coords, cubepath, surfacepath, color_values, colorscale):
s_arr=s_arr,
theme=self.plotly_theme,
s_name=self.surfacenames[self.surfacefiles.index(surfacepath)],
- colorscale=colorscale,
+ colorscale=self.colors,
xmin=hmin,
xmax=hmax,
ymin=vmin,
diff --git a/webviz_subsurface/plugins/_volumetric_analysis/controllers/distribution_controllers.py b/webviz_subsurface/plugins/_volumetric_analysis/controllers/distribution_controllers.py
index 725d7f1e6..2da384fd1 100644
--- a/webviz_subsurface/plugins/_volumetric_analysis/controllers/distribution_controllers.py
+++ b/webviz_subsurface/plugins/_volumetric_analysis/controllers/distribution_controllers.py
@@ -1,4 +1,4 @@
-from typing import Callable, Optional
+from typing import Callable, List, Optional
import numpy as np
import pandas as pd
@@ -32,7 +32,7 @@
# pylint: disable=too-many-statements
def distribution_controllers(
- get_uuid: Callable, volumemodel: InplaceVolumesModel
+ get_uuid: Callable, volumemodel: InplaceVolumesModel, colors: List[str]
) -> None:
@callback(
Output({"id": get_uuid("main-voldist"), "page": "custom"}, "children"),
@@ -105,18 +105,18 @@ def _update_page_custom(selections: dict, page_selected: str) -> tuple:
nbins=selections["hist_bins"],
facet_col=selections["Subplots"],
color=selections["Color by"],
- color_discrete_sequence=selections["Colorscale"],
- color_continuous_scale=selections["Colorscale"],
- color_discrete_map=FLUID_COLORS
- if selections["Color by"] == "FLUID_ZONE"
- else None,
+ color_discrete_sequence=colors,
+ color_continuous_scale=colors,
+ color_discrete_map=(
+ FLUID_COLORS if selections["Color by"] == "FLUID_ZONE" else None
+ ),
barmode=selections["barmode"],
boxmode=selections["barmode"],
- text_auto=get_text_format_bar_plot(
- selected_data, selections, volumemodel
- )
- if selections["Plot type"] == "bar"
- else False,
+ text_auto=(
+ get_text_format_bar_plot(selected_data, selections, volumemodel)
+ if selections["Plot type"] == "bar"
+ else False
+ ),
layout={
"title": (
{
@@ -151,18 +151,22 @@ def _update_page_custom(selections: dict, page_selected: str) -> tuple:
return custom_plotting_layout(
figure=figure,
- tables=make_tables(
- dframe=dframe,
- responses=list({selections["X Response"], selections["Y Response"]}),
- groups=groups,
- volumemodel=volumemodel,
- page_selected=page_selected,
- selections=selections,
- table_type="Statistics table",
- view_height=37,
- )
- if selections["bottom_viz"] == "table"
- else None,
+ tables=(
+ make_tables(
+ dframe=dframe,
+ responses=list(
+ {selections["X Response"], selections["Y Response"]}
+ ),
+ groups=groups,
+ volumemodel=volumemodel,
+ page_selected=page_selected,
+ selections=selections,
+ table_type="Statistics table",
+ view_height=37,
+ )
+ if selections["bottom_viz"] == "table"
+ else None
+ ),
)
@callback(
@@ -233,7 +237,7 @@ def _update_page_per_zr(selections: dict, page_selected: str) -> list:
data_frame=dframe,
values=selections["X Response"],
names=selector,
- color_discrete_sequence=selections["Colorscale"],
+ color_discrete_sequence=colors,
color=selector,
)
.update_traces(marker_line={"color": "#000000", "width": 1})
@@ -250,7 +254,7 @@ def _update_page_per_zr(selections: dict, page_selected: str) -> list:
title=f"{selections['X Response']} per {selector}",
barmode="overlay" if selector == selections["Color by"] else "group",
layout={"bargap": 0.05},
- color_discrete_sequence=selections["Colorscale"],
+ color_discrete_sequence=colors,
color=selections["Color by"],
xaxis={
"type": "category",
@@ -263,9 +267,9 @@ def _update_page_per_zr(selections: dict, page_selected: str) -> list:
selections=selections,
volumemodel=volumemodel,
),
- color_discrete_map=FLUID_COLORS
- if selections["Color by"] == "FLUID_ZONE"
- else None,
+ color_discrete_map=(
+ FLUID_COLORS if selections["Color by"] == "FLUID_ZONE" else None
+ ),
).update_layout(margin_t=35)
if selections["X Response"] not in volumemodel.hc_responses:
diff --git a/webviz_subsurface/plugins/_volumetric_analysis/controllers/selections_controllers.py b/webviz_subsurface/plugins/_volumetric_analysis/controllers/selections_controllers.py
index c1e68c6bf..330af79b4 100644
--- a/webviz_subsurface/plugins/_volumetric_analysis/controllers/selections_controllers.py
+++ b/webviz_subsurface/plugins/_volumetric_analysis/controllers/selections_controllers.py
@@ -21,10 +21,6 @@ def selections_controllers(
{"id": get_uuid("filters"), "tab": ALL, "selector": ALL, "type": ALL},
"value",
),
- Input(
- {"id": get_uuid("selections"), "tab": ALL, "settings": "Colorscale"},
- "colorscale",
- ),
Input(get_uuid("initial-load-info"), "data"),
State(get_uuid("page-selected"), "data"),
State(get_uuid("tabs"), "value"),
@@ -37,7 +33,6 @@ def selections_controllers(
def _update_selections(
selectors: list,
filters: list,
- colorscale: str,
initial_load: dict,
selected_page: str,
selected_tab: str,
@@ -63,7 +58,6 @@ def _update_selections(
if id_value["tab"] == selected_tab
}
- page_selections.update(Colorscale=colorscale[0] if colorscale else None)
page_selections.update(ctx_clicked=ctx["prop_id"])
# check if a page needs to be updated due to page refresh or
@@ -240,9 +234,9 @@ def _plot_options(
settings["bottom_viz"] = {
"options": visualization_options,
- "value": "none"
- if selected_page != "custom"
- else selections.get("bottom_viz"),
+ "value": (
+ "none" if selected_page != "custom" else selections.get("bottom_viz")
+ ),
}
return tuple(
@@ -336,9 +330,11 @@ def _update_filter_options(
selector_is_multi = page_filter_settings[selector]["multi"]
if not multi and selector_is_multi:
values = [
- "rms_seed"
- if selector == "SENSNAME_CASE" and "rms_seed" in options
- else options[0]
+ (
+ "rms_seed"
+ if selector == "SENSNAME_CASE" and "rms_seed" in options
+ else options[0]
+ )
]
elif multi and not selector_is_multi:
values = options
diff --git a/webviz_subsurface/plugins/_volumetric_analysis/views/main_view.py b/webviz_subsurface/plugins/_volumetric_analysis/views/main_view.py
index 0f230e37d..e78df053e 100644
--- a/webviz_subsurface/plugins/_volumetric_analysis/views/main_view.py
+++ b/webviz_subsurface/plugins/_volumetric_analysis/views/main_view.py
@@ -3,7 +3,6 @@
import pandas as pd
import webviz_core_components as wcc
from dash import dcc
-from webviz_config import WebvizConfigTheme
from webviz_subsurface._models import InplaceVolumesModel
@@ -22,7 +21,6 @@
def main_view(
get_uuid: Callable,
volumemodel: InplaceVolumesModel,
- theme: WebvizConfigTheme,
disjoint_set_df: Optional[pd.DataFrame] = None,
) -> dcc.Tabs:
tabs = []
@@ -37,7 +35,6 @@ def main_view(
uuid=get_uuid("selections"),
tab="voldist",
volumemodel=volumemodel,
- theme=theme,
),
filter_layout(
uuid=get_uuid("filters"), tab="voldist", volumemodel=volumemodel
diff --git a/webviz_subsurface/plugins/_volumetric_analysis/views/selections_view.py b/webviz_subsurface/plugins/_volumetric_analysis/views/selections_view.py
index a3d74a234..6574ac206 100644
--- a/webviz_subsurface/plugins/_volumetric_analysis/views/selections_view.py
+++ b/webviz_subsurface/plugins/_volumetric_analysis/views/selections_view.py
@@ -2,7 +2,6 @@
import webviz_core_components as wcc
from dash import dcc, html
-from webviz_config import WebvizConfigTheme
from webviz_subsurface._models import InplaceVolumesModel
@@ -10,7 +9,6 @@
def selections_layout(
uuid: str,
volumemodel: InplaceVolumesModel,
- theme: WebvizConfigTheme,
tab: str,
) -> html.Div:
selectors = "/".join(
@@ -33,7 +31,7 @@ def selections_layout(
],
),
plot_selections_layout(uuid, volumemodel, tab),
- settings_layout(volumemodel, uuid, theme, tab),
+ settings_layout(volumemodel, uuid, tab),
]
)
@@ -157,9 +155,8 @@ def plot_selector_dropdowns(
def settings_layout(
- volumemodel: InplaceVolumesModel, uuid: str, theme: WebvizConfigTheme, tab: str
+ volumemodel: InplaceVolumesModel, uuid: str, tab: str
) -> wcc.Selectors:
- theme_colors = theme.plotly_theme.get("layout", {}).get("colorway", [])
return wcc.Selectors(
label="⚙️ SETTINGS",
open_details=False,
@@ -168,13 +165,6 @@ def settings_layout(
subplot_xaxis_range(uuid=uuid, tab=tab),
histogram_options(uuid=uuid, tab=tab),
bar_text_options(uuid=uuid, tab=tab),
- html.Span("Colors", style={"font-weight": "bold"}),
- wcc.ColorScales(
- id={"id": uuid, "tab": tab, "settings": "Colorscale"},
- colorscale=theme_colors,
- fixSwatches=True,
- nSwatches=12,
- ),
],
)
diff --git a/webviz_subsurface/plugins/_volumetric_analysis/volumetric_analysis.py b/webviz_subsurface/plugins/_volumetric_analysis/volumetric_analysis.py
index f75bbd7c5..cc162aab2 100644
--- a/webviz_subsurface/plugins/_volumetric_analysis/volumetric_analysis.py
+++ b/webviz_subsurface/plugins/_volumetric_analysis/volumetric_analysis.py
@@ -94,6 +94,7 @@ class VolumetricAnalysis(WebvizPluginABC):
* **`non_net_facies`:** List of facies which are non-net.
* **`fipfile`:** Path to a yaml-file that defines a match between FIPNUM regions
and human readable regions, zones and etc to be used as filters.
+* **`colors`:** List of hex colors to use in the plots.
---
?> The input files must follow FMU standards.
@@ -153,6 +154,7 @@ def __init__(
non_net_facies: Optional[List[str]] = None,
fipfile: Path = None,
drop_failed_realizations: bool = True,
+ colors: List[str] = None,
):
super().__init__()
WEBVIZ_ASSETS.add(
@@ -169,7 +171,46 @@ def __init__(
f" Plugin argument drop_failed_realizations is set to {drop_failed_realizations}. "
"An 'OK' file in the realization runpath is used as success criteria"
)
-
+ self.colors = (
+ colors
+ if colors is not None
+ else [
+ "#1F77B4",
+ "#FF7F0E",
+ "#2CA02C",
+ "#D62728",
+ "#9467BD",
+ "#8C564B",
+ "#E377C2",
+ "#7F7F7F",
+ "#BCBD22",
+ "#17BECF",
+ "#FD3216",
+ "#00FE35",
+ "#6A76FC",
+ "#FED4C4",
+ "#FE00CE",
+ "#0DF9FF",
+ "#F6F926",
+ "#FF9616",
+ "#479B55",
+ "#EEA6FB",
+ "#DC587D",
+ "#D626FF",
+ "#6E899C",
+ "#00B5F7",
+ "#B68E00",
+ "#C9FBE5",
+ "#FF0092",
+ "#22FFA7",
+ "#E3EE9E",
+ "#86CE00",
+ "#BC7196",
+ "#7E7DCD",
+ "#FC6955",
+ "#E48F72",
+ ]
+ )
if csvfile_vol:
table_provider = EnsembleTableProviderFactory.instance()
volumes_table = table_provider.create_from_ensemble_csv_file(csvfile_vol)
@@ -221,7 +262,6 @@ def layout(self) -> html.Div:
main_view(
get_uuid=self.uuid,
volumemodel=self.volmodel,
- theme=self.theme,
disjoint_set_df=self.disjoint_set_df,
),
],
@@ -229,7 +269,9 @@ def layout(self) -> html.Div:
def set_callbacks(self) -> None:
selections_controllers(get_uuid=self.uuid, volumemodel=self.volmodel)
- distribution_controllers(get_uuid=self.uuid, volumemodel=self.volmodel)
+ distribution_controllers(
+ get_uuid=self.uuid, volumemodel=self.volmodel, colors=self.colors
+ )
tornado_controllers(
get_uuid=self.uuid, volumemodel=self.volmodel, theme=self.theme
)
diff --git a/webviz_subsurface/plugins/_well_log_viewer/well_log_viewer.py b/webviz_subsurface/plugins/_well_log_viewer/well_log_viewer.py
index 619702d14..c45117959 100644
--- a/webviz_subsurface/plugins/_well_log_viewer/well_log_viewer.py
+++ b/webviz_subsurface/plugins/_well_log_viewer/well_log_viewer.py
@@ -171,7 +171,7 @@ def layout(self) -> html.Div:
self.initial_well_name
)
),
- colorTables=self.colortables,
+ colorMapFunctions=self.colortables,
axisMnemos={"MD": ["MD"], "TVD": ["TVD"]},
),
)