
Commit 8b8b744

Merge pull request #280 from bessagroup/pr/1.5.4 (Pr/1.5.4)
2 parents f9dbeb4 + 5d7e318

10 files changed: +34 -16 lines


Diff for: VERSION

+1 -1

@@ -1 +1 @@
-1.5.3
+1.5.4

Diff for: docs/source/conf.py

+2 -2

@@ -27,8 +27,8 @@
 project = 'f3dasm'
 author = 'Martin van der Schelling'
 copyright = '2024, Martin van der Schelling'
-version = '1.5.3'
-release = '1.5.3'
+version = '1.5.4'
+release = '1.5.4'


 # -- General configuration ----------------------------------------------------

Diff for: src/f3dasm/__version__.py

+1 -1

@@ -1 +1 @@
-__version__: str = "1.5.3"
+__version__: str = "1.5.4"

Diff for: src/f3dasm/_src/design/domain.py

+6 -2

@@ -411,15 +411,19 @@ def add(self, name: str,
             f"Unknown type {type}!"
             f"Possible types are: 'float', 'int', 'category', 'constant'.")

-    def add_output(self, name: str, to_disk: bool, exist_ok=False):
+    def add_output(self, name: str, to_disk: bool = False,
+                   exist_ok: bool = False):
         """Add a new output parameter to the domain.

         Parameters
         ----------
         name : str
             Name of the output parameter.
         to_disk : bool
-            Whether to store the output parameter on disk.
+            Whether to store the output parameter on disk, by default False.
+        exist_ok: bool
+            Whether to raise an error if the output parameter already exists,
+            by default False.

         Example
         -------
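
A minimal usage sketch of the updated signature (assuming a Domain constructed via f3dasm.design.Domain; this snippet is not part of the commit):

    from f3dasm.design import Domain

    domain = Domain()
    domain.add_output('y')                 # to_disk now defaults to False
    domain.add_output('y', exist_ok=True)  # duplicate add expected to pass with exist_ok=True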

Diff for: src/f3dasm/_src/experimentdata/_data.py

+6 -1

@@ -108,7 +108,12 @@ def __add__(self, other: _Data | Dict[str, Any]) -> _Data:
             [self.data, other_data_copy]), columns=self.columns)

     def __eq__(self, __o: _Data) -> bool:
-        return self.data.equals(__o.data)
+        try:
+            pd.testing.assert_frame_equal(self.data, __o.data)
+        except AssertionError:
+            return False
+
+        return True

     def _repr_html_(self) -> str:
         return self.to_dataframe()._repr_html_()
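
Switching from DataFrame.equals to pd.testing.assert_frame_equal presumably makes the equality check tolerant of small floating-point differences instead of requiring bitwise-identical values; a standalone pandas-only illustration (not f3dasm code):

    import pandas as pd

    a = pd.DataFrame({'x': [0.1 + 0.2]})   # 0.30000000000000004
    b = pd.DataFrame({'x': [0.3]})

    print(a.equals(b))                      # False: exact comparison

    # assert_frame_equal compares float columns within a relative tolerance
    # by default, so these two frames are considered equal and no
    # AssertionError is raised.
    pd.testing.assert_frame_equal(a, b)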

Diff for: src/f3dasm/_src/experimentdata/experimentdata.py

+1 -1

@@ -489,7 +489,7 @@ def get_input_data(self,
         if parameter_names is None:
             return ExperimentData(input_data=self._input_data,
                                   jobs=self._jobs,
-                                  domain=self.domain,
+                                  domain=self.domain.select(self.domain.names),
                                   project_dir=self.project_dir)
         else:
             return ExperimentData(input_data=self._input_data.select_columns(

Diff for: src/f3dasm/_src/experimentdata/experimentsample.py

+2 -1

@@ -377,7 +377,8 @@ def _experimentsample_factory(
        The converted experiment sample.
    """
    if isinstance(experiment_sample, np.ndarray):
-        return ExperimentSample.from_numpy(experiment_sample, domain)
+        return ExperimentSample.from_numpy(input_array=experiment_sample,
+                                           domain=domain)

    elif isinstance(experiment_sample, dict):
        return ExperimentSample(dict_input=experiment_sample,

Diff for: src/f3dasm/_src/hydra_utils.py

+7 -2

@@ -24,7 +24,8 @@


 def update_config_with_experiment_sample(
-        config: OmegaConf, experiment_sample: ExperimentSample) -> OmegaConf:
+        config: OmegaConf, experiment_sample: ExperimentSample,
+        force_add: bool = False) -> OmegaConf:
     """
     Update the config with the values from the experiment sample

@@ -34,6 +35,10 @@ def update_config_with_experiment_sample(
         The configuration to update
     experiment_sample : ExperimentSample
         The experiment sample to update the configuration with
+    force_add : bool, optional
+        If True, the function will add keys that are not present in the
+        configuration. If False, the function will ignore keys that are not
+        present in the configuration. Default is False.

     Returns
     -------
@@ -55,7 +60,7 @@ def update_config_with_experiment_sample(
     cfg = deepcopy(config)
     for key, value in experiment_sample.to_dict().items():
         try:
-            OmegaConf.update(cfg, key, value)
+            OmegaConf.update(cfg, key, value, force_add=force_add)
         except AttributeError:
             continue
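
A small OmegaConf-only sketch of what force_add changes (key names are invented for illustration):

    from omegaconf import OmegaConf

    cfg = OmegaConf.create({'lr': 0.1})
    OmegaConf.set_struct(cfg, True)   # closed config: unknown keys normally raise

    OmegaConf.update(cfg, 'lr', 0.01)                        # existing key: always allowed
    OmegaConf.update(cfg, 'batch_size', 32, force_add=True)  # new key added despite struct mode
    print(cfg)                                               # {'lr': 0.01, 'batch_size': 32}

Without force_add, updating a key that is missing from a struct config raises a ConfigAttributeError (an AttributeError subclass), which appears to be the case the except AttributeError: continue branch above silently skips.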

Diff for: src/f3dasm/_src/optimization/adapters/scipy_implementations.py

+3

@@ -65,6 +65,9 @@ def fun(x):

         self.options['maxiter'] = iterations

+        if not hasattr(data_generator, 'dfdx'):
+            data_generator.dfdx = None
+
         minimize(
             fun=fun,
             method=self.method,
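
The dfdx attribute is presumably forwarded to SciPy as the objective gradient; defaulting it to None lets SciPy fall back to numerical differentiation when a data generator defines no analytical gradient. A standalone SciPy sketch showing that jac=None is a valid argument (not f3dasm code):

    import numpy as np
    from scipy.optimize import minimize

    def fun(x):
        return float(np.sum(x ** 2))

    # With jac=None, minimize estimates the gradient by finite differences,
    # so an absent analytical gradient can simply be substituted by None.
    result = minimize(fun=fun, x0=np.array([1.0, -2.0]),
                      method='L-BFGS-B', jac=None)
    print(result.x)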

Diff for: studies/fragile_becomes_supercompressible/config.yaml

+5 -5

@@ -1,5 +1,5 @@
 defaults:
-  - 7d_domain
+  - 3d_domain
   - override hydra/job_logging: custom

 experimentdata:
@@ -26,10 +26,10 @@ imperfection:
   high: 1.0

 scripts:
-  lin_buckle_pre: /home/martin/Documents/GitHub/F3DASM/studies/fragile_becomes_supercompressible/scripts/supercompressible_lin_buckle.py
-  lin_buckle_post: /home/martin/Documents/GitHub/F3DASM/studies/fragile_becomes_supercompressible/scripts/supercompressible_lin_buckle_pp.py
-  riks_pre: /home/martin/Documents/GitHub/F3DASM/studies/fragile_becomes_supercompressible/scripts/supercompressible_riks.py
-  riks_post: /home/martin/Documents/GitHub/F3DASM/studies/fragile_becomes_supercompressible/scripts/supercompressible_riks_pp.py
+  lin_buckle_pre: ${hydra:runtime.cwd}/scripts/supercompressible_lin_buckle.py
+  lin_buckle_post: ${hydra:runtime.cwd}/scripts/supercompressible_lin_buckle_pp.py
+  riks_pre: ${hydra:runtime.cwd}/scripts/supercompressible_riks.py
+  riks_post: ${hydra:runtime.cwd}/scripts/supercompressible_riks_pp.py


 log_level: 20
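
The ${hydra:runtime.cwd} resolver expands to the directory the application was launched from, so the hard-coded /home/martin/... paths are no longer needed. A minimal Hydra sketch of how such a config might be consumed (the main function and the printed key are illustrative only):

    import hydra
    from omegaconf import DictConfig

    @hydra.main(config_path='.', config_name='config', version_base=None)
    def main(cfg: DictConfig) -> None:
        # ${hydra:runtime.cwd} is resolved at runtime, so this path points
        # into the launch directory on any machine, not one developer's
        # home folder.
        print(cfg.scripts.lin_buckle_pre)

    if __name__ == '__main__':
        main()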
