Skip to content

Commit 83049b2

Browse files
[pre-commit.ci] pre-commit autoupdate (#4552)
<!--pre-commit.ci start--> updates: - [github.com/astral-sh/ruff-pre-commit: v0.8.6 → v0.9.1](astral-sh/ruff-pre-commit@v0.8.6...v0.9.1) <!--pre-commit.ci end--> --------- Signed-off-by: Jinzhe Zeng <[email protected]> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Jinzhe Zeng <[email protected]>
1 parent 33df869 commit 83049b2

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

86 files changed

+428
-425
lines changed

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ repos:
2929
exclude: ^source/3rdparty
3030
- repo: https://github.com/astral-sh/ruff-pre-commit
3131
# Ruff version.
32-
rev: v0.8.6
32+
rev: v0.9.1
3333
hooks:
3434
- id: ruff
3535
args: ["--fix"]

backend/find_paddle.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,7 @@ def get_pd_requirement(pd_version: str = "") -> dict:
105105

106106
return {
107107
"paddle": [
108-
"paddlepaddle>=3.0.0b1" if pd_version != "" else "paddlepaddle>=3.0.0b1",
108+
"paddlepaddle>=3.0.0b1",
109109
],
110110
}
111111

deepmd/dpmodel/atomic_model/linear_atomic_model.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -457,9 +457,9 @@ def _compute_weight(
457457
list[np.ndarray]
458458
the atomic ZBL weight for interpolation. (nframes, nloc, 1)
459459
"""
460-
assert (
461-
self.sw_rmax > self.sw_rmin
462-
), "The upper boundary `sw_rmax` must be greater than the lower boundary `sw_rmin`."
460+
assert self.sw_rmax > self.sw_rmin, (
461+
"The upper boundary `sw_rmax` must be greater than the lower boundary `sw_rmin`."
462+
)
463463

464464
xp = array_api_compat.array_namespace(extended_coord, extended_atype)
465465
dp_nlist = nlists_[0]

deepmd/dpmodel/descriptor/dpa1.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -432,9 +432,9 @@ def change_type_map(
432432
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
433433
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
434434
"""
435-
assert (
436-
self.type_map is not None
437-
), "'type_map' must be defined when performing type changing!"
435+
assert self.type_map is not None, (
436+
"'type_map' must be defined when performing type changing!"
437+
)
438438
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
439439
obj = self.se_atten
440440
obj.ntypes = len(type_map)

deepmd/dpmodel/descriptor/dpa2.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -543,9 +543,9 @@ def init_subclass_params(sub_data, sub_class):
543543
)
544544
self.rcsl_list.sort()
545545
for ii in range(1, len(self.rcsl_list)):
546-
assert (
547-
self.rcsl_list[ii - 1][1] <= self.rcsl_list[ii][1]
548-
), "rcut and sel are not in the same order"
546+
assert self.rcsl_list[ii - 1][1] <= self.rcsl_list[ii][1], (
547+
"rcut and sel are not in the same order"
548+
)
549549
self.rcut_list = [ii[0] for ii in self.rcsl_list]
550550
self.nsel_list = [ii[1] for ii in self.rcsl_list]
551551
self.use_econf_tebd = use_econf_tebd
@@ -678,9 +678,9 @@ def change_type_map(
678678
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
679679
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
680680
"""
681-
assert (
682-
self.type_map is not None
683-
), "'type_map' must be defined when performing type changing!"
681+
assert self.type_map is not None, (
682+
"'type_map' must be defined when performing type changing!"
683+
)
684684
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
685685
self.type_map = type_map
686686
self.type_embedding.change_type_map(type_map=type_map)

deepmd/dpmodel/descriptor/hybrid.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,9 @@ def __init__(
7171
for ii in range(1, self.numb_descrpt):
7272
assert (
7373
self.descrpt_list[ii].get_ntypes() == self.descrpt_list[0].get_ntypes()
74-
), f"number of atom types in {ii}th descriptor {self.descrpt_list[0].__class__.__name__} does not match others"
74+
), (
75+
f"number of atom types in {ii}th descriptor {self.descrpt_list[0].__class__.__name__} does not match others"
76+
)
7577
# if hybrid sel is larger than sub sel, the nlist needs to be cut for each type
7678
hybrid_sel = self.get_sel()
7779
nlist_cut_idx: list[np.ndarray] = []

deepmd/dpmodel/descriptor/se_t_tebd.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -270,9 +270,9 @@ def change_type_map(
270270
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
271271
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
272272
"""
273-
assert (
274-
self.type_map is not None
275-
), "'type_map' must be defined when performing type changing!"
273+
assert self.type_map is not None, (
274+
"'type_map' must be defined when performing type changing!"
275+
)
276276
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
277277
obj = self.se_ttebd
278278
obj.ntypes = len(type_map)

deepmd/dpmodel/fitting/general_fitting.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -242,9 +242,9 @@ def change_type_map(
242242
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
243243
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
244244
"""
245-
assert (
246-
self.type_map is not None
247-
), "'type_map' must be defined when performing type changing!"
245+
assert self.type_map is not None, (
246+
"'type_map' must be defined when performing type changing!"
247+
)
248248
assert self.mixed_types, "Only models in mixed types can perform type changing!"
249249
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
250250
self.type_map = type_map

deepmd/dpmodel/fitting/polarizability_fitting.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -224,9 +224,9 @@ def change_type_map(
224224
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
225225
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
226226
"""
227-
assert (
228-
self.type_map is not None
229-
), "'type_map' must be defined when performing type changing!"
227+
assert self.type_map is not None, (
228+
"'type_map' must be defined when performing type changing!"
229+
)
230230
assert self.mixed_types, "Only models in mixed types can perform type changing!"
231231
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
232232
super().change_type_map(type_map=type_map)
@@ -280,9 +280,9 @@ def call(
280280
"""
281281
xp = array_api_compat.array_namespace(descriptor, atype)
282282
nframes, nloc, _ = descriptor.shape
283-
assert (
284-
gr is not None
285-
), "Must provide the rotation matrix for polarizability fitting."
283+
assert gr is not None, (
284+
"Must provide the rotation matrix for polarizability fitting."
285+
)
286286
# (nframes, nloc, _net_out_dim)
287287
out = self._call_common(descriptor, atype, gr, g2, h2, fparam, aparam)[
288288
self.var_name

deepmd/dpmodel/utils/network.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -551,7 +551,7 @@ def check_shape_consistency(self) -> None:
551551
if self.layers[ii].dim_out() != self.layers[ii + 1].dim_in():
552552
raise ValueError(
553553
f"the dim of layer {ii} output {self.layers[ii].dim_out} ",
554-
f"does not match the dim of layer {ii+1} ",
554+
f"does not match the dim of layer {ii + 1} ",
555555
f"output {self.layers[ii].dim_out}",
556556
)
557557

deepmd/dpmodel/utils/type_embed.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -166,9 +166,9 @@ def change_type_map(
166166
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
167167
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
168168
"""
169-
assert (
170-
self.type_map is not None
171-
), "'type_map' must be defined when performing type changing!"
169+
assert self.type_map is not None, (
170+
"'type_map' must be defined when performing type changing!"
171+
)
172172
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
173173
if not self.use_econf_tebd:
174174
do_resnet = self.neuron[0] in [
@@ -177,9 +177,9 @@ def change_type_map(
177177
len(type_map),
178178
len(type_map) * 2,
179179
]
180-
assert (
181-
not do_resnet or self.activation_function == "Linear"
182-
), "'activation_function' must be 'Linear' when performing type changing on resnet structure!"
180+
assert not do_resnet or self.activation_function == "Linear", (
181+
"'activation_function' must be 'Linear' when performing type changing on resnet structure!"
182+
)
183183
first_layer_matrix = self.embedding_net.layers[0].w
184184
eye_vector = np.eye(self.ntypes, dtype=PRECISION_DICT[self.precision])
185185
# preprocess for resnet connection
@@ -227,9 +227,9 @@ def get_econf_tebd(type_map, precision: str = "default"):
227227
)
228228
from deepmd.utils.econf_embd import type_map as periodic_table
229229

230-
assert (
231-
type_map is not None
232-
), "When using electronic configuration type embedding, type_map must be provided!"
230+
assert type_map is not None, (
231+
"When using electronic configuration type embedding, type_map must be provided!"
232+
)
233233

234234
missing_types = [t for t in type_map if t not in periodic_table]
235235
assert not missing_types, (

deepmd/infer/model_devi.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -211,7 +211,7 @@ def write_model_devi_out(
211211
f"min_devi_{item}",
212212
f"avg_devi_{item}",
213213
)
214-
header += f'{"devi_e":19s}'
214+
header += f"{'devi_e':19s}"
215215
if atomic:
216216
header += f"{'atm_devi_f(N)':19s}"
217217
with open(fname, "ab") as fp:

deepmd/loggers/training.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ def format_training_message(
99
wall_time: float,
1010
) -> str:
1111
"""Format a training message."""
12-
return f"batch {batch:7d}: " f"total wall time = {wall_time:.2f} s"
12+
return f"batch {batch:7d}: total wall time = {wall_time:.2f} s"
1313

1414

1515
def format_training_message_per_task(

deepmd/main.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,7 @@ class DeprecateAction(argparse.Action):
7272
def __init__(self, *args, **kwargs) -> None:
7373
self.call_count = 0
7474
if "help" in kwargs:
75-
kwargs["help"] = f'[DEPRECATED] {kwargs["help"]}'
75+
kwargs["help"] = f"[DEPRECATED] {kwargs['help']}"
7676
super().__init__(*args, **kwargs)
7777

7878
def __call__(self, parser, namespace, values, option_string=None):

deepmd/pd/entrypoints/main.py

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -247,9 +247,9 @@ def train(
247247
if multi_task:
248248
config["model"], shared_links = preprocess_shared_params(config["model"])
249249
# handle the special key
250-
assert (
251-
"RANDOM" not in config["model"]["model_dict"]
252-
), "Model name can not be 'RANDOM' in multi-task mode!"
250+
assert "RANDOM" not in config["model"]["model_dict"], (
251+
"Model name can not be 'RANDOM' in multi-task mode!"
252+
)
253253

254254
# update fine-tuning config
255255
finetune_links = None
@@ -404,9 +404,9 @@ def change_bias(
404404
multi_task = "model_dict" in model_params
405405
bias_adjust_mode = "change-by-statistic" if mode == "change" else "set-by-statistic"
406406
if multi_task:
407-
assert (
408-
model_branch is not None
409-
), "For multitask model, the model branch must be set!"
407+
assert model_branch is not None, (
408+
"For multitask model, the model branch must be set!"
409+
)
410410
assert model_branch in model_params["model_dict"], (
411411
f"For multitask model, the model branch must be in the 'model_dict'! "
412412
f"Available options are : {list(model_params['model_dict'].keys())}."
@@ -427,12 +427,12 @@ def change_bias(
427427

428428
if bias_value is not None:
429429
# use user-defined bias
430-
assert model_to_change.model_type in [
431-
"ener"
432-
], "User-defined bias is only available for energy model!"
433-
assert (
434-
len(bias_value) == len(type_map)
435-
), f"The number of elements in the bias should be the same as that in the type_map: {type_map}."
430+
assert model_to_change.model_type in ["ener"], (
431+
"User-defined bias is only available for energy model!"
432+
)
433+
assert len(bias_value) == len(type_map), (
434+
f"The number of elements in the bias should be the same as that in the type_map: {type_map}."
435+
)
436436
old_bias = model_to_change.get_out_bias()
437437
bias_to_set = paddle.to_tensor(
438438
bias_value, dtype=old_bias.dtype, place=old_bias.place

deepmd/pd/infer/deep_eval.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -92,12 +92,12 @@ def __init__(
9292
model_keys = list(self.input_param["model_dict"].keys())
9393
if isinstance(head, int):
9494
head = model_keys[0]
95-
assert (
96-
head is not None
97-
), f"Head must be set for multitask model! Available heads are: {model_keys}"
98-
assert (
99-
head in model_keys
100-
), f"No head named {head} in model! Available heads are: {model_keys}"
95+
assert head is not None, (
96+
f"Head must be set for multitask model! Available heads are: {model_keys}"
97+
)
98+
assert head in model_keys, (
99+
f"No head named {head} in model! Available heads are: {model_keys}"
100+
)
101101
self.input_param = self.input_param["model_dict"][head]
102102
state_dict_head = {"_extra_state": state_dict["_extra_state"]}
103103
for item in state_dict:

deepmd/pd/model/descriptor/descriptor.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -128,9 +128,9 @@ def share_params(self, base_class, shared_level, resume=False):
128128
If not start from checkpoint (resume is False),
129129
some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
130130
"""
131-
assert (
132-
self.__class__ == base_class.__class__
133-
), "Only descriptors of the same type can share params!"
131+
assert self.__class__ == base_class.__class__, (
132+
"Only descriptors of the same type can share params!"
133+
)
134134
if shared_level == 0:
135135
# link buffers
136136
if hasattr(self, "mean"):

deepmd/pd/model/descriptor/dpa1.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -378,9 +378,9 @@ def share_params(self, base_class, shared_level, resume=False) -> None:
378378
If not start from checkpoint (resume is False),
379379
some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
380380
"""
381-
assert (
382-
self.__class__ == base_class.__class__
383-
), "Only descriptors of the same type can share params!"
381+
assert self.__class__ == base_class.__class__, (
382+
"Only descriptors of the same type can share params!"
383+
)
384384
# For DPA1 descriptors, the user-defined share-level
385385
# shared_level: 0
386386
# share all parameters in both type_embedding and se_atten
@@ -449,9 +449,9 @@ def change_type_map(
449449
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
450450
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
451451
"""
452-
assert (
453-
self.type_map is not None
454-
), "'type_map' must be defined when performing type changing!"
452+
assert self.type_map is not None, (
453+
"'type_map' must be defined when performing type changing!"
454+
)
455455
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
456456
obj = self.se_atten
457457
obj.ntypes = len(type_map)

deepmd/pd/model/descriptor/dpa2.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -254,9 +254,9 @@ def init_subclass_params(sub_data, sub_class):
254254
)
255255
self.rcsl_list.sort()
256256
for ii in range(1, len(self.rcsl_list)):
257-
assert (
258-
self.rcsl_list[ii - 1][1] <= self.rcsl_list[ii][1]
259-
), "rcut and sel are not in the same order"
257+
assert self.rcsl_list[ii - 1][1] <= self.rcsl_list[ii][1], (
258+
"rcut and sel are not in the same order"
259+
)
260260
self.rcut_list = [ii[0] for ii in self.rcsl_list]
261261
self.nsel_list = [ii[1] for ii in self.rcsl_list]
262262
self.use_econf_tebd = use_econf_tebd
@@ -386,9 +386,9 @@ def share_params(self, base_class, shared_level, resume=False) -> None:
386386
If not start from checkpoint (resume is False),
387387
some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
388388
"""
389-
assert (
390-
self.__class__ == base_class.__class__
391-
), "Only descriptors of the same type can share params!"
389+
assert self.__class__ == base_class.__class__, (
390+
"Only descriptors of the same type can share params!"
391+
)
392392
# For DPA2 descriptors, the user-defined share-level
393393
# shared_level: 0
394394
# share all parameters in type_embedding, repinit and repformers
@@ -419,9 +419,9 @@ def change_type_map(
419419
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
420420
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
421421
"""
422-
assert (
423-
self.type_map is not None
424-
), "'type_map' must be defined when performing type changing!"
422+
assert self.type_map is not None, (
423+
"'type_map' must be defined when performing type changing!"
424+
)
425425
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
426426
self.type_map = type_map
427427
self.type_embedding.change_type_map(type_map=type_map)

deepmd/pd/model/descriptor/se_a.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -169,9 +169,9 @@ def share_params(self, base_class, shared_level, resume=False) -> None:
169169
If not start from checkpoint (resume is False),
170170
some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
171171
"""
172-
assert (
173-
self.__class__ == base_class.__class__
174-
), "Only descriptors of the same type can share params!"
172+
assert self.__class__ == base_class.__class__, (
173+
"Only descriptors of the same type can share params!"
174+
)
175175
# For SeA descriptors, the user-defined share-level
176176
# shared_level: 0
177177
# share all parameters in sea

deepmd/pd/model/descriptor/se_t_tebd.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -246,9 +246,9 @@ def share_params(self, base_class, shared_level, resume=False) -> None:
246246
If not start from checkpoint (resume is False),
247247
some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
248248
"""
249-
assert (
250-
self.__class__ == base_class.__class__
251-
), "Only descriptors of the same type can share params!"
249+
assert self.__class__ == base_class.__class__, (
250+
"Only descriptors of the same type can share params!"
251+
)
252252
# For DPA1 descriptors, the user-defined share-level
253253
# shared_level: 0
254254
# share all parameters in both type_embedding and se_ttebd
@@ -317,9 +317,9 @@ def change_type_map(
317317
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
318318
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
319319
"""
320-
assert (
321-
self.type_map is not None
322-
), "'type_map' must be defined when performing type changing!"
320+
assert self.type_map is not None, (
321+
"'type_map' must be defined when performing type changing!"
322+
)
323323
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
324324
obj = self.se_ttebd
325325
obj.ntypes = len(type_map)

0 commit comments

Comments (0)