
Commit fb0b5ef

Merge branch 'master' into feat-se-t
Signed-off-by: Jinzhe Zeng <jinzhe.zeng@ustc.edu.cn>
2 parents 5b6c9d8 + cd67bbe commit fb0b5ef

File tree

9 files changed: +71 -24 lines changed

.pre-commit-config.yaml
deepmd/dpmodel/utils/network.py
deepmd/dpmodel/utils/serialization.py
deepmd/pd/utils/multi_task.py
deepmd/pt/utils/env.py
deepmd/pt/utils/multi_task.py
deepmd/pt_expt/utils/env.py
source/tests/common/dpmodel/test_network.py
source/tests/pt_expt/utils/test_network.py

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion

@@ -29,7 +29,7 @@ repos:
         exclude: ^source/3rdparty
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: v0.14.14
+    rev: v0.15.0
     hooks:
       - id: ruff
         args: ["--fix"]

deepmd/dpmodel/utils/network.py

Lines changed: 7 additions & 4 deletions

@@ -886,10 +886,13 @@ def deserialize(cls, data: dict) -> "EmbeddingNet":
         obj = cls(**data)
         # Reinitialize layers from serialized data, using the same layer type
         # that __init__ created (respects subclass overrides via MRO).
-        layer_type = type(obj.layers[0])
-        obj.layers = type(obj.layers)(
-            [layer_type.deserialize(layer) for layer in layers]
-        )
+        if obj.layers:
+            layer_type = type(obj.layers[0])
+            obj.layers = type(obj.layers)(
+                [layer_type.deserialize(layer) for layer in layers]
+            )
+        else:
+            obj.layers = type(obj.layers)([])
         return obj
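For context, a minimal self-contained sketch of the failure mode this guard avoids. ToyLayer and rebuild below are illustrative stand-ins, not deepmd's actual classes: when the network is built with neuron=[], the layer list is empty, so probing type(obj.layers[0]) would raise IndexError.

# Illustrative only: ToyLayer / rebuild are stand-ins for deepmd's classes.
class ToyLayer:
    @classmethod
    def deserialize(cls, data: dict) -> "ToyLayer":
        return cls()


def rebuild(existing: list, serialized_layers: list) -> list:
    if existing:  # the guard added in the diff above
        layer_type = type(existing[0])
        return [layer_type.deserialize(layer) for layer in serialized_layers]
    # neuron=[]: nothing to rebuild, and existing[0] would raise IndexError
    return []


print(rebuild([ToyLayer()], [{}]))  # one rebuilt ToyLayer
print(rebuild([], []))              # [] instead of IndexError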

deepmd/dpmodel/utils/serialization.py

Lines changed: 14 additions & 12 deletions

@@ -106,9 +106,9 @@ def save_dp_model(filename: str, model_dict: dict) -> None:
         with h5py.File(filename, "w") as f:
             model_dict = traverse_model_dict(
                 model_dict,
-                lambda x: f.create_dataset(
-                    f"variable_{variable_counter():04d}", data=x
-                ).name,
+                lambda x: (
+                    f.create_dataset(f"variable_{variable_counter():04d}", data=x).name
+                ),
             )
             save_dict = {
                 **extra_dict,
@@ -118,15 +118,17 @@ def save_dp_model(filename: str, model_dict: dict) -> None:
     elif filename_extension in {".yaml", ".yml"}:
         model_dict = traverse_model_dict(
             model_dict,
-            lambda x: {
-                "@class": "np.ndarray",
-                "@is_variable": True,
-                "@version": 1,
-                "dtype": x.dtype.name,
-                "value": x.tolist(),
-            }
-            if isinstance(x, np.ndarray)
-            else x,
+            lambda x: (
+                {
+                    "@class": "np.ndarray",
+                    "@is_variable": True,
+                    "@version": 1,
+                    "dtype": x.dtype.name,
+                    "value": x.tolist(),
+                }
+                if isinstance(x, np.ndarray)
+                else x
+            ),
         )
         with open(filename, "w") as f:
             yaml.safe_dump(
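Both hunks above are behaviour-preserving reformattings: the lambda bodies passed to traverse_model_dict are wrapped in parentheses, presumably to match the formatter of the Ruff release bumped in this commit. A rough sketch of the pattern, with a toy traverse helper standing in for traverse_model_dict:

import numpy as np


def traverse(values, fn):
    # Toy stand-in for traverse_model_dict: apply fn to every value in a list.
    return [fn(v) for v in values]


converted = traverse(
    [np.arange(3.0), "left untouched"],
    # The conditional expression is parenthesized, as in the diff above.
    lambda x: (
        {"dtype": x.dtype.name, "value": x.tolist()}
        if isinstance(x, np.ndarray)
        else x
    ),
)
print(converted)  # [{'dtype': 'float64', 'value': [0.0, 1.0, 2.0]}, 'left untouched']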

deepmd/pd/utils/multi_task.py

Lines changed: 4 additions & 2 deletions

@@ -150,8 +150,10 @@ def replace_one_item(
     for shared_key in shared_links:
         shared_links[shared_key]["links"] = sorted(
             shared_links[shared_key]["links"],
-            key=lambda x: x["shared_level"]
-            - ("spin" in model_config["model_dict"][x["model_key"]]) * 100,
+            key=lambda x: (
+                x["shared_level"]
+                - ("spin" in model_config["model_dict"][x["model_key"]]) * 100
+            ),
         )
         # little trick to make spin models in the front to be the base models,
         # because its type embeddings are more general.
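The change only parenthesizes the lambda body; the sort key itself is unchanged. A toy illustration of what that key does (data made up for illustration): entries whose model configuration contains a "spin" section get 100 subtracted from their shared_level, so an ascending sort puts spin models first and they become the base models. The same reformatting appears again below for the PyTorch backend.

# Made-up data to illustrate the sort key above.
model_dict = {
    "water_spin": {"spin": {"use_spin": [True]}},
    "water": {},
}
links = [
    {"model_key": "water", "shared_level": 0},
    {"model_key": "water_spin", "shared_level": 0},
]
links = sorted(
    links,
    key=lambda x: (
        x["shared_level"] - ("spin" in model_dict[x["model_key"]]) * 100
    ),
)
print([link["model_key"] for link in links])  # ['water_spin', 'water']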

deepmd/pt/utils/env.py

Lines changed: 1 addition & 1 deletion

@@ -34,7 +34,7 @@
     # only linux
     ncpus = len(os.sched_getaffinity(0))
 except AttributeError:
-    ncpus = os.cpu_count()
+    ncpus = os.cpu_count() or 1
 NUM_WORKERS = int(os.environ.get("NUM_WORKERS", min(4, ncpus)))
 if multiprocessing.get_start_method() != "fork":
     # spawn or forkserver does not support NUM_WORKERS > 0 for DataLoader
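os.cpu_count() is documented to return None when the number of CPUs cannot be determined, in which case min(4, ncpus) would raise a TypeError; "or 1" pins the fallback to a single worker. A standalone sketch of the same logic (the pt_expt backend below receives the identical fix):

import os

try:
    # os.sched_getaffinity is only available on Linux.
    ncpus = len(os.sched_getaffinity(0))
except AttributeError:
    # os.cpu_count() may return None; fall back to 1 so min(4, ncpus) stays valid.
    ncpus = os.cpu_count() or 1

NUM_WORKERS = int(os.environ.get("NUM_WORKERS", min(4, ncpus)))
print(NUM_WORKERS)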

deepmd/pt/utils/multi_task.py

Lines changed: 4 additions & 2 deletions

@@ -155,8 +155,10 @@ def replace_one_item(
     for shared_key in shared_links:
         shared_links[shared_key]["links"] = sorted(
             shared_links[shared_key]["links"],
-            key=lambda x: x["shared_level"]
-            - ("spin" in model_config["model_dict"][x["model_key"]]) * 100,
+            key=lambda x: (
+                x["shared_level"]
+                - ("spin" in model_config["model_dict"][x["model_key"]]) * 100
+            ),
         )
         # little trick to make spin models in the front to be the base models,
         # because its type embeddings are more general.

deepmd/pt_expt/utils/env.py

Lines changed: 1 addition & 1 deletion

@@ -34,7 +34,7 @@
     # only linux
     ncpus = len(os.sched_getaffinity(0))
 except AttributeError:
-    ncpus = os.cpu_count()
+    ncpus = os.cpu_count() or 1
 NUM_WORKERS = int(os.environ.get("NUM_WORKERS", min(4, ncpus)))
 if multiprocessing.get_start_method() != "fork":
     # spawn or forkserver does not support NUM_WORKERS > 0 for DataLoader

source/tests/common/dpmodel/test_network.py

Lines changed: 38 additions & 0 deletions

@@ -250,6 +250,44 @@ def test_trainable_parameter_variants(self) -> None:
         self.assertTrue(net_mixed.layers[0].trainable)
         self.assertFalse(net_mixed.layers[1].trainable)
 
+    def test_empty_layers_round_trip(self) -> None:
+        """Test EmbeddingNet with empty neuron list (edge case for deserialize).
+
+        This tests the fix for IndexError when neuron=[] results in empty layers.
+        The deserialize method should handle this case without trying to access
+        layers[0] when the list is empty.
+        """
+        in_dim = 4
+        neuron = []  # Empty neuron list
+
+        # Create network with empty layers
+        net = EmbeddingNet(
+            in_dim=in_dim,
+            neuron=neuron,
+            activation_function="tanh",
+            resnet_dt=True,
+            precision="float64",
+        )
+
+        # Verify it has no layers
+        self.assertEqual(len(net.layers), 0)
+
+        # Serialize and deserialize
+        serialized = net.serialize()
+        net_restored = EmbeddingNet.deserialize(serialized)
+
+        # Verify restored network also has no layers
+        self.assertEqual(len(net_restored.layers), 0)
+        self.assertEqual(net_restored.in_dim, in_dim)
+        self.assertEqual(net_restored.neuron, neuron)
+
+        # Verify forward pass works (should return input unchanged)
+        rng = np.random.default_rng()
+        x = rng.standard_normal((5, in_dim))
+        out = net_restored.call(x)
+        # With no layers, output should equal input
+        np.testing.assert_allclose(out, x)
+
 
 class TestFittingNet(unittest.TestCase):
     def test_fitting_net(self) -> None:

source/tests/pt_expt/utils/test_network.py

Lines changed: 1 addition & 1 deletion

@@ -165,7 +165,7 @@ def test_cross_backend_consistency(self) -> None:
         # Test forward pass
         rng = np.random.default_rng()
         x_np = rng.standard_normal((5, self.in_dim))
-        x_torch = torch.from_numpy(x_np)
+        x_torch = torch.from_numpy(x_np).to(env.DEVICE)
 
         out_dp = dp_net.call(x_np)
         out_pt = pt_net(x_torch).detach().cpu().numpy()
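torch.from_numpy always yields a CPU tensor, so when the PyTorch network's parameters live on env.DEVICE (for example a CUDA GPU), passing the raw tensor raises a device-mismatch RuntimeError; moving the input with .to(env.DEVICE) keeps the cross-backend comparison working on GPU runners. A self-contained sketch of the pattern, with a local DEVICE standing in for deepmd's env.DEVICE and a plain torch.nn.Linear standing in for the network under test:

import numpy as np
import torch

# Stand-in for deepmd's env.DEVICE.
DEVICE = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")

net = torch.nn.Linear(4, 2).to(DEVICE)       # parameters live on DEVICE
x_np = np.random.default_rng().standard_normal((5, 4), dtype=np.float32)
x_torch = torch.from_numpy(x_np).to(DEVICE)  # move the CPU tensor to DEVICE
out = net(x_torch).detach().cpu().numpy()    # back to NumPy for comparison
print(out.shape)  # (5, 2)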
