fix tebd
iProzd committed Jun 10, 2024
1 parent bf20853 commit af6c8b2
Showing 3 changed files with 32 additions and 3 deletions.
9 changes: 9 additions & 0 deletions deepmd/dpmodel/utils/type_embed.py
@@ -155,6 +155,15 @@ def change_type_map(
         ), "'type_map' must be defined when performing type changing!"
         remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
         if not self.use_econf_tebd:
+            do_resnet = self.neuron[0] in [
+                self.ntypes,
+                self.ntypes * 2,
+                len(type_map),
+                len(type_map) * 2,
+            ]
+            assert (
+                not do_resnet or self.activation_function == "Linear"
+            ), "'activation_function' must be 'Linear' when performing type changing on resnet structure!"
             first_layer_matrix = self.embedding_net.layers[0].w
             eye_vector = np.eye(self.ntypes, dtype=PRECISION_DICT[self.precision])
             # preprocess for resnet connection
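The new guard exists because change_type_map remaps the rows of the first-layer weight matrix, and when neuron[0] matches the old or new number of types the embedding net carries a resnet shortcut that adds the one-hot input to the layer output. The eye_vector preprocessing folds that shortcut into the weights before remapping, which is exact only for a linear activation, since act(x @ W) + x equals x @ (W + I) only when act is the identity. A minimal sketch of the pure-reordering case, with a hypothetical helper name (the real code mutates self.embedding_net.layers[0].w in place):

import numpy as np

def remap_resnet_first_layer(w, remap_index):
    # Sketch of the equal-width, pure-reordering resnet case
    # (neuron[0] == ntypes == len(remap_index)): fold the identity
    # shortcut into the weights, reorder the one-hot rows to the new
    # type order, then unfold the shortcut. Exact only when the
    # activation is linear.
    eye = np.eye(w.shape[0], dtype=w.dtype)
    return (w + eye)[remap_index] - eye  # x @ W + x == x @ (W + eye)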
9 changes: 9 additions & 0 deletions deepmd/pt/model/network/network.py
@@ -735,6 +735,15 @@ def change_type_map(
         ), "'type_map' must be defined when performing type changing!"
         remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
         if not self.use_econf_tebd:
+            do_resnet = self.neuron[0] in [
+                self.ntypes,
+                self.ntypes * 2,
+                len(type_map),
+                len(type_map) * 2,
+            ]
+            assert (
+                not do_resnet or self.activation_function == "Linear"
+            ), "'activation_function' must be 'Linear' when performing type changing on resnet structure!"
             first_layer_matrix = self.embedding_net.layers[0].matrix.data
             eye_vector = torch.eye(
                 self.ntypes, dtype=self.prec, device=first_layer_matrix.device
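The PyTorch path gets the identical guard. The four widths in do_resnet mirror the shortcut rule of DeePMD-style embedding-net layers, where the residual connection fires when the output width equals the input width or twice it, so both the old and the new type counts have to be screened. A toy layer illustrating that rule (the function name and the simplified bias handling are assumptions, not the real network.py code):

import torch

def layer_forward(x, w, b, activation):
    # Toy embedding-net layer with the resnet rule: apply the shortcut
    # when the output width equals the input width or twice it.
    y = activation(x @ w + b)
    n_in, n_out = w.shape
    if n_out == n_in:  # same width: plain shortcut
        y = y + x
    elif n_out == 2 * n_in:  # doubled width: duplicated shortcut
        y = y + torch.cat([x, x], dim=-1)
    return y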
17 changes: 14 additions & 3 deletions source/tests/universal/common/cases/utils/utils.py
@@ -22,7 +22,6 @@ def setUp(self):
         self.input_dict = {
             "ntypes": self.nt,
             "neuron": [8],
-            "activation_function": "Linear",
             "type_map": ["O", "H"],
             "use_econf_tebd": False,
         }
@@ -53,7 +52,7 @@ def test_change_type_map(self):
             "Ar",
         ]  # 18 elements
         rng = np.random.default_rng(GLOBAL_SEED)
-        for old_tm, new_tm, neuron, econf in itertools.product(
+        for old_tm, new_tm, neuron, act, econf in itertools.product(
             [
                 full_type_map_test[:],  # 18 elements
                 full_type_map_test[
@@ -70,9 +69,19 @@
                 full_type_map_test[:8],  # 8 elements, tebd default first dim
                 ["H", "O"],  # slimmed types
             ],  # new_type_map
-            [[8], [8, 16, 32]],
+            [[8], [8, 16, 32]],  # neuron
+            ["Linear", "tanh"],  # activation_function
             [False, True],  # use_econf_tebd
         ):
+            do_resnet = neuron[0] in [
+                len(old_tm),
+                len(old_tm) * 2,
+                len(new_tm),
+                len(new_tm) * 2,
+            ]
+            if do_resnet and act != "Linear":
+                # `activation_function` must be "Linear" when performing type changing on resnet structure
+                continue
             # use shuffled type_map
             rng.shuffle(old_tm)
             rng.shuffle(new_tm)
@@ -85,6 +94,8 @@
             old_tm_input = deepcopy(self.input_dict)
             old_tm_input["type_map"] = old_tm
             old_tm_input["ntypes"] = len(old_tm)
+            old_tm_input["neuron"] = neuron
+            old_tm_input["activation_function"] = act
             old_tm_input["use_econf_tebd"] = econf
             old_tm_module = self.module_class(**old_tm_input)
             old_tm_dd = self.forward_wrapper(old_tm_module)
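The test now sweeps the activation function as well and skips the combinations the new assertion forbids. For illustration, the assertion can also be triggered directly; a hedged example, assuming the type embedding module accepts the same keyword arguments as the test's input_dict (TypeEmbedNet is used here as a stand-in for self.module_class):

from deepmd.dpmodel.utils.type_embed import TypeEmbedNet

# neuron[0] == 2 matches ntypes, so the resnet path is active and a
# non-linear activation must be rejected when the type map changes.
net = TypeEmbedNet(
    ntypes=2,
    neuron=[2],
    activation_function="tanh",
    type_map=["O", "H"],
    use_econf_tebd=False,
)
net.change_type_map(type_map=["H", "O"])  # raises AssertionError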
