Skip to content

Commit

Permalink
feat: add seeds to dpmodel and fix seeds in tf & pt (#3880)
Browse files Browse the repository at this point in the history
Fix #3799.

<!-- This is an auto-generated comment: release notes by coderabbit.ai
-->
## Summary by CodeRabbit

- **New Features**
  - Introduced flexibility in specifying seed values, allowing either an
    integer or a list of integers.
  - Enhanced seed parameter usage across various initialization methods
    and classes for more controlled randomization.

- **Improvements**
  - Updated seed initialization logic to include additional computations
    and dynamic adjustments.
  - Enhanced documentation for parameters in multiple classes, providing
    clearer usage guidelines.
<!-- end of auto-generated comment: release notes by coderabbit.ai -->

---------

Signed-off-by: Jinzhe Zeng <[email protected]>
  • Loading branch information
njzjz authored Jun 19, 2024
1 parent e2b659a commit 0c472d1
Show file tree
Hide file tree
Showing 38 changed files with 536 additions and 225 deletions.
26 changes: 23 additions & 3 deletions deepmd/dpmodel/descriptor/dpa1.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,9 @@
LayerNorm,
NativeLayer,
)
from deepmd.dpmodel.utils.seed import (
child_seed,
)
from deepmd.dpmodel.utils.type_embed import (
TypeEmbedNet,
)
Expand Down Expand Up @@ -251,7 +254,7 @@ def __init__(
use_econf_tebd: bool = False,
type_map: Optional[List[str]] = None,
# consistent with argcheck, not used though
seed: Optional[int] = None,
seed: Optional[Union[int, List[int]]] = None,
) -> None:
## seed, uniform_seed, not included.
# Ensure compatibility with the deprecated stripped_type_embedding option.
Expand Down Expand Up @@ -294,6 +297,7 @@ def __init__(
env_protection=env_protection,
trainable_ln=trainable_ln,
ln_eps=ln_eps,
seed=child_seed(seed, 0),
)
self.use_econf_tebd = use_econf_tebd
self.type_map = type_map
Expand All @@ -305,6 +309,7 @@ def __init__(
precision=precision,
use_econf_tebd=use_econf_tebd,
type_map=type_map,
seed=child_seed(seed, 1),
)
self.tebd_dim = tebd_dim
self.concat_output_tebd = concat_output_tebd
Expand Down Expand Up @@ -625,6 +630,7 @@ def __init__(
trainable_ln: bool = True,
ln_eps: Optional[float] = 1e-5,
smooth: bool = True,
seed: Optional[Union[int, List[int]]] = None,
) -> None:
self.rcut = rcut
self.rcut_smth = rcut_smth
Expand Down Expand Up @@ -674,6 +680,7 @@ def __init__(
self.activation_function,
self.resnet_dt,
self.precision,
seed=child_seed(seed, 0),
)
if self.tebd_input_mode in ["strip"]:
self.embeddings_strip = NetworkCollection(
Expand All @@ -687,6 +694,7 @@ def __init__(
self.activation_function,
self.resnet_dt,
self.precision,
seed=child_seed(seed, 1),
)
else:
self.embeddings_strip = None
Expand All @@ -703,6 +711,7 @@ def __init__(
ln_eps=self.ln_eps,
smooth=self.smooth,
precision=self.precision,
seed=child_seed(seed, 2),
)

wanted_shape = (self.ntypes, self.nnei, 4)
Expand Down Expand Up @@ -950,6 +959,7 @@ def __init__(
ln_eps: float = 1e-5,
smooth: bool = True,
precision: str = DEFAULT_PRECISION,
seed: Optional[Union[int, List[int]]] = None,
):
"""Construct a neighbor-wise attention net."""
super().__init__()
Expand Down Expand Up @@ -982,8 +992,9 @@ def __init__(
ln_eps=ln_eps,
smooth=smooth,
precision=precision,
seed=child_seed(seed, ii),
)
for _ in range(layer_num)
for ii in range(layer_num)
]

def call(
Expand Down Expand Up @@ -1076,6 +1087,7 @@ def __init__(
ln_eps: float = 1e-5,
smooth: bool = True,
precision: str = DEFAULT_PRECISION,
seed: Optional[Union[int, List[int]]] = None,
):
"""Construct a neighbor-wise attention layer."""
super().__init__()
Expand All @@ -1101,9 +1113,14 @@ def __init__(
temperature=temperature,
smooth=smooth,
precision=precision,
seed=child_seed(seed, 0),
)
self.attn_layer_norm = LayerNorm(
self.embed_dim, eps=ln_eps, trainable=self.trainable_ln, precision=precision
self.embed_dim,
eps=ln_eps,
trainable=self.trainable_ln,
precision=precision,
seed=child_seed(seed, 1),
)

def call(
Expand Down Expand Up @@ -1176,6 +1193,7 @@ def __init__(
bias: bool = True,
smooth: bool = True,
precision: str = DEFAULT_PRECISION,
seed: Optional[Union[int, List[int]]] = None,
):
"""Construct a multi-head neighbor-wise attention net."""
super().__init__()
Expand Down Expand Up @@ -1204,13 +1222,15 @@ def __init__(
bias=bias,
use_timestep=False,
precision=precision,
seed=child_seed(seed, 0),
)
self.out_proj = NativeLayer(
hidden_dim,
embed_dim,
bias=bias,
use_timestep=False,
precision=precision,
seed=child_seed(seed, 1),
)

def call(self, query, nei_mask, input_r=None, sw=None, attnw_shift=20.0):
Expand Down
8 changes: 7 additions & 1 deletion deepmd/dpmodel/descriptor/dpa2.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@
build_multiple_neighbor_list,
get_multiple_nlist_key,
)
from deepmd.dpmodel.utils.seed import (
child_seed,
)
from deepmd.dpmodel.utils.type_embed import (
TypeEmbedNet,
)
Expand Down Expand Up @@ -325,7 +328,7 @@ def __init__(
exclude_types: List[Tuple[int, int]] = [],
env_protection: float = 0.0,
trainable: bool = True,
seed: Optional[int] = None,
seed: Optional[Union[int, List[int]]] = None,
add_tebd_to_repinit_out: bool = False,
use_econf_tebd: bool = False,
type_map: Optional[List[str]] = None,
Expand Down Expand Up @@ -408,6 +411,7 @@ def init_subclass_params(sub_data, sub_class):
resnet_dt=self.repinit_args.resnet_dt,
smooth=smooth,
type_one_side=self.repinit_args.type_one_side,
seed=child_seed(seed, 0),
)
self.repformers = DescrptBlockRepformers(
self.repformer_args.rcut,
Expand Down Expand Up @@ -442,6 +446,7 @@ def init_subclass_params(sub_data, sub_class):
precision=precision,
trainable_ln=self.repformer_args.trainable_ln,
ln_eps=self.repformer_args.ln_eps,
seed=child_seed(seed, 1),
)
self.use_econf_tebd = use_econf_tebd
self.type_map = type_map
Expand All @@ -453,6 +458,7 @@ def init_subclass_params(sub_data, sub_class):
precision=precision,
use_econf_tebd=use_econf_tebd,
type_map=type_map,
seed=child_seed(seed, 2),
)
self.concat_output_tebd = concat_output_tebd
self.precision = precision
Expand Down
Loading

0 comments on commit 0c472d1

Please sign in to comment.