From e9a25fbc897fb70f65a61766505ee0cf2e1cfc27 Mon Sep 17 00:00:00 2001 From: Jinzhe Zeng Date: Sat, 6 Jul 2024 11:36:19 -0400 Subject: [PATCH] chore: add the `mlp_engine` option (#1576) I am going to use DP-GEN to develop models trained by other MLP software. This may or may not be merged into the main branch, but I think a general `mlp_engine` option can be added anyway. ## Summary by CodeRabbit - **New Features** - Introduced handling for multiple ML potential engines with specialized training argument functions. - **Improvements** - Enhanced training initialization by splitting into common and engine-specific functions. - Improved error handling for unsupported ML potential engines. - **Bug Fixes** - Corrected logic to differentiate between `dp` and other engine values during training and model initialization. --------- Signed-off-by: Jinzhe Zeng Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- dpgen/generator/arginfo.py | 32 ++++++++++++++++++++++----- dpgen/generator/run.py | 44 +++++++++++++++++++++++++++++++------- dpgen/simplify/arginfo.py | 4 +++- dpgen/simplify/simplify.py | 8 +++++++ 4 files changed, 74 insertions(+), 14 deletions(-) diff --git a/dpgen/generator/arginfo.py b/dpgen/generator/arginfo.py index 92097af89..6cc38bbed 100644 --- a/dpgen/generator/arginfo.py +++ b/dpgen/generator/arginfo.py @@ -79,7 +79,14 @@ def data_args() -> list[Argument]: # Training -def training_args() -> list[Argument]: +def training_args_common() -> list[Argument]: + doc_numb_models = "Number of models to be trained in 00.train. 4 is recommend." + return [ + Argument("numb_models", int, optional=False, doc=doc_numb_models), + ] + + +def training_args_dp() -> list[Argument]: """Traning arguments. Returns @@ -90,7 +97,6 @@ def training_args() -> list[Argument]: doc_train_backend = ( "The backend of the training. Currently only support tensorflow and pytorch." ) - doc_numb_models = "Number of models to be trained in 00.train. 
4 is recommend." doc_training_iter0_model_path = "The model used to init the first iter training. Number of element should be equal to numb_models." doc_training_init_model = "Iteration > 0, the model parameters will be initilized from the model trained at the previous iteration. Iteration == 0, the model parameters will be initialized from training_iter0_model_path." doc_default_training_param = "Training parameters for deepmd-kit in 00.train. You can find instructions from `DeePMD-kit documentation `_." @@ -133,7 +139,6 @@ def training_args() -> list[Argument]: default="tensorflow", doc=doc_train_backend, ), - Argument("numb_models", int, optional=False, doc=doc_numb_models), Argument( "training_iter0_model_path", list[str], @@ -224,6 +229,19 @@ def training_args() -> list[Argument]: ] +def training_args() -> Variant: + doc_mlp_engine = "Machine learning potential engine. Currently, only DeePMD-kit (default) is supported." + doc_dp = "DeePMD-kit." + return Variant( + "mlp_engine", + [ + Argument("dp", dict, training_args_dp(), doc=doc_dp), + ], + default_tag="dp", + doc=doc_mlp_engine, + ) + + # Exploration def model_devi_jobs_template_args() -> Argument: doc_template = ( @@ -987,7 +1005,11 @@ def run_jdata_arginfo() -> Argument: return Argument( "run_jdata", dict, - sub_fields=basic_args() + data_args() + training_args() + fp_args(), - sub_variants=model_devi_args() + [fp_style_variant_type_args()], + sub_fields=basic_args() + data_args() + training_args_common() + fp_args(), + sub_variants=[ + training_args(), + *model_devi_args(), + fp_style_variant_type_args(), + ], doc=doc_run_jdata, ) diff --git a/dpgen/generator/run.py b/dpgen/generator/run.py index 1e3e0e3fa..d376d467a 100644 --- a/dpgen/generator/run.py +++ b/dpgen/generator/run.py @@ -128,15 +128,19 @@ def _get_model_suffix(jdata) -> str: """Return the model suffix based on the backend.""" - suffix_map = {"tensorflow": ".pb", "pytorch": ".pth"} - backend = jdata.get("train_backend", "tensorflow") - if 
backend in suffix_map: - suffix = suffix_map[backend] + mlp_engine = jdata.get("mlp_engine", "dp") + if mlp_engine == "dp": + suffix_map = {"tensorflow": ".pb", "pytorch": ".pth"} + backend = jdata.get("train_backend", "tensorflow") + if backend in suffix_map: + suffix = suffix_map[backend] + else: + raise ValueError( + f"The backend {backend} is not available. Supported backends are: 'tensorflow', 'pytorch'." + ) + return suffix else: - raise ValueError( - f"The backend {backend} is not available. Supported backends are: 'tensorflow', 'pytorch'." - ) - return suffix + raise ValueError(f"Unsupported engine: {mlp_engine}") def get_job_names(jdata): @@ -270,6 +274,14 @@ def dump_to_deepmd_raw(dump, deepmd_raw, type_map, fmt="gromacs/gro", charge=Non def make_train(iter_index, jdata, mdata): + mlp_engine = jdata.get("mlp_engine", "dp") + if mlp_engine == "dp": + return make_train_dp(iter_index, jdata, mdata) + else: + raise ValueError(f"Unsupported engine: {mlp_engine}") + + +def make_train_dp(iter_index, jdata, mdata): # load json param # train_param = jdata['train_param'] train_input_file = default_train_input_file @@ -714,6 +726,14 @@ def get_nframes(system): def run_train(iter_index, jdata, mdata): + mlp_engine = jdata.get("mlp_engine", "dp") + if mlp_engine == "dp": + return run_train_dp(iter_index, jdata, mdata) + else: + raise ValueError(f"Unsupported engine: {mlp_engine}") + + +def run_train_dp(iter_index, jdata, mdata): # print("debug:run_train:mdata", mdata) # load json param numb_models = jdata["numb_models"] @@ -899,6 +919,14 @@ def run_train(iter_index, jdata, mdata): def post_train(iter_index, jdata, mdata): + mlp_engine = jdata.get("mlp_engine", "dp") + if mlp_engine == "dp": + return post_train_dp(iter_index, jdata, mdata) + else: + raise ValueError(f"Unsupported engine: {mlp_engine}") + + +def post_train_dp(iter_index, jdata, mdata): # load json param numb_models = jdata["numb_models"] # paths diff --git a/dpgen/simplify/arginfo.py 
b/dpgen/simplify/arginfo.py index 516b27e60..53507b2f6 100644 --- a/dpgen/simplify/arginfo.py +++ b/dpgen/simplify/arginfo.py @@ -12,6 +12,7 @@ fp_style_siesta_args, fp_style_vasp_args, training_args, + training_args_common, ) @@ -201,10 +202,11 @@ def simplify_jdata_arginfo() -> Argument: *data_args(), *general_simplify_arginfo(), # simplify use the same training method as run - *training_args(), + *training_args_common(), *fp_args(), ], sub_variants=[ + training_args(), fp_style_variant_type_args(), ], doc=doc_run_jdata, diff --git a/dpgen/simplify/simplify.py b/dpgen/simplify/simplify.py index 02fe54d79..24205fda3 100644 --- a/dpgen/simplify/simplify.py +++ b/dpgen/simplify/simplify.py @@ -103,6 +103,14 @@ def get_multi_system(path: Union[str, list[str]], jdata: dict) -> dpdata.MultiSy def init_model(iter_index, jdata, mdata): + mlp_engine = jdata.get("mlp_engine", "dp") + if mlp_engine == "dp": + init_model_dp(iter_index, jdata, mdata) + else: + raise TypeError(f"unsupported engine {mlp_engine}") + + +def init_model_dp(iter_index, jdata, mdata): training_init_model = jdata.get("training_init_model", False) if not training_init_model: return