diff --git a/.git_archival.txt b/.git_archival.txt
index 8fb235d70..7c5100942 100644
--- a/.git_archival.txt
+++ b/.git_archival.txt
@@ -1,4 +1,3 @@
 node: $Format:%H$
 node-date: $Format:%cI$
 describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$
-ref-names: $Format:%D$
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 148dc95f2..535025f7a 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -28,7 +28,7 @@ repos:
   # Python
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.4.5
+    rev: v0.5.1
     hooks:
       - id: ruff
         args: ["--fix"]
diff --git a/dpgen/generator/arginfo.py b/dpgen/generator/arginfo.py
index 92097af89..6cc38bbed 100644
--- a/dpgen/generator/arginfo.py
+++ b/dpgen/generator/arginfo.py
@@ -79,7 +79,14 @@ def data_args() -> list[Argument]:
 
 
 # Training
-def training_args() -> list[Argument]:
+def training_args_common() -> list[Argument]:
+    doc_numb_models = "Number of models to be trained in 00.train. 4 is recommended."
+    return [
+        Argument("numb_models", int, optional=False, doc=doc_numb_models),
+    ]
+
+
+def training_args_dp() -> list[Argument]:
     """Traning arguments.
 
     Returns
@@ -90,7 +97,6 @@ def training_args() -> list[Argument]:
     doc_train_backend = (
         "The backend of the training. Currently only support tensorflow and pytorch."
     )
-    doc_numb_models = "Number of models to be trained in 00.train. 4 is recommend."
     doc_training_iter0_model_path = "The model used to init the first iter training. Number of element should be equal to numb_models."
     doc_training_init_model = "Iteration > 0, the model parameters will be initilized from the model trained at the previous iteration. Iteration == 0, the model parameters will be initialized from training_iter0_model_path."
     doc_default_training_param = "Training parameters for deepmd-kit in 00.train. You can find instructions from `DeePMD-kit documentation `_."
@@ -133,7 +139,6 @@ def training_args() -> list[Argument]:
             default="tensorflow",
             doc=doc_train_backend,
         ),
-        Argument("numb_models", int, optional=False, doc=doc_numb_models),
         Argument(
             "training_iter0_model_path",
             list[str],
@@ -224,6 +229,19 @@ def training_args() -> list[Argument]:
     ]
 
 
+def training_args() -> Variant:
+    doc_mlp_engine = "Machine learning potential engine. Currently, only DeePMD-kit (default) is supported."
+    doc_dp = "DeePMD-kit."
+    return Variant(
+        "mlp_engine",
+        [
+            Argument("dp", dict, training_args_dp(), doc=doc_dp),
+        ],
+        default_tag="dp",
+        doc=doc_mlp_engine,
+    )
+
+
 # Exploration
 def model_devi_jobs_template_args() -> Argument:
     doc_template = (
@@ -987,7 +1005,11 @@ def run_jdata_arginfo() -> Argument:
     return Argument(
         "run_jdata",
         dict,
-        sub_fields=basic_args() + data_args() + training_args() + fp_args(),
-        sub_variants=model_devi_args() + [fp_style_variant_type_args()],
+        sub_fields=basic_args() + data_args() + training_args_common() + fp_args(),
+        sub_variants=[
+            training_args(),
+            *model_devi_args(),
+            fp_style_variant_type_args(),
+        ],
         doc=doc_run_jdata,
     )
diff --git a/dpgen/generator/run.py b/dpgen/generator/run.py
index 1e3e0e3fa..d376d467a 100644
--- a/dpgen/generator/run.py
+++ b/dpgen/generator/run.py
@@ -128,15 +128,19 @@ def _get_model_suffix(jdata) -> str:
     """Return the model suffix based on the backend."""
-    suffix_map = {"tensorflow": ".pb", "pytorch": ".pth"}
-    backend = jdata.get("train_backend", "tensorflow")
-    if backend in suffix_map:
-        suffix = suffix_map[backend]
+    mlp_engine = jdata.get("mlp_engine", "dp")
+    if mlp_engine == "dp":
+        suffix_map = {"tensorflow": ".pb", "pytorch": ".pth"}
+        backend = jdata.get("train_backend", "tensorflow")
+        if backend in suffix_map:
+            suffix = suffix_map[backend]
+        else:
+            raise ValueError(
+                f"The backend {backend} is not available. Supported backends are: 'tensorflow', 'pytorch'."
+            )
+        return suffix
     else:
-        raise ValueError(
-            f"The backend {backend} is not available. Supported backends are: 'tensorflow', 'pytorch'."
-        )
-    return suffix
+        raise ValueError(f"Unsupported engine: {mlp_engine}")
 
 
 def get_job_names(jdata):
@@ -270,6 +274,14 @@ def dump_to_deepmd_raw(dump, deepmd_raw, type_map, fmt="gromacs/gro", charge=Non
 
 
 def make_train(iter_index, jdata, mdata):
+    mlp_engine = jdata.get("mlp_engine", "dp")
+    if mlp_engine == "dp":
+        return make_train_dp(iter_index, jdata, mdata)
+    else:
+        raise ValueError(f"Unsupported engine: {mlp_engine}")
+
+
+def make_train_dp(iter_index, jdata, mdata):
     # load json param
     # train_param = jdata['train_param']
     train_input_file = default_train_input_file
@@ -714,6 +726,14 @@ def get_nframes(system):
 
 
 def run_train(iter_index, jdata, mdata):
+    mlp_engine = jdata.get("mlp_engine", "dp")
+    if mlp_engine == "dp":
+        return run_train_dp(iter_index, jdata, mdata)
+    else:
+        raise ValueError(f"Unsupported engine: {mlp_engine}")
+
+
+def run_train_dp(iter_index, jdata, mdata):
     # print("debug:run_train:mdata", mdata)
     # load json param
     numb_models = jdata["numb_models"]
@@ -899,6 +919,14 @@ def run_train(iter_index, jdata, mdata):
 
 
 def post_train(iter_index, jdata, mdata):
+    mlp_engine = jdata.get("mlp_engine", "dp")
+    if mlp_engine == "dp":
+        return post_train_dp(iter_index, jdata, mdata)
+    else:
+        raise ValueError(f"Unsupported engine: {mlp_engine}")
+
+
+def post_train_dp(iter_index, jdata, mdata):
     # load json param
     numb_models = jdata["numb_models"]
     # paths
diff --git a/dpgen/simplify/arginfo.py b/dpgen/simplify/arginfo.py
index 516b27e60..53507b2f6 100644
--- a/dpgen/simplify/arginfo.py
+++ b/dpgen/simplify/arginfo.py
@@ -12,6 +12,7 @@
     fp_style_siesta_args,
     fp_style_vasp_args,
     training_args,
+    training_args_common,
 )
@@ -201,10 +202,11 @@ def simplify_jdata_arginfo() -> Argument:
             *data_args(),
             *general_simplify_arginfo(),
             # simplify use the same training method as run
-            *training_args(),
+            *training_args_common(),
             *fp_args(),
         ],
         sub_variants=[
+            training_args(),
             fp_style_variant_type_args(),
         ],
         doc=doc_run_jdata,
diff --git a/dpgen/simplify/simplify.py b/dpgen/simplify/simplify.py
index 02fe54d79..24205fda3 100644
--- a/dpgen/simplify/simplify.py
+++ b/dpgen/simplify/simplify.py
@@ -103,6 +103,14 @@ def get_multi_system(path: Union[str, list[str]], jdata: dict) -> dpdata.MultiSy
 
 
 def init_model(iter_index, jdata, mdata):
+    mlp_engine = jdata.get("mlp_engine", "dp")
+    if mlp_engine == "dp":
+        init_model_dp(iter_index, jdata, mdata)
+    else:
+        raise TypeError(f"unsupported engine {mlp_engine}")
+
+
+def init_model_dp(iter_index, jdata, mdata):
     training_init_model = jdata.get("training_init_model", False)
     if not training_init_model:
         return
diff --git a/pyproject.toml b/pyproject.toml
index d5997f6a6..709cb0ca3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,6 +27,7 @@ dependencies = [
     'paramiko',
     'custodian',
     'GromacsWrapper>=0.8.0',
+    'GromacsWrapper>=0.9.0; python_version >= "3.12"',
     'dpdispatcher>=0.3.11',
     'netCDF4',
     'dargs>=0.4.0',
@@ -64,8 +65,9 @@ test = [
     "dpgui",
     "coverage",
     "pymatgen-analysis-defects<2023.08.22",
-    # To be fixed: https://github.com/Becksteinlab/GromacsWrapper/issues/263
-    'setuptools; python_version >= "3.12"',
+    # https://github.com/materialsproject/pymatgen/issues/3882
+    # https://github.com/kuelumbus/rdkit-pypi/issues/102
+    "numpy<2",
 ]
 gui = [
     "dpgui",
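
For reviewers checking the user-facing effect of the new `mlp_engine` variant, here is a minimal sketch (not part of the patch) of the training-related keys of a run `param.json`, written as a Python dict. The key names (`mlp_engine`, `train_backend`, `numb_models`) come from the diff above; the values and the stand-alone suffix lookup are illustrative only.

```python
# Illustrative sketch: training keys of a run param.json, as a Python dict.
# Omitting "mlp_engine" keeps the previous behaviour, because every dispatcher
# in this patch falls back to jdata.get("mlp_engine", "dp").
jdata = {
    "numb_models": 4,            # common training argument (training_args_common)
    "mlp_engine": "dp",          # only the DeePMD-kit engine is supported so far
    "train_backend": "pytorch",  # dp-specific argument (training_args_dp)
}

# Same fallback chain as _get_model_suffix(): ".pb" for tensorflow, ".pth" for pytorch.
suffix_map = {"tensorflow": ".pb", "pytorch": ".pth"}
suffix = suffix_map[jdata.get("train_backend", "tensorflow")]
print(suffix)  # -> ".pth"
```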
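
As a hypothetical follow-up, the same Variant-plus-dispatcher pattern would let a second engine be registered later: add a tag under `mlp_engine` and a matching branch in each `*_train` dispatcher. The engine name `nep` and `training_args_nep` below are placeholders, not part of this patch; only `dargs` (already a dpgen dependency) is assumed.

```python
from dargs import Argument, Variant


def training_args_dp() -> list[Argument]:
    # stand-in for the real DeePMD-kit argument list in dpgen/generator/arginfo.py
    return [Argument("train_backend", str, optional=True, default="tensorflow")]


def training_args_nep() -> list[Argument]:
    # arguments specific to the hypothetical second engine would go here
    return []


def training_args() -> Variant:
    # one tag per engine; "dp" stays the default so existing inputs keep working
    return Variant(
        "mlp_engine",
        [
            Argument("dp", dict, training_args_dp(), doc="DeePMD-kit."),
            Argument("nep", dict, training_args_nep(), doc="Hypothetical second engine."),
        ],
        default_tag="dp",
        doc="Machine learning potential engine.",
    )
```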