diff --git a/arc/checks/common.py b/arc/checks/common.py index ec5b91b5b3..f096c70c76 100644 --- a/arc/checks/common.py +++ b/arc/checks/common.py @@ -36,8 +36,10 @@ def get_i_from_job_name(job_name: str) -> Optional[int]: Optional[int]: The corresponding conformer or tsg index. """ i = None - if 'conformer' in job_name: + if 'conf_opt' in job_name: i = int(job_name[9:]) + elif 'conf_sp' in job_name: + i = int(job_name[8:]) elif 'tsg' in job_name: i = int(job_name[3:]) return i diff --git a/arc/checks/common_test.py b/arc/checks/common_test.py index c55e5c531b..dc2b991c89 100644 --- a/arc/checks/common_test.py +++ b/arc/checks/common_test.py @@ -36,9 +36,9 @@ def test_get_i_from_job_name(self): """Test the get_i_from_job_name() function""" self.assertIsNone(common.get_i_from_job_name('')) self.assertIsNone(common.get_i_from_job_name('some_job_name')) - self.assertEqual(common.get_i_from_job_name('conformer3'), 3) - self.assertEqual(common.get_i_from_job_name('conformer33'), 33) - self.assertEqual(common.get_i_from_job_name('conformer3355'), 3355) + self.assertEqual(common.get_i_from_job_name('conf_opt_3'), 3) + self.assertEqual(common.get_i_from_job_name('conf_opt_33'), 33) + self.assertEqual(common.get_i_from_job_name('conf_opt_3355'), 3355) self.assertEqual(common.get_i_from_job_name('tsg2'), 2) diff --git a/arc/common.py b/arc/common.py index a99b9752f3..fa12084b62 100644 --- a/arc/common.py +++ b/arc/common.py @@ -92,8 +92,8 @@ def initialize_job_types(job_types: Optional[dict] = None, if 'fine_grid' in job_types: del job_types['fine_grid'] - defaults_to_true = ['conformers', 'fine', 'freq', 'irc', 'opt', 'rotors', 'sp'] - defaults_to_false = ['bde', 'onedmin', 'orbitals'] + defaults_to_true = ['conf_opt', 'fine', 'freq', 'irc', 'opt', 'rotors', 'sp'] + defaults_to_false = ['conf_sp', 'bde', 'onedmin', 'orbitals'] if job_types is None: job_types = default_job_types logger.info("Job types were not specified, using ARC's defaults") diff --git a/arc/common_test.py b/arc/common_test.py index 71af05fdd2..26d4e997f1 100644 --- a/arc/common_test.py +++ b/arc/common_test.py @@ -43,13 +43,14 @@ def setUpClass(cls): cls.maxDiff = None cls.rmgdb = make_rmg_database_object() load_families_only(cls.rmgdb) - cls.default_job_types = {'conformers': True, + cls.default_job_types = {'conf_opt': True, 'opt': True, 'fine': True, 'freq': True, 'sp': True, 'rotors': True, 'irc': True, + 'conf_sp': False, 'orbitals': False, 'onedmin': False, 'bde': False, @@ -388,7 +389,7 @@ def test_almost_equal_lists(self): def test_initialize_job_with_given_job_type(self): """Test the initialize_job_types() function""" - job_types = {'conformers': False, 'opt': True, 'fine': True, 'freq': True, 'sp': False, 'rotors': False, 'irc': True} + job_types = {'conf_opt': False, 'opt': True, 'fine': True, 'freq': True, 'sp': False, 'rotors': False, 'irc': True} job_types_expected = copy.deepcopy(self.default_job_types) job_types_expected.update(job_types) job_types_initialized = common.initialize_job_types(job_types) diff --git a/arc/job/adapter.py b/arc/job/adapter.py index 428abfee1c..31ac915fdf 100644 --- a/arc/job/adapter.py +++ b/arc/job/adapter.py @@ -110,7 +110,8 @@ class JobTypeEnum(str, Enum): The available jon types are a finite set. 
""" composite = 'composite' - conformers = 'conformers' # conformer optimization (not generation) + conf_opt = 'conf_opt' # conformer optimization (not generation) + conf_sp = 'conf_sp' # conformer single point freq = 'freq' gen_confs = 'gen_confs' # conformer generation irc = 'irc' @@ -385,9 +386,9 @@ def determine_job_array_parameters(self): if self.species is not None: if len(self.species) > 1: self.iterate_by.append('species') - if job_type == 'conformers': + if job_type == 'conf_opt': if self.species is not None and sum(len(species.conformers) for species in self.species) > 10: - self.iterate_by.append('conformers') + self.iterate_by.append('conf_opt') self.number_of_processes += sum([len(species.conformers) for species in self.species]) for species in self.species: if job_type in ['sp', 'opt', 'freq', 'optfreq', 'composite', 'ornitals', 'onedmin', 'irc']: @@ -456,7 +457,7 @@ def write_hdf5(self): else: for species in self.species: data[species.label] = list() - if 'conformers' in self.iterate_by: + if 'conf_opt' in self.iterate_by: for conformer in species.conformers: data[species.label].append(DataPoint(charge=species.charge, job_types=['opt'], @@ -693,10 +694,8 @@ def _set_job_number(self): self.job_num = job_num # 2. Set other related attributes job_name and job_server_name. self.job_server_name = self.job_server_name or 'a' + str(self.job_num) - if self.conformer is not None and (self.job_name is None or 'conformer_a' in self.job_name): - if self.job_name is not None: - logger.warning(f'Replacing job name {self.job_name} with conformer{self.conformer}') - self.job_name = f'conformer{self.conformer}' + if self.conformer is not None and self.job_name is None: + self.job_name = f'{self.job_type}_{self.conformer}_{self.job_server_name}' elif self.tsg is not None and (self.job_name is None or 'tsg_a' in self.job_name): if self.job_name is not None: logger.warning(f'Replacing job name {self.job_name} with tsg{self.conformer}') @@ -1097,7 +1096,7 @@ def _log_job_execution(self): local = 'local ' else: server = f' on {self.server}' - if 'conformer' in self.job_name or 'tsg' in self.job_name: + if 'conf_opt' in self.job_name or 'tsg' in self.job_name: job_server_name = f' ({self.job_server_name})' execution_type = {'incore': 'incore job', 'queue': 'queue job', 'pipe': 'job array (pipe)'}[self.execution_type] pivots = f' for pivots {[[tor[1] + 1, tor[2] + 1] for tor in self.torsions]}' if self.torsions is not None else '' diff --git a/arc/job/adapter_test.py b/arc/job/adapter_test.py index a45ea74f53..e08607e3fa 100644 --- a/arc/job/adapter_test.py +++ b/arc/job/adapter_test.py @@ -53,7 +53,8 @@ def test_job_enum(self): def test_job_type_enum(self): """Test the JobTypeEnum class""" self.assertEqual(JobTypeEnum('composite').value, 'composite') - self.assertEqual(JobTypeEnum('conformers').value, 'conformers') + self.assertEqual(JobTypeEnum('conf_opt').value, 'conf_opt') + self.assertEqual(JobTypeEnum('conf_sp').value, 'conf_sp') self.assertEqual(JobTypeEnum('freq').value, 'freq') self.assertEqual(JobTypeEnum('gen_confs').value, 'gen_confs') self.assertEqual(JobTypeEnum('irc').value, 'irc') @@ -123,7 +124,7 @@ def setUpClass(cls): """ cls.maxDiff = None cls.job_1 = GaussianAdapter(execution_type='queue', - job_type='conformers', + job_type='conf_opt', level=Level(method='cbs-qb3'), project='test', project_directory=os.path.join(ARC_PATH, 'arc', 'testing', 'test_JobAdapter'), @@ -224,7 +225,7 @@ def setUpClass(cls): def test_determine_job_array_parameters(self): """Test determining job array 
parameters""" - self.assertEqual(self.job_1.iterate_by, ['species', 'conformers']) + self.assertEqual(self.job_1.iterate_by, ['species', 'conf_opt']) self.assertEqual(self.job_1.number_of_processes, 3 * 6) self.assertEqual(self.job_1.workers, 4) diff --git a/arc/job/adapters/cfour.py b/arc/job/adapters/cfour.py index cd277e5254..458d33f644 100644 --- a/arc/job/adapters/cfour.py +++ b/arc/job/adapters/cfour.py @@ -212,12 +212,12 @@ def write_input_file(self) -> None: input_dict['keywords'] = '' keywords = list() - if self.job_type in ['opt', 'conformers']: + if self.job_type in ['opt', 'conf_opt']: keywords.append('METHOD=TS' if self.is_ts else 'METHOD=MANR') elif self.job_type in ['freq', 'optfreq', 'scan']: raise NotImplementedError(f'CFour cannot execute frequency computations or scans, ' f'got job type {self.job_type}') - elif self.job_type == 'sp': + elif self.job_type in ['sp', 'conf_sp']: pass input_dict['keywords'] = ','.join(key for key in keywords) diff --git a/arc/job/adapters/gaussian.py b/arc/job/adapters/gaussian.py index 35169500e1..476af80e67 100644 --- a/arc/job/adapters/gaussian.py +++ b/arc/job/adapters/gaussian.py @@ -275,7 +275,7 @@ def write_input_file(self) -> None: max_c = int(match.group(1)) break - if self.job_type in ['opt', 'conformers', 'optfreq', 'composite']: + if self.job_type in ['opt', 'conf_opt', 'optfreq', 'composite']: keywords = ['ts', 'calcfc', 'noeigentest', f'maxcycle={max_c}'] if self.is_ts else ['calcfc'] if self.level.method in ['rocbs-qb3']: # There are no analytical 2nd derivatives (FC) for this method. @@ -312,7 +312,7 @@ def write_input_file(self) -> None: elif self.job_type == 'optfreq': input_dict['job_type_2'] = 'freq IOp(7/33=1)' - elif self.job_type == 'sp': + elif self.job_type in ['sp', 'conf_sp']: input_dict['job_type_1'] = f'integral=(grid=ultrafine, {integral_algorithm})' if input_dict['trsh']: input_dict['trsh'] += ' ' diff --git a/arc/job/adapters/molpro.py b/arc/job/adapters/molpro.py index 176570aa94..d62522acf5 100644 --- a/arc/job/adapters/molpro.py +++ b/arc/job/adapters/molpro.py @@ -233,7 +233,7 @@ def write_input_file(self) -> None: input_dict['restricted'] = 'u' # Job type specific options - if self.job_type in ['opt', 'optfreq', 'conformers']: + if self.job_type in ['opt', 'optfreq', 'conf_opt']: keywords = ['optg', 'root=2', 'method=qsd', 'readhess', "savexyz='geometry.xyz'"] if self.is_ts \ else ['optg', "savexyz='geometry.xyz'"] input_dict['job_type_1'] = ', '.join(key for key in keywords) @@ -241,7 +241,7 @@ def write_input_file(self) -> None: elif self.job_type in ['freq', 'optfreq']: input_dict['job_type_2'] = '{frequencies;\nthermo;\nprint,HESSIAN,thermo;}' - elif self.job_type == 'sp': + elif self.job_type in ['sp', 'conf_sp']: pass elif self.job_type == 'scan': diff --git a/arc/job/adapters/orca.py b/arc/job/adapters/orca.py index e118db214e..d64340af7b 100644 --- a/arc/job/adapters/orca.py +++ b/arc/job/adapters/orca.py @@ -259,7 +259,7 @@ def write_input_file(self) -> None: input_dict['restricted'] = 'r' if is_restricted(self) else 'u' # Job type specific options - if self.job_type in ['opt', 'conformers', 'optfreq']: + if self.job_type in ['opt', 'conf_opt', 'optfreq']: opt_convergence_key = 'fine_opt_convergence' if self.fine else 'opt_convergence' opt_convergence = self.args['keyword'].get(opt_convergence_key, '').lower() or \ orca_default_options_dict['opt']['keyword'].get(opt_convergence_key, '').lower() @@ -293,7 +293,7 @@ def write_input_file(self) -> None: logger.info('Using numerical frequencies 
calculation in Orca. Note: This job might therefore be ' 'time-consuming.') - elif self.job_type == 'sp': + elif self.job_type in ['sp', 'conf_sp']: input_dict['job_type_1'] = 'sp' elif self.job_type == 'scan': diff --git a/arc/job/adapters/qchem.py b/arc/job/adapters/qchem.py index 1e1cfca19e..1c66efec21 100644 --- a/arc/job/adapters/qchem.py +++ b/arc/job/adapters/qchem.py @@ -224,7 +224,7 @@ def write_input_file(self) -> None: input_dict['unrestricted'] = 'True' if not is_restricted(self) else 'False' # Job type specific options - if self.job_type in ['opt', 'conformers', 'optfreq', 'orbitals', 'scan']: + if self.job_type in ['opt', 'conf_opt', 'optfreq', 'orbitals', 'scan']: input_dict['job_type_1'] = 'ts' if self.is_ts else 'opt' if self.fine: input_dict['fine'] = '\n GEOM_OPT_TOL_GRADIENT 15' \ @@ -238,7 +238,7 @@ def write_input_file(self) -> None: elif self.job_type == 'freq': input_dict['job_type_1'] = 'freq' - elif self.job_type == 'sp': + elif self.job_type in ['sp', 'conf_sp']: input_dict['job_type_1'] = 'sp' elif self.job_type == 'orbitals': diff --git a/arc/job/adapters/scripts/ob_script.py b/arc/job/adapters/scripts/ob_script.py index 56b955feaa..d4e93370e6 100644 --- a/arc/job/adapters/scripts/ob_script.py +++ b/arc/job/adapters/scripts/ob_script.py @@ -176,7 +176,7 @@ def main(): sp = run_sp(mol=mol, ff_method=ff_method) save_output_file(path = str(args.yml_path), key="sp", val=sp) - if job_type in ['opt', 'conformers', 'directed_scan']: + if job_type in ['opt', 'conf_opt', 'directed_scan']: constraints = input_dict["constraints"] if "constraints" in input_dict.keys() else None opt_xyz, sp = constraint_opt(mol=mol, constraints_dict=constraints, ff_method=ff_method) save_output_file(path = str(args.yml_path), content_dict = {"opt_xyz" : opt_xyz, "sp" : sp}) diff --git a/arc/job/adapters/scripts/tani_script.py b/arc/job/adapters/scripts/tani_script.py index 6cb8f32edc..18739b42aa 100644 --- a/arc/job/adapters/scripts/tani_script.py +++ b/arc/job/adapters/scripts/tani_script.py @@ -345,7 +345,7 @@ def main(): forces = run_force(xyz=xyz, device=device, model=model) save_output_file(path = str(args.yml_path), key="force", val=forces) - elif job_type in ['opt', 'conformers', 'directed_scan', 'optfreq']: + elif job_type in ['opt', 'conf_opt', 'directed_scan', 'optfreq']: constraints = input_dict["constraints"] if "constraints" in input_dict.keys() else None opt_xyz = run_opt(xyz=xyz, constraints=constraints, fmax=input_dict["fmax"], model=model, steps=input_dict["steps"] if "steps" in input_dict.keys() else None, engine=input_dict["engine"]) diff --git a/arc/job/adapters/terachem.py b/arc/job/adapters/terachem.py index 59862e2e8c..f9eaed3ea4 100644 --- a/arc/job/adapters/terachem.py +++ b/arc/job/adapters/terachem.py @@ -239,7 +239,7 @@ def write_input_file(self) -> None: input_dict['dispersion'] = 'no' # Job type specific options - if self.job_type in ['conformer', 'opt', 'scan']: + if self.job_type in ['conf_opt', 'opt', 'scan']: input_dict['job_type_1'] = 'minimize\n' \ 'new_minimizer yes' if self.fine: @@ -250,7 +250,7 @@ def write_input_file(self) -> None: elif self.job_type == 'freq': input_dict['job_type_1'] = 'frequencies' - elif self.job_type == 'sp': + elif self.job_type in ['sp', 'conf_sp']: input_dict['job_type_1'] = 'energy' if self.job_type == 'scan' \ diff --git a/arc/job/adapters/xtb_adapter.py b/arc/job/adapters/xtb_adapter.py index 27cbaf4490..14f62bbe1d 100644 --- a/arc/job/adapters/xtb_adapter.py +++ b/arc/job/adapters/xtb_adapter.py @@ -216,7 +216,7 @@ def 
write_input_file(self) -> None: directives, block = '', '' uhf = self.species[0].number_of_radicals or self.multiplicity - 1 - if self.job_type in ['opt', 'conformers', 'scan']: + if self.job_type in ['opt', 'conf_opt', 'scan']: directives += ' --opt' directives += self.add_accuracy() if self.constraints and self.job_type != 'scan': @@ -234,7 +234,7 @@ def write_input_file(self) -> None: elif self.job_type in ['fukui']: directives += ' --vfukui' - elif self.job_type == 'sp': + elif self.job_type in ['sp', 'conf_sp']: pass directives += f' --{self.level.method}' if self.level is not None and self.level.method != 'xtb' else ' --gfn2' @@ -291,7 +291,7 @@ def is_opt_ts_job(self) -> bool: bool: Whether this is a transition state geometry optimization job. """ if self.species is not None and len(self.species) and self.species[0].is_ts \ - and self.job_type in ['opt', 'conformers']: + and self.job_type in ['opt', 'conf_opt']: return True return False diff --git a/arc/job/trsh.py b/arc/job/trsh.py index 04995c41f2..f985db6a61 100644 --- a/arc/job/trsh.py +++ b/arc/job/trsh.py @@ -229,7 +229,7 @@ def determine_ess_status(output_path: str, done = True # If this is an opt job, we must also check that the max num of cycles hasn't been reached, # so don't break yet. - if 'opt' not in job_type and 'conformer' not in job_type and 'ts' not in job_type: + if 'opt' not in job_type and 'conf_opt' not in job_type and 'ts' not in job_type: break elif 'SCF failed' in line: keywords = ['SCF'] @@ -245,7 +245,7 @@ def determine_ess_status(output_path: str, elif 'Invalid charge/multiplicity combination' in line: raise SpeciesError(f'The multiplicity and charge combination for species ' f'{species_label} are wrong.') - if 'opt' in job_type or 'conformer' in job_type or 'ts' in job_type: + if 'opt' in job_type or 'conf_opt' in job_type or 'ts' in job_type: if 'MAXIMUM OPTIMIZATION CYCLES REACHED' in line: keywords = ['MaxOptCycles'] error = 'Maximum optimization cycles reached.' diff --git a/arc/main.py b/arc/main.py index b2f0e97453..d5c4c63a85 100644 --- a/arc/main.py +++ b/arc/main.py @@ -80,6 +80,8 @@ class ARC(object): instead (e.g., ``opt_level``). composite_method (str, dict, Level, optional): Composite method. conformer_level (str, dict, Level, optional): Level of theory for conformer searches. + conformer_opt_level (str, dict, Level, optional): Level of theory for conformer optimization; interchangeable with ``conformer_level``. + conformer_sp_level (str, dict, Level, optional): Level of theory for single point calculations on optimized conformers. opt_level (str, dict, Level, optional): Level of theory for geometry optimization. freq_level (str, dict, Level, optional): Level of theory for frequency calculations. sp_level (str, dict, Level, optional): Level of theory for single point calculations. @@ -167,7 +169,8 @@ class ARC(object): reactions (list): A list of :ref:`ARCReaction ` objects. level_of_theory (str): A shortcut representing either sp//geometry levels or a composite method. composite_method (Level): Composite method. - conformer_level (Level): Level of theory for conformer searches. + conformer_opt_level (Level): Level of theory for conformer optimization. + conformer_sp_level (Level): Level of theory for single point calculations on optimized conformers. opt_level (Level): Level of theory for geometry optimization. freq_level (Level): Level of theory for frequency calculations. sp_level (Level): Level of theory for single point calculations.
@@ -245,6 +248,8 @@ def __init__(self, compute_thermo: bool = True, compute_transport: bool = False, conformer_level: Optional[Union[str, dict, Level]] = None, + conformer_opt_level: Optional[Union[str, dict, Level]] = None, + conformer_sp_level: Optional[Union[str, dict, Level]] = None, dont_gen_confs: List[str] = None, e_confs: float = 5.0, ess_settings: Dict[str, Union[str, List[str]]] = None, @@ -346,7 +351,8 @@ def __init__(self, # attributes related to level of theory specifications self.level_of_theory = level_of_theory self.composite_method = composite_method or None - self.conformer_level = conformer_level or None + self.conformer_opt_level = conformer_level or conformer_opt_level or None + self.conformer_sp_level = conformer_sp_level or None self.opt_level = opt_level or None self.freq_level = freq_level or None self.sp_level = sp_level or None @@ -489,8 +495,10 @@ def as_dict(self) -> dict: restart_dict['compute_thermo'] = self.compute_thermo if self.compute_transport: restart_dict['compute_transport'] = self.compute_transport - if self.conformer_level is not None and str(self.conformer_level).split()[0] != default_levels_of_theory['conformer']: - restart_dict['conformer_level'] = self.conformer_level.as_dict() + if self.conformer_opt_level is not None: + restart_dict['conformer_opt_level'] = self.conformer_opt_level.as_dict() + if self.conformer_sp_level is not None: + restart_dict['conformer_sp_level'] = self.conformer_sp_level.as_dict() if self.dont_gen_confs: restart_dict['dont_gen_confs'] = self.dont_gen_confs if self.ts_adapters is not None: @@ -608,7 +616,8 @@ def execute(self) -> dict: species_list=self.species, rxn_list=self.reactions, composite_method=self.composite_method, - conformer_level=self.conformer_level, + conformer_opt_level=self.conformer_opt_level, + conformer_sp_level=self.conformer_sp_level, opt_level=self.opt_level, freq_level=self.freq_level, sp_level=self.sp_level, @@ -689,7 +698,8 @@ def save_project_info_file(self): txt = '' txt += f'ARC v{self.__version__}\n' txt += f'ARC project {self.project}\n\nLevels of theory used:\n\n' - txt += f'Conformers: {self.conformer_level}\n' + txt += f'Conformer optimization: {self.conformer_opt_level}\n' + txt += f'Conformer single point: {self.conformer_sp_level}\n' txt += f'TS guesses: {self.ts_guess_level}\n' if self.composite_method is not None: txt += f'Composite method: {self.composite_method} {fine_txt}\n' @@ -975,13 +985,17 @@ def set_levels_of_theory(self): logger.info('\n\nUsing the following levels of theory:\n') - if self.conformer_level is None: - self.conformer_level = default_levels_of_theory['conformer'] + if self.conformer_opt_level is None: + self.conformer_opt_level = default_levels_of_theory['conformer'] default_flag = ' (default)' else: default_flag = '' - self.conformer_level = Level(repr=self.conformer_level) - logger.info(f'Conformers:{default_flag} {self.conformer_level}') + self.conformer_opt_level = Level(repr=self.conformer_opt_level) + logger.info(f'Conformers opt:{default_flag} {self.conformer_opt_level}') + + if self.conformer_sp_level is not None: + self.conformer_sp_level = Level(repr=self.conformer_sp_level) + logger.info(f'Conformers sp: {self.conformer_sp_level}') if self.reactions or any([spc.is_ts for spc in self.species]): if not self.ts_guess_level: diff --git a/arc/main_test.py b/arc/main_test.py index c40adaa3a1..a3e7c4a0e9 100644 --- a/arc/main_test.py +++ b/arc/main_test.py @@ -43,11 +43,12 @@ def setUpClass(cls): """ cls.maxDiff = None cls.servers = servers.keys() - 
cls.job_types1 = {'conformers': True, + cls.job_types1 = {'conf_opt': True, 'opt': True, 'fine_grid': False, 'freq': True, 'sp': True, + 'conf_sp': False, 'rotors': False, 'orbitals': False, 'lennard_jones': False, @@ -84,7 +85,7 @@ def test_as_dict(self): 'method': 'ccsd(t)-f12', 'method_type': 'wavefunction', 'software': 'molpro'}, - 'conformer_level': {'basis': 'def2svp', + 'conformer_opt_level': {'basis': 'def2svp', 'compatible_ess': ['gaussian', 'terachem'], 'method': 'wb97xd', 'method_type': 'dft', @@ -114,7 +115,8 @@ def test_as_dict(self): 'software': 'gaussian'}, 'job_memory': 14, 'job_types': {'bde': True, - 'conformers': True, + 'conf_opt': True, + 'conf_sp': False, 'fine': False, 'freq': True, 'irc': True, @@ -152,7 +154,7 @@ def test_as_dict(self): def test_from_dict(self): """Test the from_dict() method of ARC""" restart_dict = {'composite_method': '', - 'conformer_level': 'b97-d3/6-311+g(d,p)', + 'conformer_opt_level': 'b97-d3/6-311+g(d,p)', 'freq_level': 'wb97x-d3/6-311+g(d,p)', 'freq_scale_factor': 0.96, 'opt_level': 'wb97x-d3/6-311+g(d,p)', @@ -204,7 +206,7 @@ def test_from_dict_specific_job(self): 'project_directory': os.path.join(ARC_PATH, 'Projects', 'unit_test_specific_job'), } arc1 = ARC(**restart_dict) - job_type_expected = {'conformers': False, 'opt': True, 'freq': True, 'sp': True, 'rotors': False, + job_type_expected = {'conf_opt': False, 'conf_sp': False, 'opt': True, 'freq': True, 'sp': True, 'rotors': False, 'orbitals': False, 'bde': True, 'onedmin': False, 'fine': True, 'irc': False} self.assertEqual(arc1.job_types, job_type_expected) diff --git a/arc/plotter.py b/arc/plotter.py index 55044f911c..316f4d3058 100644 --- a/arc/plotter.py +++ b/arc/plotter.py @@ -901,6 +901,7 @@ def save_conformers_file(project_directory: str, im_freqs: Optional[List[List[float]]] = None, log_content: bool = False, before_optimization: bool = True, + sp_flag = False, ): """ Save the conformers before or after optimization. @@ -920,6 +921,7 @@ def save_conformers_file(project_directory: str, im_freqs (list, optional): Entries lists of imaginary frequencies. log_content (bool): Whether to log the content of the conformers file. ``True`` to log, default is ``False``. before_optimization (bool): Whether the conformers are before DFT optimization. ``True`` for before, default is ``True``. + sp_flag (bool): Whether the conformers are single point calculations. ``True`` for single point, default is ``False``. 
""" spc_dir = 'rxns' if is_ts else 'Species' geo_dir = os.path.join(project_directory, 'output', spc_dir, label, 'geometry', 'conformers') @@ -936,7 +938,10 @@ def save_conformers_file(project_directory: str, content += f'Conformers for {label}, computed using a force field:\n\n' else: level_of_theory = level_of_theory.simple() if isinstance(level_of_theory, Level) else level_of_theory - content += f'Conformers for {label}, optimized at the {level_of_theory} level:\n\n' + if not sp_flag: + content += f'Conformers for {label}, optimized at the {level_of_theory} level:\n\n' + else: + content += f'Conformers for {label}, single point calculation at the {level_of_theory} level:\n\n' for i, xyz in enumerate(xyzs): content += f'conformer {i}:\n' if xyz is not None: diff --git a/arc/reaction_test.py b/arc/reaction_test.py index 51b87cb4a3..5c3b85e7fc 100644 --- a/arc/reaction_test.py +++ b/arc/reaction_test.py @@ -1928,7 +1928,7 @@ def test_load_ts_xyz_user_guess_from_files(self): Scheduler(project=arc_object.project, species_list=arc_object.species, rxn_list=arc_object.reactions, - conformer_level=arc_object.conformer_level, + conformer_opt_level=arc_object.conformer_opt_level, opt_level=arc_object.opt_level, freq_level=arc_object.freq_level, sp_level=arc_object.sp_level, @@ -1950,7 +1950,7 @@ def test_load_ts_xyz_user_guess_from_files(self): Scheduler(project=arc_object.project, species_list=arc_object.species, rxn_list=arc_object.reactions, - conformer_level=arc_object.conformer_level, + conformer_opt_level=arc_object.conformer_opt_level, opt_level=arc_object.opt_level, freq_level=arc_object.freq_level, sp_level=arc_object.sp_level, diff --git a/arc/scheduler.py b/arc/scheduler.py index dbaef49b1c..b6c7618daf 100644 --- a/arc/scheduler.py +++ b/arc/scheduler.py @@ -79,7 +79,9 @@ class Scheduler(object): Dictionary structures:: - job_dict = {label_1: {'conformers': {0: Job1, + job_dict = {label_1: {'conf_opt': {0: Job1, + 1: Job2, ...}, + 'conf_sp': {0: Job1, 1: Job2, ...}, 'tsg': {0: Job1, 1: Job2, ...}, # TS guesses @@ -130,7 +132,8 @@ class Scheduler(object): rxn_list (list): Contains input :ref:`ARCReaction ` objects. project_directory (str): Folder path for the project: the input file path or ARC/Projects/project-name. composite_method (str, optional): A composite method to use. - conformer_level (Union[str, dict], optional): The level of theory to use for conformer comparisons. + conformer_opt_level (Union[str, dict], optional): The level of theory to use for conformer comparisons. + conformer_sp_level (Union[str, dict], optional): The level of theory to use for conformer sp jobs. opt_level (Union[str, dict], optional): The level of theory to use for geometry optimizations. freq_level (Union[str, dict], optional): The level of theory to use for frequency calculations. sp_level (Union[str, dict], optional): The level of theory to use for single point energy calculations. @@ -202,7 +205,8 @@ class Scheduler(object): bath_gas (str): A bath gas. Currently used in OneDMin to calc L-J parameters. Allowed values are He, Ne, Ar, Kr, H2, N2, O2. composite_method (str): A composite method to use. - conformer_level (dict): The level of theory to use for conformer comparisons. + conformer_opt_level (dict): The level of theory to use for conformer comparisons. + conformer_sp_level (dict): The level of theory to use for conformer sp jobs. opt_level (dict): The level of theory to use for geometry optimizations. freq_level (dict): The level of theory to use for frequency calculations. 
sp_level (dict): The level of theory to use for single point energy calculations. @@ -230,7 +234,8 @@ def __init__(self, species_list: list, project_directory: str, composite_method: Optional[Level] = None, - conformer_level: Optional[Level] = None, + conformer_opt_level: Optional[Level] = None, + conformer_sp_level: Optional[Level] = None, opt_level: Optional[Level] = None, freq_level: Optional[Level] = None, sp_level: Optional[Level] = None, @@ -310,7 +315,8 @@ def __init__(self, self.report_time = time.time() # init time for reporting status every 1 hr self.servers = list() self.composite_method = composite_method - self.conformer_level = conformer_level + self.conformer_opt_level = conformer_opt_level + self.conformer_sp_level = conformer_sp_level self.ts_guess_level = ts_guess_level self.opt_level = opt_level self.freq_level = freq_level @@ -424,8 +430,8 @@ def __init__(self, if not self.job_types['opt'] and species.final_xyz is not None: # opt wasn't asked for, and it's not needed, declare it as converged self.output[species.label]['job_types']['opt'] = True - if not self.job_types['conformers'] and len(species.conformers) == 1: - # conformers weren't asked for, assign initial_xyz + if not self.job_types['conf_opt'] and len(species.conformers) == 1: + # conformers opt weren't asked for, assign initial_xyz species.initial_xyz = species.conformers[0] if species.label not in self.running_jobs: self.running_jobs[species.label if not species.multi_species else species.multi_species] = list() @@ -443,7 +449,7 @@ def __init__(self, self.run_sp_job(label=species.label) if self.job_types['onedmin']: self.run_onedmin_job(species.label) - elif species.get_xyz(generate=False) and not self.job_types['conformers'] and not self.job_types['opt'] \ + elif species.get_xyz(generate=False) and not self.job_types['conf_opt'] and not self.job_types['opt'] \ and species.irc_label is None: if self.job_types['freq']: self.run_freq_job(species.label) @@ -543,20 +549,25 @@ def schedule_jobs(self): continue job_list = self.running_jobs[label] for job_name in job_list: - if 'conformer' in job_name: + if 'conf' in job_name: i = get_i_from_job_name(job_name) - job = self.job_dict[label]['conformers'][i] + job = self.job_dict[label]['conf_opt'][i] if 'conf_opt' in job_name \ + else self.job_dict[label]['conf_sp'][i] if not (job.job_id in self.server_job_ids and job.job_id not in self.completed_incore_jobs): # this is a completed conformer job successful_server_termination = self.end_job(job=job, label=label, job_name=job_name) if successful_server_termination: troubleshooting_conformer = self.parse_conformer(job=job, label=label, i=i) + if 'conf_opt' in job_name and self.job_types['conf_sp'] and not troubleshooting_conformer: + self.run_sp_job(label=label, + level=self.conformer_sp_level, + conformer=i) if troubleshooting_conformer: break # Just terminated a conformer job. # Are there additional conformer jobs currently running for this species? for spec_jobs in job_list: - if 'conformer' in spec_jobs and spec_jobs != job_name: + if ('conf_opt' in spec_jobs or 'conf_sp' in spec_jobs) and spec_jobs != job_name: break else: # All conformer jobs terminated. 
@@ -565,7 +576,7 @@ def schedule_jobs(self): if self.species_dict[label].is_ts: self.determine_most_likely_ts_conformer(label) else: - self.determine_most_stable_conformer(label) # also checks isomorphism + self.determine_most_stable_conformer(label, sp_flag=True if self.job_types['conf_sp'] else False) # also checks isomorphism if self.species_dict[label].initial_xyz is not None: # if initial_xyz is None, then we're probably troubleshooting conformers, don't opt if not self.composite_method: @@ -593,7 +604,7 @@ def schedule_jobs(self): self.run_conformer_jobs(labels=[label]) self.timer = False break - elif 'opt' in job_name: + elif 'opt' in job_name and 'conf_opt' not in job_name: # val is 'opt1', 'opt2', etc., or 'optfreq1', optfreq2', etc. job = self.job_dict[label]['opt'][job_name] if not (job.job_id in self.server_job_ids and job.job_id not in self.completed_incore_jobs): @@ -625,7 +636,7 @@ def schedule_jobs(self): self.check_freq_job(label=label, job=job) self.timer = False break - elif 'sp' in job_name: + elif 'sp' in job_name and 'conf_sp' not in job_name: job = self.job_dict[label]['sp'][job_name] if not (job.job_id in self.server_job_ids and job.job_id not in self.completed_incore_jobs): successful_server_termination = self.end_job(job=job, label=label, job_name=job_name) @@ -879,10 +890,12 @@ def run_job(self, elif conformer is not None: # Running a conformer DFT job. Append differently to job_dict. self.running_jobs[label] = list() if label not in self.running_jobs else self.running_jobs[label] - self.running_jobs[label].append(f'conformer{conformer}') # mark as a running job - if 'conformers' not in self.job_dict[label]: - self.job_dict[label]['conformers'] = dict() - self.job_dict[label]['conformers'][conformer] = job # save job object + self.running_jobs[label].append(f'{job_type}_{conformer}') # mark as a running job + if 'conf_opt' not in self.job_dict[label]: + self.job_dict[label]['conf_opt'] = dict() + if 'conf_sp' not in self.job_dict[label] and job_type == 'conf_sp': + self.job_dict[label]['conf_sp'] = dict() + self.job_dict[label][job_type][conformer] = job # save job object elif tsg is not None: # Running a TS guess job. Append differently to job_dict. self.running_jobs[label] = list() if label not in self.running_jobs else self.running_jobs[label] @@ -1084,10 +1097,10 @@ def run_conformer_jobs(self, labels: Optional[List[str]] = None): and all([e is None for e in self.species_dict[label].conformer_energies]) \ and self.species_dict[label].number_of_atoms > 1 and not self.output[label]['paths']['geo'] \ and self.species_dict[label].yml_path is None and not self.output[label]['convergence'] \ - and (self.job_types['conformers'] and label not in self.dont_gen_confs + and (self.job_types['conf_opt'] and label not in self.dont_gen_confs or self.species_dict[label].get_xyz(generate=False) is None): # This is not a TS, opt (/composite) did not converge nor is it running, and conformer energies were - # not set. Also, either 'conformers' are set to True in job_types (and it's not in dont_gen_confs), + # not set. Also, either 'conf_opt' are set to True in job_types (and it's not in dont_gen_confs), # or they are set to False (or it's in dont_gen_confs), but the species has no 3D information. # Generate conformers. 
if not log_info_printed: @@ -1149,12 +1162,12 @@ def run_ts_conformer_jobs(self, label: str): ) successful_tsgs = [tsg for tsg in self.species_dict[label].ts_guesses if tsg.success] if len(successful_tsgs) > 1: - self.job_dict[label]['conformers'] = dict() + self.job_dict[label]['conf_opt'] = dict() for i, tsg in enumerate(successful_tsgs): self.run_job(label=label, xyz=tsg.initial_xyz, level_of_theory=self.ts_guess_level, - job_type='conformers', + job_type='conf_opt', conformer=i, ) tsg.conformer_index = i # Store the conformer index in the TSGuess object to match them later. @@ -1254,6 +1267,7 @@ def run_freq_job(self, label): def run_sp_job(self, label: str, level: Optional[Level] = None, + conformer: Optional[int] = None, ): """ Spawn a single point job using 'final_xyz' for species ot TS 'label'. @@ -1262,9 +1276,17 @@ def run_sp_job(self, Args: label (str): The species label. level (Level): An alternative level of theory to run at. If ``None``, self.sp_level will be used. + conformer (int): The conformer number. """ level = level or self.sp_level + if self.job_types['conf_sp'] and conformer is not None and self.conformer_sp_level != self.conformer_opt_level: + self.run_job(label=label, + xyz=self.species_dict[label].conformers[conformer], + level_of_theory=self.conformer_sp_level, + job_type='conf_sp', + conformer=conformer) + return # determine_occ(xyz=self.xyz, charge=self.charge) if level == self.opt_level and not self.composite_method \ and not (level.software == 'xtb' and self.species_dict[label].is_ts) \ @@ -1840,8 +1862,8 @@ def process_directed_scans(self, label: str, pivots: Union[List[int], List[List[ def process_conformers(self, label): """ - Process the generated conformers and spawn DFT jobs at the conformer_level. - If more than one conformer is available, they will be optimized at the DFT conformer_level. + Process the generated conformers and spawn DFT jobs at the conformer_opt_level. + If more than one conformer is available, they will be optimized at the DFT conformer_opt_level. Args: label (str): The species label. 
@@ -1849,7 +1871,7 @@ def process_conformers(self, label): plotter.save_conformers_file(project_directory=self.project_directory, label=label, xyzs=self.species_dict[label].conformers, - level_of_theory=self.conformer_level, + level_of_theory=self.conformer_opt_level, multiplicity=self.species_dict[label].multiplicity, charge=self.species_dict[label].charge, is_ts=False, @@ -1860,12 +1882,12 @@ def process_conformers(self, label): if self.species_dict[label].initial_xyz is None and self.species_dict[label].final_xyz is None \ and not self.testing: if len(self.species_dict[label].conformers) > 1: - self.job_dict[label]['conformers'] = dict() + self.job_dict[label]['conf_opt'] = dict() for i, xyz in enumerate(self.species_dict[label].conformers): self.run_job(label=label, xyz=xyz, - level_of_theory=self.conformer_level, - job_type='conformers', + job_type='conf_opt', + level_of_theory=self.conformer_opt_level, conformer=i, ) elif len(self.species_dict[label].conformers) == 1: @@ -1888,7 +1910,7 @@ def process_conformers(self, label): if self.species_dict[label].charge: logger.warning(f'Isomorphism check cannot be done for charged species {label}') self.output[label]['conformers'] += 'Single conformer could not be checked for isomorphism; ' - self.output[label]['job_types']['conformers'] = True + self.output[label]['job_types']['conf_opt'] = True self.species_dict[label].conf_is_isomorphic, spawn_jobs = True, True else: logger.error(f'The only conformer for species {label} could not be checked for isomorphism ' @@ -1897,7 +1919,7 @@ def process_conformers(self, label): f'this species. To change this behaviour, pass `allow_nonisomorphic_2d = True`.') self.species_dict[label].conf_is_isomorphic, spawn_jobs = False, False if b_mol is None and (self.allow_nonisomorphic_2d or self.species_dict[label].charge): - self.output[label]['job_types']['conformers'] = True + self.output[label]['job_types']['conf_opt'] = True if b_mol is not None: try: is_isomorphic = check_isomorphism(self.species_dict[label].mol, b_mol) @@ -1913,7 +1935,7 @@ def process_conformers(self, label): logger.info(f'The only conformer for species {label} was found to be isomorphic ' f'with the 2D graph representation {b_mol.copy(deep=True).to_smiles()}\n') self.output[label]['conformers'] += 'single conformer passed isomorphism check; ' - self.output[label]['job_types']['conformers'] = True + self.output[label]['job_types']['conf_opt'] = True self.species_dict[label].conf_is_isomorphic = True else: logger.error(f'The only conformer for species {label} is not isomorphic ' @@ -1998,7 +2020,7 @@ def parse_conformer(self, return True return False - def determine_most_stable_conformer(self, label): + def determine_most_stable_conformer(self, label, sp_flag=False): """ Determine the most stable conformer for a species (which is not a TS). Also run an isomorphism check. @@ -2006,13 +2028,14 @@ def determine_most_stable_conformer(self, label): Args: label (str): The species label. + sp_flag (bool): Whether this is a single point calculation job. """ if self.species_dict[label].is_ts: raise SchedulerError('The determine_most_stable_conformer() method does not deal with transition ' 'state guesses.') - if 'conformers' in self.job_dict[label].keys() and all(e is None for e in self.species_dict[label].conformer_energies): + if 'conf_opt' in self.job_dict[label].keys() and all(e is None for e in self.species_dict[label].conformer_energies): logger.error(f'No conformer converged for species {label}! 
Trying to troubleshoot conformer jobs...') - for i, job in self.job_dict[label]['conformers'].items(): + for i, job in self.job_dict[label]['conf_opt'].items(): self.troubleshoot_ess(label, job, level_of_theory=job.level, conformer=job.conformer) else: conformer_xyz = None @@ -2020,19 +2043,20 @@ def determine_most_stable_conformer(self, label): if self.species_dict[label].conformer_energies: xyzs = self.species_dict[label].conformers else: - for job in self.job_dict[label]['conformers'].values(): + for job in self.job_dict[label]['conf_opt'].values(): xyzs.append(parser.parse_xyz_from_file(path=job.local_path_to_output_file)) xyzs_in_original_order = xyzs energies, xyzs = sort_two_lists_by_the_first(self.species_dict[label].conformer_energies, xyzs) plotter.save_conformers_file(project_directory=self.project_directory, label=label, xyzs=self.species_dict[label].conformers, - level_of_theory=self.conformer_level, + level_of_theory=self.conformer_opt_level if not sp_flag else self.conformer_sp_level, multiplicity=self.species_dict[label].multiplicity, charge=self.species_dict[label].charge, is_ts=False, energies=self.species_dict[label].conformer_energies, before_optimization=False, + sp_flag=sp_flag, ) # after optimization # Run isomorphism checks if a 2D representation is available if self.species_dict[label].mol is not None: @@ -2088,7 +2112,7 @@ def determine_most_stable_conformer(self, label): conformer_xyz = xyz if 'Conformers optimized and compared' not in self.output[label]['conformers']: self.output[label]['conformers'] += \ - f'Conformers optimized and compared at {self.conformer_level.simple()}; ' + f'Conformers optimized and compared at {self.conformer_opt_level.simple()}; ' break else: if i == 0: @@ -2127,11 +2151,11 @@ def determine_most_stable_conformer(self, label): else: # troubleshoot when all conformers of a species failed isomorphic test logger.warning(f'Isomorphism check for all conformers of species {label} failed at ' - f'{self.conformer_level.simple()}. ' + f'{self.conformer_opt_level.simple()}. 
' f'Attempting to troubleshoot using a different level.') self.output[label]['conformers'] += \ f'Error: No conformer was found to be isomorphic with the 2D graph representation at ' \ - f'{self.conformer_level.simple()}; ' + f'{self.conformer_opt_level.simple()}; ' self.troubleshoot_conformer_isomorphism(label=label) else: logger.warning(f'Could not run isomorphism check for species {label} due to missing 2D graph ' @@ -2142,7 +2166,9 @@ self.species_dict[label].most_stable_conformer = xyzs_in_original_order.index(conformer_xyz) logger.info(f'Conformer number {xyzs_in_original_order.index(conformer_xyz)} for species {label} is ' f'used for geometry optimization.') - self.output[label]['job_types']['conformers'] = True + self.output[label]['job_types']['conf_opt'] = True + if sp_flag: + self.output[label]['job_types']['conf_sp'] = True def determine_most_likely_ts_conformer(self, label: str): """ @@ -3046,11 +3072,11 @@ def check_all_done(self, label: str): if label in self.output and not self.output[label]['convergence']: for job_type, spawn_job_type in self.job_types.items(): if spawn_job_type and not self.output[label]['job_types'][job_type] \ - and not ((self.species_dict[label].is_ts and job_type in ['scan', 'conformers']) + and not ((self.species_dict[label].is_ts and job_type in ['scan', 'conf_opt']) or (self.species_dict[label].number_of_atoms == 1 - and job_type in ['conformers', 'opt', 'fine', 'freq', 'rotors', 'bde']) + and job_type in ['conf_opt', 'opt', 'fine', 'freq', 'rotors', 'bde']) or job_type == 'bde' and self.species_dict[label].bdes is None - or job_type == 'conformers' + or job_type == 'conf_opt' or job_type == 'irc' or job_type == 'tsg'): logger.debug(f'Species {label} did not converge.') @@ -3062,9 +3088,12 @@ self.species_dict[label].make_ts_report() logger.info(self.species_dict[label].ts_report + '\n') zero_delta = datetime.timedelta(0) - conf_time = extremum_list([job.run_time for job in self.job_dict[label]['conformers'].values()], + conf_time = extremum_list([job.run_time for job in self.job_dict[label]['conf_opt'].values()], + return_min=False) \ + if 'conf_opt' in self.job_dict[label].keys() else zero_delta + conf_time = (conf_time or zero_delta) + (extremum_list([job.run_time for job in self.job_dict[label]['conf_sp'].values()], return_min=False) \ - if 'conformers' in self.job_dict[label].keys() else zero_delta + if 'conf_sp' in self.job_dict[label].keys() else zero_delta) tsg_time = extremum_list([job.run_time for job in self.job_dict[label]['tsg'].values()], return_min=False) \ if 'tsg' in self.job_dict[label].keys() else zero_delta opt_time = sum_time_delta([job.run_time for job in self.job_dict[label]['opt'].values()]) \ @@ -3073,8 +3102,8 @@ if 'composite' in self.job_dict[label].keys() else zero_delta other_time = extremum_list([sum_time_delta([job.run_time for job in job_dictionary.values()]) for job_type, job_dictionary in self.job_dict[label].items() - if job_type not in ['conformers', 'opt', 'composite']], return_min=False) \ + if job_type not in ['conf_opt', 'conf_sp', 'opt', 'composite']], return_min=False) \ - if any([job_type not in ['conformers', 'opt', 'composite'] + if any([job_type not in ['conf_opt', 'conf_sp', 'opt', 'composite'] for job_type in self.job_dict[label].keys()]) else zero_delta self.species_dict[label].run_time = self.species_dict[label].run_time \ or (conf_time or zero_delta) + \ @@ -3122,8 +3151,10 @@ def
get_completed_incore_jobs(self): if i is None: job_type = '_'.join(job_name.split('_')[:-1]) # Consider job types such as 'directed_scan'. job = self.job_dict[label][job_type][job_name] - elif 'conformer' in job_name: - job = self.job_dict[label]['conformers'][i] + elif 'conf_opt' in job_name: + job = self.job_dict[label]['conf_opt'][i] + elif 'conf_sp' in job_name: + job = self.job_dict[label]['conf_sp'][i] elif 'tsg' in job_name: job = self.job_dict[label]['tsg'][i] else: @@ -3167,12 +3198,12 @@ def troubleshoot_negative_freq(self, self.delete_all_species_jobs(label) self.species_dict[label].conformers = confs self.species_dict[label].conformer_energies = [None] * len(confs) - self.job_dict[label]['conformers'] = dict() # initialize the conformer job dictionary + self.job_dict[label]['conf_opt'] = dict() # initialize the conformer job dictionary for i, xyz in enumerate(self.species_dict[label].conformers): self.run_job(label=label, xyz=xyz, - level_of_theory=self.conformer_level, - job_type='conformers', + level_of_theory=self.conformer_opt_level, + job_type='conf_opt', conformer=i, ) @@ -3499,7 +3530,7 @@ def troubleshoot_conformer_isomorphism(self, label: str): raise SchedulerError('The troubleshoot_conformer_isomorphism() method got zero conformers.') # use the first conformer of a species to determine applicable troubleshooting method - job = self.job_dict[label]['conformers'][0] + job = self.job_dict[label]['conf_opt'][0] level_of_theory = trsh_conformer_isomorphism(software=job.job_adapter, ess_trsh_methods=job.ess_trsh_methods) @@ -3520,15 +3551,15 @@ def troubleshoot_conformer_isomorphism(self, label: str): # initial xyz before troubleshooting xyz = self.species_dict[label].conformers_before_opt[conformer] - job = self.job_dict[label]['conformers'][conformer] - if 'Conformers: ' + level_of_theory not in job.ess_trsh_methods: - job.ess_trsh_methods.append('Conformers: ' + level_of_theory) + job = self.job_dict[label]['conf_opt'][conformer] + if 'conf_opt: ' + level_of_theory not in job.ess_trsh_methods: + job.ess_trsh_methods.append('conf_opt: ' + level_of_theory) self.run_job(label=label, xyz=xyz, level_of_theory=level_of_theory, job_adapter=job.job_adapter, - job_type='conformers', + job_type='conf_opt', ess_trsh_methods=job.ess_trsh_methods, conformer=conformer, ) @@ -3542,7 +3573,7 @@ def delete_all_species_jobs(self, label: str): """ logger.debug(f'Deleting all jobs for species {label}') for value in self.job_dict[label].values(): - if value in ['conformers', 'tsg']: + if value in ['conf_opt', 'tsg']: for job_name, job in self.job_dict[label][value].items(): if label in self.running_jobs.keys() and job_name in self.running_jobs[label] \ and job.execution_type != 'incore': @@ -3599,17 +3630,17 @@ def restore_running_jobs(self): if ('conformer' not in job_description or job_description['conformer'] is None) \ and ('tsg' not in job_description or job_description['tsg'] is None): self.job_dict[spc_label][job_description['job_type']] = dict() - elif 'conformers' not in self.job_dict[spc_label].keys(): - self.job_dict[spc_label]['conformers'] = dict() + elif 'conf_opt' not in self.job_dict[spc_label].keys(): + self.job_dict[spc_label]['conf_opt'] = dict() elif 'tsg' not in self.job_dict[spc_label].keys(): self.job_dict[spc_label]['tsg'] = dict() if ('conformer' not in job_description or job_description['conformer'] is None) \ and ('tsg' not in job_description or job_description['tsg'] is None): self.job_dict[spc_label][job_description['job_type']][job_description['job_name']] = job 
elif 'conformer' in job_description and job_description['conformer'] is not None: - if 'conformers' not in self.job_dict[spc_label].keys(): - self.job_dict[spc_label]['conformers'] = dict() - self.job_dict[spc_label]['conformers'][int(job_description['conformer'])] = job + if 'conf_opt' not in self.job_dict[spc_label].keys(): + self.job_dict[spc_label]['conf_opt'] = dict() + self.job_dict[spc_label]['conf_opt'][int(job_description['conformer'])] = job # don't generate additional conformers for this species self.dont_gen_confs.append(spc_label) elif 'tsg' in job_description and job_description['tsg'] is not None: @@ -3623,9 +3654,9 @@ def restore_running_jobs(self): content += f'\n{spc_label}: ' for job_type in self.job_dict[spc_label].keys(): for job_name in self.job_dict[spc_label][job_type].keys(): - if job_type not in ['conformers', 'tsg']: + if job_type not in ['conf_opt', 'conf_sp', 'tsg']: content += job_name + ', ' - elif job_type == 'conformers': + elif 'conf_' in job_type: content += self.job_dict[spc_label][job_type][job_name].job_name \ + f' (conformer{job_name}), ' elif job_type == 'tsg': @@ -3649,9 +3680,11 @@ def save_restart_dict(self): self.restart_dict['running_jobs'][spc.label] = \ [self.job_dict[spc.label][job_name.rsplit('_', 1)[0]][job_name].as_dict() for job_name in self.running_jobs[spc.label] - if 'conformer' not in job_name and 'tsg' not in job_name] \ - + [self.job_dict[spc.label]['conformers'][get_i_from_job_name(job_name)].as_dict() - for job_name in self.running_jobs[spc.label] if 'conformer' in job_name] \ + if all(x not in job_name for x in ['conf_opt', 'conf_sp', 'tsg'])] \ + + [self.job_dict[spc.label]['conf_opt'][get_i_from_job_name(job_name)].as_dict() + for job_name in self.running_jobs[spc.label] if 'conf_opt' in job_name] \ + + [self.job_dict[spc.label]['conf_sp'][get_i_from_job_name(job_name)].as_dict() + for job_name in self.running_jobs[spc.label] if 'conf_sp' in job_name] \ + [self.job_dict[spc.label]['tsg'][get_i_from_job_name(job_name)].as_dict() for job_name in self.running_jobs[spc.label] if 'tsg' in job_name] logger.debug(f'Dumping restart dictionary:\n{self.restart_dict}') diff --git a/arc/scheduler_test.py b/arc/scheduler_test.py index 7e08bba031..10d5b1571e 100644 --- a/arc/scheduler_test.py +++ b/arc/scheduler_test.py @@ -54,11 +54,11 @@ def setUpClass(cls): H -1.82570782 0.42754384 -0.56130718""" cls.spc3 = ARCSpecies(label='CtripCO', smiles='C#CO', xyz=xyz3) cls.job1 = job_factory(job_adapter='gaussian', project='project_test', ess_settings=cls.ess_settings, - species=[cls.spc1], xyz=xyz1, job_type='conformers', + species=[cls.spc1], xyz=xyz1, job_type='conf_opt', conformer=0, level=Level(repr={'method': 'b97-d3', 'basis': '6-311+g(d,p)'}), project_directory=cls.project_directory, job_num=101) cls.job2 = job_factory(job_adapter='gaussian', project='project_test', ess_settings=cls.ess_settings, - species=[cls.spc1], xyz=xyz1, job_type='conformers', + species=[cls.spc1], xyz=xyz1, job_type='conf_opt', conformer=1, level=Level(repr={'method': 'b97-d3', 'basis': '6-311+g(d,p)'}), project_directory=cls.project_directory, job_num=102) cls.job3 = job_factory(job_adapter='qchem', project='project_test', ess_settings=cls.ess_settings, @@ -70,7 +70,8 @@ def setUpClass(cls): level=Level(repr={'method': 'b3lyp', 'basis': 'cbsb7'}), project_directory=cls.project_directory, job_num=104) cls.rmg_database = rmgdb.make_rmg_database_object() - cls.job_types1 = {'conformers': True, + cls.job_types1 = {'conf_opt': True, + 'conf_sp': False, 'opt': True, 
                          'fine': False,
                          'freq': True,
@@ -79,7 +80,8 @@ def setUpClass(cls):
                          'orbitals': False,
                          'lennard_jones': False,
                          }
-        cls.job_types2 = {'conformers': True,
+        cls.job_types2 = {'conf_opt': True,
+                          'conf_sp': False,
                          'opt': True,
                          'fine': False,
                          'freq': True,
@@ -89,7 +91,7 @@ def setUpClass(cls):
         cls.sched1 = Scheduler(project='project_test_1', ess_settings=cls.ess_settings,
                                species_list=[cls.spc1, cls.spc2, cls.spc3],
                                composite_method=None,
-                               conformer_level=Level(repr=default_levels_of_theory['conformer']),
+                               conformer_opt_level=Level(repr=default_levels_of_theory['conformer']),
                                opt_level=Level(repr=default_levels_of_theory['opt']),
                                freq_level=Level(repr=default_levels_of_theory['freq']),
                                sp_level=Level(repr=default_levels_of_theory['sp']),
@@ -105,7 +107,7 @@ def setUpClass(cls):
         cls.sched2 = Scheduler(project='project_test_2', ess_settings=cls.ess_settings,
                                species_list=[cls.spc1, cls.spc2, cls.spc3],
                                composite_method=None,
-                               conformer_level=Level(repr=default_levels_of_theory['conformer']),
+                               conformer_opt_level=Level(repr=default_levels_of_theory['conformer']),
                                opt_level=Level(repr=default_levels_of_theory['opt']),
                                freq_level=Level(repr=default_levels_of_theory['freq']),
                                sp_level=Level(repr=default_levels_of_theory['sp']),
@@ -121,7 +123,7 @@ def setUpClass(cls):
         cls.sched3 = Scheduler(project='project_test_4', ess_settings=cls.ess_settings,
                                species_list=[cls.spc1],
                                composite_method=Level(repr='CBS-QB3'),
-                               conformer_level=Level(repr=default_levels_of_theory['conformer']),
+                               conformer_opt_level=Level(repr=default_levels_of_theory['conformer']),
                                opt_level=Level(repr=default_levels_of_theory['freq_for_composite']),
                                freq_level=Level(repr=default_levels_of_theory['freq_for_composite']),
                                scan_level=Level(repr=default_levels_of_theory['scan_for_composite']),
@@ -140,9 +142,9 @@ def test_conformers(self):
         self.job2.local_path_to_output_file = os.path.join(ARC_PATH, 'arc', 'testing', 'methylamine_conformer_1.out')
         self.job2.job_status = ['done', {'status': 'done', 'keywords': list(), 'error': '', 'line': ''}]
         self.sched1.job_dict[label] = dict()
-        self.sched1.job_dict[label]['conformers'] = dict()
-        self.sched1.job_dict[label]['conformers'][0] = self.job1
-        self.sched1.job_dict[label]['conformers'][1] = self.job2
+        self.sched1.job_dict[label]['conf_opt'] = dict()
+        self.sched1.job_dict[label]['conf_opt'][0] = self.job1
+        self.sched1.job_dict[label]['conf_opt'][1] = self.job2
         self.sched1.species_dict[label].conformer_energies = [None, None]
         self.sched1.species_dict[label].conformers = [None, None]
         self.sched1.parse_conformer(job=self.job1, label=label, i=0)
@@ -176,7 +178,7 @@ def test_conformers(self):
                                             'warnings': '',
                                             'errors': '',
                                             'job_types': {'opt': False, 'composite': False, 'sp': False, 'fine': False,
-                                                          'freq': False, 'conformers': False},
+                                                          'freq': False, 'conf_opt': False, 'conf_sp': False},
                                             'convergence': False, 'conformers': '', 'restart': ''}
         self.sched1.run_conformer_jobs()
         save_conformers_file(project_directory=self.sched1.project_directory,
@@ -221,7 +223,7 @@ def test_determine_adaptive_level(self):
                                   ess_settings=self.ess_settings,
                                   species_list=[self.spc1, self.spc2],
                                   composite_method=None,
-                                  conformer_level=default_levels_of_theory['conformer'],
+                                  conformer_opt_level=default_levels_of_theory['conformer'],
                                   opt_level=default_levels_of_theory['opt'],
                                   freq_level=default_levels_of_theory['freq'],
                                   sp_level=default_levels_of_theory['sp'],
@@ -276,7 +278,8 @@ def test_initialize_output_dict(self):
                          'isomorphism': '',
                          'job_types': {'rotors': True,
                                        'composite': False,
-                                       'conformers': False,
+                                       'conf_opt': False,
+                                       'conf_sp': False,
                                        'fine': False,
                                        'freq': False,
                                        'lennard_jones': False,
@@ -644,21 +647,21 @@ def test_check_rxn_e0_by_spc(self):
                             'sp': os.path.join(ARC_PATH, 'arc', 'testing', 'opt', 'nC3H7.out'),
                             'composite': ''},
                   'restart': '', 'convergence': True,
-                  'job_types': {'conformers': True, 'opt': True, 'freq': True, 'sp': True, 'rotors': True, 'irc': True, 'fine': True},
+                  'job_types': {'conf_opt': True, 'conf_sp': False, 'opt': True, 'freq': True, 'sp': True, 'rotors': True, 'irc': True, 'fine': True},
                   },
        'iC3H7': {'paths': {'geo': os.path.join(ARC_PATH, 'arc', 'testing', 'opt', 'iC3H7.out'),
                            'freq': os.path.join(ARC_PATH, 'arc', 'testing', 'freq', 'iC3H7.out'),
                            'sp': os.path.join(ARC_PATH, 'arc', 'testing', 'opt', 'iC3H7.out'),
                            'composite': ''},
                  'restart': '', 'convergence': True,
-                 'job_types': {'conformers': True, 'opt': True, 'freq': True, 'sp': True, 'rotors': True, 'irc': True, 'fine': True},
+                 'job_types': {'conf_opt': True, 'conf_sp': False, 'opt': True, 'freq': True, 'sp': True, 'rotors': True, 'irc': True, 'fine': True},
                  },
        'TS0': {'paths': {'geo': os.path.join(ARC_PATH, 'arc', 'testing', 'opt', 'TS_nC3H7-iC3H7.out'),
                          'freq': os.path.join(ARC_PATH, 'arc', 'testing', 'freq', 'TS_nC3H7-iC3H7.out'),
                          'sp': os.path.join(ARC_PATH, 'arc', 'testing', 'opt', 'TS_nC3H7-iC3H7.out'),
                          'composite': ''},
                'restart': '', 'convergence': True,
-               'job_types': {'conformers': True, 'opt': True, 'freq': True, 'sp': True, 'rotors': True, 'irc': True, 'fine': True},
+               'job_types': {'conf_opt': True, 'conf_sp': False, 'opt': True, 'freq': True, 'sp': True, 'rotors': True, 'irc': True, 'fine': True},
                },
        }
        project_directory = os.path.join(ARC_PATH, 'Projects', 'arc_project_for_testing_delete_after_usage6')
@@ -717,7 +720,7 @@ def test_save_e_elect(self):
                        opt_level=Level(method='B3LYP', basis='6-31G(d,p)', software='gaussian'),
                        sp_level=Level(method='B3LYP', basis='6-31G(d,p)', software='gaussian'),
                        job_types={'opt': True, 'fine_grid': False, 'freq': False, 'sp': True, 'rotors': False,
-                                  'conformers': False, 'irc': False},
+                                  'conf_opt': False, 'conf_sp': False, 'irc': False},
                        report_e_elect=True,
                        testing=True,
                        )
diff --git a/arc/settings/settings.py b/arc/settings/settings.py
index de3da3f596..72946e303a 100644
--- a/arc/settings/settings.py
+++ b/arc/settings/settings.py
@@ -90,7 +90,8 @@
 ts_adapters = ['heuristics', 'AutoTST', 'GCN', 'xtb_gsm']

 # List here job types to execute by default
-default_job_types = {'conformers': True,  # defaults to True if not specified
+default_job_types = {'conf_opt': True,  # defaults to True if not specified
+                     'conf_sp': False,  # defaults to False if not specified
                      'opt': True,  # defaults to True if not specified
                      'fine_grid': True,  # defaults to True if not specified
                      'freq': True,  # defaults to True if not specified
diff --git a/arc/testing/restart/1_restart_thermo/restart.yml b/arc/testing/restart/1_restart_thermo/restart.yml
index 1cad62fe28..726f7c3ef6 100644
--- a/arc/testing/restart/1_restart_thermo/restart.yml
+++ b/arc/testing/restart/1_restart_thermo/restart.yml
@@ -9,7 +9,7 @@ arkane_level_of_theory:
   software: gaussian
 calc_freq_factor: true
 compute_transport: false
-conformer_level:
+conformer_opt_level:
   basis: 6-31g(d,p)
   dispersion: empiricaldispersion=gd3bj
   method: b3lyp
@@ -43,7 +43,8 @@ irc_level:
 job_memory: 14
 job_types:
   bde: false
-  conformers: true
+  conf_opt: true
+  conf_sp: false
   fine: true
   freq: true
   irc: true
@@ -74,7 +75,8 @@ output:
     job_types:
       bde: true
      composite: false
-      conformers: true
+      conf_opt: true
+      conf_sp: false
      fine: true
      freq: true
      irc: true
@@ -99,7 +101,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: false
+      conf_opt: false
+      conf_sp: false
      fine: false
      freq: false
      irc: true
@@ -125,7 +128,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: true
+      conf_opt: true
+      conf_sp: false
      fine: true
      freq: true
      irc: true
@@ -151,7 +155,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: true
+      conf_opt: true
+      conf_sp: false
      fine: true
      freq: true
      irc: true
diff --git a/arc/testing/restart/2_restart_rate/restart.yml b/arc/testing/restart/2_restart_rate/restart.yml
index b80e763cad..2394c2541a 100644
--- a/arc/testing/restart/2_restart_rate/restart.yml
+++ b/arc/testing/restart/2_restart_rate/restart.yml
@@ -3,7 +3,7 @@ calc_freq_factor: false
 compare_to_rmg: false
 composite_method: ''
 bac_type: null
-conformer_level:
+conformer_opt_level:
   auxiliary_basis: ''
   basis: def2svp
   dispersion: ''
@@ -23,7 +23,8 @@ irc_level:
 job_memory: 14
 job_types:
   bde: false
-  conformers: false
+  conf_opt: false
+  conf_sp: false
   fine: false
   freq: true
   irc: false
@@ -55,7 +56,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: true
+      conf_opt: true
+      conf_sp: false
      fine: false
      freq: true
      irc: true
@@ -80,7 +82,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: true
+      conf_opt: true
+      conf_sp: false
      fine: false
      freq: true
      irc: true
@@ -105,7 +108,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: true
+      conf_opt: true
+      conf_sp: false
      fine: false
      freq: true
      irc: true
@@ -130,7 +134,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: true
+      conf_opt: true
+      conf_sp: false
      fine: false
      freq: true
      irc: true
@@ -155,7 +160,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: false
+      conf_opt: false
+      conf_sp: false
      fine: false
      freq: true
      irc: true
diff --git a/arc/testing/restart/3_restart_bde/restart.yml b/arc/testing/restart/3_restart_bde/restart.yml
index 0040081fcb..df743c9d9c 100644
--- a/arc/testing/restart/3_restart_bde/restart.yml
+++ b/arc/testing/restart/3_restart_bde/restart.yml
@@ -9,7 +9,7 @@ arkane_level_of_theory:
   software: gaussian
 compare_to_rmg: false
 compute_transport: false
-conformer_level:
+conformer_opt_level:
   basis: def2svp
   compatible_ess:
   - gaussian
@@ -43,7 +43,8 @@ irc_level:
 job_memory: 14
 job_types:
   bde: true
-  conformers: true
+  conf_opt: true
+  conf_sp: false
   fine: true
   freq: true
   irc: true
@@ -78,7 +79,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: false
+      conf_opt: false
+      conf_sp: false
      fine: false
      freq: false
      irc: true
@@ -105,7 +107,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: true
+      conf_opt: true
+      conf_sp: false
      fine: true
      freq: true
      irc: true
@@ -131,7 +134,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: true
+      conf_opt: true
+      conf_sp: false
      fine: true
      freq: true
      irc: true
@@ -156,7 +160,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: true
+      conf_opt: true
+      conf_sp: false
      fine: true
      freq: true
      irc: true
@@ -181,7 +186,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: true
+      conf_opt: true
+      conf_sp: false
      fine: true
      freq: true
      irc: true
@@ -206,7 +212,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: true
+      conf_opt: true
+      conf_sp: false
      fine: true
      freq: true
      irc: true
@@ -231,7 +238,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: true
+      conf_opt: true
+      conf_sp: false
      fine: true
      freq: true
      irc: true
@@ -256,7 +264,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: true
+      conf_opt: true
+      conf_sp: false
      fine: true
      freq: true
      irc: true
diff --git a/arc/testing/restart/5_TS1/restart.yml b/arc/testing/restart/5_TS1/restart.yml
index 7478bf7e8f..e690632ea5 100644
--- a/arc/testing/restart/5_TS1/restart.yml
+++ b/arc/testing/restart/5_TS1/restart.yml
@@ -4,7 +4,7 @@ arkane_level_of_theory:
   method: ccsd(t)-f12
   method_type: wavefunction
   software: molpro
-conformer_level:
+conformer_opt_level:
   basis: def2tzvp
   compatible_ess:
   - gaussian
@@ -24,7 +24,8 @@ irc_level:
   software: gaussian
 job_types:
   bde: false
-  conformers: true
+  conf_opt: true
+  conf_sp: false
   fine: true
   freq: true
   irc: true
@@ -48,7 +49,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: false
+      conf_opt: false
+      conf_sp: false
      fine: false
      freq: false
      irc: false
@@ -73,7 +75,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: false
+      conf_opt: false
+      conf_sp: false
      fine: false
      freq: false
      irc: false
@@ -98,7 +101,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: false
+      conf_opt: false
+      conf_sp: false
      fine: false
      freq: false
      irc: false
@@ -123,7 +127,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: false
+      conf_opt: false
+      conf_sp: false
      fine: false
      freq: false
      irc: false
@@ -148,7 +153,8 @@ output:
    job_types:
      bde: true
      composite: false
-      conformers: false
+      conf_opt: false
+      conf_sp: false
      fine: true
      freq: true
      irc: true
diff --git a/docs/source/advanced.rst b/docs/source/advanced.rst
index 37c94e1a7a..86953f9b98 100644
--- a/docs/source/advanced.rst
+++ b/docs/source/advanced.rst
@@ -566,12 +566,13 @@ which could be read as an input in ARC::

     input_dict['project'] = 'Demo_project_input_file_from_API'

-    input_dict['job_types'] = {'conformers': True,
+    input_dict['job_types'] = {'conf_opt': True,
                                'opt': True,
                                'fine': True,
                                'freq': True,
                                'sp': True,
                                'rotors': True,
+                               'conf_sp': False,
                                'orbitals': False,
                                'lennard_jones': False,
                                }
diff --git a/docs/source/examples.rst b/docs/source/examples.rst
index 5e9538dccf..86899a99cb 100644
--- a/docs/source/examples.rst
+++ b/docs/source/examples.rst
@@ -126,7 +126,7 @@ The same example as above ran via the API (e.g., in `Jupyter notebooks`__) would

    arc = ARC(project='arc_demo_1',
              ess_settings={'gaussian': ['local', 'server1'], 'molpro': 'server1', 'qchem': 'server2'},
-              job_types={'rotors': True, 'conformers': True, 'fine': True, 'freq': True, 'opt': True, sp: True},
+              job_types={'rotors': True, 'conf_opt': True, 'conf_sp': True, 'fine': True, 'freq': True, 'opt': True, 'sp': True},
              max_job_time=24,
              level_of_theory='CCSD(T)-F12/cc-pVTZ-F12//wb97xd/def2tzvp',
              scan_level='wb97xd/def2tzvp',
diff --git a/functional/functional_test.py b/functional/functional_test.py
index a69ec7029f..8624d4e8cf 100644
--- a/functional/functional_test.py
+++ b/functional/functional_test.py
@@ -31,11 +31,12 @@ def setUpClass(cls):
         cls.maxDiff = None
         cls.has_settings = False
-        cls.job_types = {'conformers': True,
+        cls.job_types = {'conf_opt': True,
                         'opt': True,
                         'fine_grid': False,
                         'freq': True,
                         'sp': True,
+                         'conf_sp': False,
                         'rotors': False,
                         'irc': False,
                         }