From 986312e7c9b56e6398a7ccb6d1501be56728db36 Mon Sep 17 00:00:00 2001 From: Duminda Date: Sat, 10 Oct 2020 11:03:37 -0400 Subject: [PATCH 1/2] bug fix: change from 'local' to dictionary server name --- arc/job/ssh.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/arc/job/ssh.py b/arc/job/ssh.py index 502cbe0573..be966f3ae5 100644 --- a/arc/job/ssh.py +++ b/arc/job/ssh.py @@ -275,11 +275,11 @@ def check_running_jobs_ids(self) -> list: cmd = check_status_command[servers[self.server]['cluster_soft']] + ' -u $USER' stdout = self._send_command_to_server(cmd)[0] for i, status_line in enumerate(stdout): - if servers['local']['cluster_soft'].lower() == 'slurm' and i > 0: + if servers[self.server]['cluster_soft'].lower() == 'slurm' and i > 0: running_jobs_ids.append(int(status_line.split()[0])) - elif servers['local']['cluster_soft'].lower() in ['oge', 'sge'] and i > 1: + elif servers[self.server]['cluster_soft'].lower() in ['oge', 'sge'] and i > 1: running_jobs_ids.append(int(status_line.split()[0])) - elif servers['local']['cluster_soft'].lower() == 'pbs' and i > 4: + elif servers[self.server]['cluster_soft'].lower() == 'pbs' and i > 4: running_jobs_ids.append(int(status_line.split('.')[0])) return running_jobs_ids @@ -308,13 +308,13 @@ def submit_job(self, remote_path: str) -> Tuple[str, int]: if 'Requested node configuration is not available' in line: logger.warning(f'User may be requesting more resources than are available. 
Please check server ' f'settings, such as cpus and memory, in ARC/arc/settings/settings.py') - elif servers['local']['cluster_soft'].lower() in ['oge', 'sge'] and 'submitted' in stdout[0].lower(): + elif servers[self.server]['cluster_soft'].lower() in ['oge', 'sge'] and 'submitted' in stdout[0].lower(): job_id = int(stdout[0].split()[2]) job_status = 'running' - elif servers['local']['cluster_soft'].lower() == 'slurm' and 'submitted' in stdout[0].lower(): + elif servers[self.server]['cluster_soft'].lower() == 'slurm' and 'submitted' in stdout[0].lower(): job_id = int(stdout[0].split()[3]) job_status = 'running' - elif servers['local']['cluster_soft'].lower() == 'pbs': + elif servers[self.server]['cluster_soft'].lower() == 'pbs': job_id = int(stdout[0].split('.')[0]) job_status = 'running' else: From 5dc5a71f16d4727bc0606d947ac80e7cebaaedc4 Mon Sep 17 00:00:00 2001 From: Duminda Date: Sat, 10 Oct 2020 11:06:04 -0400 Subject: [PATCH 2/2] moved job .csv files from ARC to .arc folder. --- arc/common.py | 1 + arc/job/job.py | 10 +++++----- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/arc/common.py b/arc/common.py index 12fcbad32a..034e45ba32 100644 --- a/arc/common.py +++ b/arc/common.py @@ -35,6 +35,7 @@ logger = logging.getLogger('arc') arc_path = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) # absolute path to the ARC folder +local_arc_path = os.path.join(os.getenv("HOME"), '.arc') VERSION = '1.1.0' diff --git a/arc/job/job.py b/arc/job/job.py index 3f86c16e29..7394fb47f7 100644 --- a/arc/job/job.py +++ b/arc/job/job.py @@ -10,7 +10,7 @@ from pprint import pformat from typing import Dict, Optional, Union -from arc.common import arc_path, get_logger +from arc.common import arc_path, get_logger, local_arc_path from arc.exceptions import JobError, InputError from arc.imports import settings, input_files, submit_scripts from arc.job.local import (get_last_modified_time, @@ -424,7 +424,7 @@ def _set_job_number(self): """ Used as the entry
number in the database, as well as the job name on the server. """ - csv_path = os.path.join(arc_path, 'initiated_jobs.csv') + csv_path = os.path.join(local_arc_path, 'initiated_jobs.csv') if not os.path.isfile(csv_path): # check file, make index file and write headers if file doesn't exists with open(csv_path, 'w') as f: @@ -445,7 +445,7 @@ def _write_initiated_job_to_csv_file(self): """ Write an initiated ARCJob into the initiated_jobs.csv file. """ - csv_path = os.path.join(arc_path, 'initiated_jobs.csv') + csv_path = os.path.join(local_arc_path, 'initiated_jobs.csv') if self.conformer < 0: # this is not a conformer search job conformer = '-' else: @@ -463,7 +463,7 @@ def write_completed_job_to_csv_file(self): """ if self.job_status[0] != 'done' or self.job_status[1]['status'] != 'done': self.determine_job_status() - csv_path = os.path.join(arc_path, 'completed_jobs.csv') + csv_path = os.path.join(local_arc_path, 'completed_jobs.csv') if not os.path.isfile(csv_path): # check file, make index file and write headers if file doesn't exists with open(csv_path, 'w') as f: @@ -473,7 +473,7 @@ def write_completed_job_to_csv_file(self): 'final_time', 'run_time', 'job_status_(server)', 'job_status_(ESS)', 'ESS troubleshooting methods used', 'comments'] writer.writerow(row) - csv_path = os.path.join(arc_path, 'completed_jobs.csv') + csv_path = os.path.join(local_arc_path, 'completed_jobs.csv') if self.conformer < 0: # this is not a conformer search job conformer = '-' else: