diff --git a/efelg/tools/resources.py b/efelg/tools/resources.py
index e9c17d07..38053922 100755
--- a/efelg/tools/resources.py
+++ b/efelg/tools/resources.py
@@ -2,12 +2,9 @@
import sys
import json
import collections
-import neo
import pprint
from datetime import datetime
-import requests
import logging
-import bluepyefe.formats.axon as fa
# set logging up
logging.basicConfig(stream=sys.stdout)
diff --git a/efelg/views.py b/efelg/views.py
old mode 100755
new mode 100644
index d7b608a9..3f30310e
--- a/efelg/views.py
+++ b/efelg/views.py
@@ -188,7 +188,7 @@ def get_data(request, cellname=""):
trace_info['coefficient'] = coefficient
trace_info['disp_sampling_rate'] = disp_sampling_rate
- trace_info['md5'] = content['md5']
+ trace_info['md5'] = content.get('md5')
trace_info['sampling_rate'] = content['sampling_rate']
trace_info['etype'] = content['etype']
@@ -618,7 +618,7 @@ def file_formats_docs(request):
"""
Render Guidebook file formats page
"""
- logger.info(LOG_ACTION.format(request.session['username'], 'access FILE_FORMATS page'))
+ logger.info(LOG_ACTION.format(request.session.get('username', 'anonymous'), 'access FILE_FORMATS page'))
return render(request, 'efelg/docs/file_formats.html')
@@ -638,7 +638,7 @@ def dataset(request):
"""
Return Guidebook dataset page
"""
- logger.info(LOG_ACTION.format(request.session['username'], 'access DATASET page'))
+ logger.info(LOG_ACTION.format(request.session.get('username', 'anonymous'), 'access DATASET page'))
return render(request, 'efelg/docs/dataset.html')
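Both logging fixes in this file swap direct session indexing for `dict.get` with a fallback, so anonymous visitors no longer raise a `KeyError`. A minimal sketch of the difference, using a plain dict to stand in for Django's session (values are illustrative):

```python
session = {}  # anonymous visitor: no 'username' key yet

# direct indexing raises KeyError when the key is missing
try:
    user = session['username']
except KeyError:
    user = None

# .get() returns the fallback instead of raising
user = session.get('username', 'anonymous')
print(user)  # -> 'anonymous'
```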
diff --git a/hh_neuron_builder/config/dev_conf.py.example b/hh_neuron_builder/config/dev_conf.py.example
index 93ba4bc3..dc4d1008 100755
--- a/hh_neuron_builder/config/dev_conf.py.example
+++ b/hh_neuron_builder/config/dev_conf.py.example
@@ -36,6 +36,6 @@ DATABASES = {
}
}
-MEDIA_ROOT = os.path.join('/apps', 'media')
+MEDIA_ROOT = os.path.join(BASE_DIR, '../app', 'media')
-LOG_ROOT_PATH = './'
\ No newline at end of file
+LOG_ROOT_PATH = os.path.join(BASE_DIR, '../log/')
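Both settings now derive from `BASE_DIR` rather than hard-coded locations, so the example config works outside the original deployment layout. A quick sketch of how the relative segments resolve, assuming a hypothetical project root:

```python
import os

BASE_DIR = '/srv/hhnb'  # hypothetical project root

media_root = os.path.join(BASE_DIR, '../app', 'media')
log_root = os.path.join(BASE_DIR, '../log/')

# os.path.join keeps the '..' segments; normpath shows the effective paths
print(os.path.normpath(media_root))  # -> /srv/app/media
print(os.path.normpath(log_root))    # -> /srv/log
```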
diff --git a/hh_neuron_builder/config/hhf_template/parameters.json b/hh_neuron_builder/config/hhf_template/parameters.json
deleted file mode 100644
index 428c8b7c..00000000
--- a/hh_neuron_builder/config/hhf_template/parameters.json
+++ /dev/null
@@ -1,60 +0,0 @@
-{
-"model_key": {
- "mechanisms": {
- "all": ["pas","kdrb", "na3"],
- "somatic": ["kdb", "kmb", "kap", "hd", "can", "cal", "cat", "cagk", "kca", "cacum"],
- "axonal": ["kmb", "kap"],
- "alldend":["hd", "can", "cal", "cat", "cagk", "kca", "cacum", "kad"]
- },
- "distributions": {
- "exp_na_dend": "math.exp((-{distance})/50)*{value}"
- },
- "fixed": {
- "global": [["v_init", -80], ["celsius", 34]],
- "all": [["cm", 1, "secvar"],["ena", 50, "secvar"],["ek", -90, "secvar"]]
- },
- "optimized": {
- "all": [
- ],
- "axonal": [
- ["gbar_na3", 0.025, 0.15, "uniform" ],
- ["gkdrbar_kdrb", 0.01, 0.08, "uniform" ],
- ["gkabar_kap", 0.001, 0.04, "uniform" ],
- ["gbar_kmb", 0.004, 0.05, "uniform" ],
- ["Ra", 50, 300, "secvar"],
- ["g_pas", 1e-6, 8e-5, "uniform" ],
- ["e_pas", -95, -75, "uniform" ]
- ],
- "somatic": [
- ["gbar_na3", 0.02, 0.06, "uniform" ],
- ["gkdrbar_kdrb", 0.001, 0.08, "uniform" ],
- ["gkabar_kap", 0.005, 0.04, "uniform" ],
- ["gkdbar_kdb", 1e-5, 10e-5, "uniform" ],
- ["gbar_kmb", 0.001, 0.02, "uniform" ],
- ["ghdbar_hd", 1e-6, 5e-4, "uniform" ],
- ["gcalbar_cal", 0.1e-5, 3e-5, "uniform" ],
- ["gcanbar_can", 0.1e-5, 3e-5, "uniform" ],
- ["gcatbar_cat", 0.1e-5, 3e-5, "uniform" ],
- ["gbar_kca", 1e-6, 1e-5, "uniform" ],
- ["gbar_cagk", 0.5e-5, 3e-4, "uniform" ],
- ["Ra", 50, 400, "secvar"],
- ["g_pas", 1e-6, 20e-5, "uniform" ],
- ["e_pas", -120, -75, "uniform" ]
- ],
- "alldend": [
- ["gbar_na3", 0.02, 0.06, "exp_na_dend" ],
- ["gkdrbar_kdrb", 0.001, 0.8, "uniform" ],
- ["gkabar_kad", 0.005, 0.1, "uniform" ],
- ["ghdbar_hd", 1e-6, 5e-4, "uniform" ],
- ["gcalbar_cal", 0.1e-5, 3e-5, "uniform" ],
- ["gcanbar_can", 0.1e-5, 3e-5, "uniform" ],
- ["gcatbar_cat", 0.1e-5, 3e-5, "uniform" ],
- ["gbar_kca", 1e-6, 1e-5, "uniform" ],
- ["gbar_cagk", 0.5e-5, 3e-4, "uniform" ],
- ["Ra", 50, 400, "secvar"],
- ["g_pas", 1e-6, 20e-5, "uniform" ],
- ["e_pas", -120, -80, "uniform" ]
- ]
- }
-}
-}
diff --git a/hh_neuron_builder/config/hhf_template/parameters/interneuron/parameters.json b/hh_neuron_builder/config/hhf_template/parameters/interneuron/parameters.json
new file mode 100644
index 00000000..7e27f0a4
--- /dev/null
+++ b/hh_neuron_builder/config/hhf_template/parameters/interneuron/parameters.json
@@ -0,0 +1,276 @@
+{
+ "W_20221102163317": {
+ "mechanisms": {
+ "all": [
+ "pas",
+ "kdrb",
+ "na3"
+ ],
+ "somatic": [
+ "kdb",
+ "kmb",
+ "kap",
+ "hd",
+ "can",
+ "cal",
+ "cat",
+ "cagk",
+ "kca",
+ "cacum"
+ ],
+ "axonal": [
+ "kmb",
+ "kap"
+ ],
+ "alldend": [
+ "hd",
+ "can",
+ "cal",
+ "cat",
+ "cagk",
+ "kca",
+ "cacum",
+ "kad"
+ ]
+ },
+ "distributions": {
+ "exp_na_dend": "math.exp((-{distance})/50)*{value}"
+ },
+ "fixed": {
+ "global": [
+ [
+ "v_init",
+ -80
+ ],
+ [
+ "celsius",
+ 34
+ ]
+ ],
+ "all": [
+ [
+ "cm",
+ 1,
+ "secvar"
+ ],
+ [
+ "ena",
+ 50,
+ "secvar"
+ ],
+ [
+ "ek",
+ -90,
+ "secvar"
+ ]
+ ]
+ },
+ "optimized": {
+ "all": [],
+ "axonal": [
+ [
+ "gbar_na3",
+ 0.025,
+ 0.15,
+ "uniform"
+ ],
+ [
+ "gkdrbar_kdrb",
+ 0.01,
+ 0.08,
+ "uniform"
+ ],
+ [
+ "gkabar_kap",
+ 0.001,
+ 0.04,
+ "uniform"
+ ],
+ [
+ "gbar_kmb",
+ 0.004,
+ 0.05,
+ "uniform"
+ ],
+ [
+ "Ra",
+ 50,
+ 300,
+ "secvar"
+ ],
+ [
+ "g_pas",
+ 1e-06,
+ 8e-05,
+ "uniform"
+ ],
+ [
+ "e_pas",
+ -95,
+ -75,
+ "uniform"
+ ]
+ ],
+ "somatic": [
+ [
+ "gbar_na3",
+ 0.02,
+ 0.06,
+ "uniform"
+ ],
+ [
+ "gkdrbar_kdrb",
+ 0.001,
+ 0.08,
+ "uniform"
+ ],
+ [
+ "gkabar_kap",
+ 0.005,
+ 0.04,
+ "uniform"
+ ],
+ [
+ "gkdbar_kdb",
+ 1e-05,
+ 0.0001,
+ "uniform"
+ ],
+ [
+ "gbar_kmb",
+ 0.001,
+ 0.02,
+ "uniform"
+ ],
+ [
+ "ghdbar_hd",
+ 1e-06,
+ 0.0005,
+ "uniform"
+ ],
+ [
+ "gcalbar_cal",
+ 1e-06,
+ 3e-05,
+ "uniform"
+ ],
+ [
+ "gcanbar_can",
+ 1e-06,
+ 3e-05,
+ "uniform"
+ ],
+ [
+ "gcatbar_cat",
+ 1e-06,
+ 3e-05,
+ "uniform"
+ ],
+ [
+ "gbar_kca",
+ 1e-06,
+ 1e-05,
+ "uniform"
+ ],
+ [
+ "gbar_cagk",
+ 5e-06,
+ 0.0003,
+ "uniform"
+ ],
+ [
+ "Ra",
+ 50,
+ 400,
+ "secvar"
+ ],
+ [
+ "g_pas",
+ 1e-06,
+ 0.0002,
+ "uniform"
+ ],
+ [
+ "e_pas",
+ -120,
+ -75,
+ "uniform"
+ ]
+ ],
+ "alldend": [
+ [
+ "gbar_na3",
+ 0.02,
+ 0.06,
+ "exp_na_dend"
+ ],
+ [
+ "gkdrbar_kdrb",
+ 0.001,
+ 0.8,
+ "uniform"
+ ],
+ [
+ "gkabar_kad",
+ 0.005,
+ 0.1,
+ "uniform"
+ ],
+ [
+ "ghdbar_hd",
+ 1e-06,
+ 0.0005,
+ "uniform"
+ ],
+ [
+ "gcalbar_cal",
+ 1e-06,
+ 3e-05,
+ "uniform"
+ ],
+ [
+ "gcanbar_can",
+ 1e-06,
+ 3e-05,
+ "uniform"
+ ],
+ [
+ "gcatbar_cat",
+ 1e-06,
+ 3e-05,
+ "uniform"
+ ],
+ [
+ "gbar_kca",
+ 1e-06,
+ 1e-05,
+ "uniform"
+ ],
+ [
+ "gbar_cagk",
+ 5e-06,
+ 0.0003,
+ "uniform"
+ ],
+ [
+ "Ra",
+ 50,
+ 400,
+ "secvar"
+ ],
+ [
+ "g_pas",
+ 1e-06,
+ 0.0002,
+ "uniform"
+ ],
+ [
+ "e_pas",
+ -120,
+ -80,
+ "uniform"
+ ]
+ ]
+ }
+ }
+}
\ No newline at end of file
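The new per-cell-type template keeps the schema of the deleted file: one top-level workflow key, and each `optimized` entry is a `[name, lower_bound, upper_bound, distribution]` quadruple (with `secvar` for section variables such as `Ra`). A minimal sketch of reading the bounds back out, assuming the repository-relative path below:

```python
import json

path = ('hh_neuron_builder/config/hhf_template/parameters/'
        'interneuron/parameters.json')

with open(path) as fd:
    template = json.load(fd)

# the single top-level key is the template's workflow id
(key, config), = template.items()

for section, params in config['optimized'].items():
    for name, lower, upper, dist in params:
        print(f'{section}: {name} in [{lower}, {upper}] ({dist})')
```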
diff --git a/hh_neuron_builder/config/hhf_template/parameters/pyramidal/parameters.json b/hh_neuron_builder/config/hhf_template/parameters/pyramidal/parameters.json
new file mode 100644
index 00000000..5e4c27be
--- /dev/null
+++ b/hh_neuron_builder/config/hhf_template/parameters/pyramidal/parameters.json
@@ -0,0 +1,258 @@
+{
+ "W_20221102163317": {
+ "mechanisms": {
+ "all": [
+ "pas",
+ "kdr",
+ "nax"
+ ],
+ "axonal": [
+ "kmb",
+ "kap"
+ ],
+ "somatic": [
+ "kmb",
+ "kap",
+ "hd",
+ "can",
+ "cal",
+ "cat",
+ "kca",
+ "cagk",
+ "cacum"
+ ],
+ "alldend": [
+ "kad",
+ "hd",
+ "can",
+ "cal",
+ "cat",
+ "kca",
+ "cagk",
+ "cacum"
+ ]
+ },
+ "distributions": {
+ "linear_hd_apic": "(1. + 3./100. * {distance})*{value}",
+ "sigmoid_kad_apic": "(15./(1. + math.exp((300-{distance})/50)))*{value}",
+ "linear_e_pas_apic": "({value}-5*{distance}/150)"
+ },
+ "fixed": {
+ "global": [
+ [
+ "v_init",
+ -70
+ ],
+ [
+ "celsius",
+ 34
+ ]
+ ],
+ "all": [
+ [
+ "cm",
+ 1,
+ "secvar"
+ ],
+ [
+ "ena",
+ 50,
+ "secvar"
+ ],
+ [
+ "ek",
+ -90,
+ "secvar"
+ ]
+ ]
+ },
+ "optimized": {
+ "axonal": [
+ [
+ "gbar_nax",
+ 0.1,
+ 0.4,
+ "uniform"
+ ],
+ [
+ "gkdrbar_kdr",
+ 0.01,
+ 0.04,
+ "uniform"
+ ],
+ [
+ "gbar_kmb",
+ 0.002,
+ 0.06,
+ "uniform"
+ ],
+ [
+ "gkabar_kap",
+ 0.025,
+ 0.3,
+ "uniform"
+ ],
+ [
+ "Ra",
+ 50,
+ 100,
+ "secvar"
+ ],
+ [
+ "g_pas",
+ 2e-05,
+ 0.0002,
+ "uniform"
+ ],
+ [
+ "e_pas",
+ -80,
+ -60,
+ "uniform"
+ ]
+ ],
+ "somatic": [
+ [
+ "gkabar_kap",
+ 0.025,
+ 0.1,
+ "uniform"
+ ],
+ [
+ "gbar_nax",
+ 0.02,
+ 0.08,
+ "uniform"
+ ],
+ [
+ "gbar_kmb",
+ 0.0015,
+ 0.06,
+ "uniform"
+ ],
+ [
+ "Ra",
+ 100,
+ 400,
+ "secvar"
+ ],
+ [
+ "g_pas",
+ 5e-06,
+ 0.0001,
+ "uniform"
+ ],
+ [
+ "e_pas",
+ -80,
+ -65,
+ "linear_e_pas_apic"
+ ]
+ ],
+ "allnoaxon": [
+ [
+ "gkdrbar_kdr",
+ 0.001,
+ 0.008,
+ "uniform"
+ ],
+ [
+ "ghdbar_hd",
+ 5e-06,
+ 2e-05,
+ "linear_hd_apic"
+ ],
+ [
+ "gcalbar_cal",
+ 1e-06,
+ 2e-05,
+ "uniform"
+ ],
+ [
+ "gcanbar_can",
+ 1e-06,
+ 2e-05,
+ "uniform"
+ ],
+ [
+ "gcatbar_cat",
+ 1e-06,
+ 2e-05,
+ "uniform"
+ ],
+ [
+ "gbar_kca",
+ 2e-05,
+ 0.0002,
+ "uniform"
+ ],
+ [
+ "gbar_cagk",
+ 1e-06,
+ 0.0004,
+ "uniform"
+ ]
+ ],
+ "basal": [
+ [
+ "gbar_nax",
+ 0.002,
+ 0.02,
+ "uniform"
+ ],
+ [
+ "Ra",
+ 100,
+ 400,
+ "secvar"
+ ],
+ [
+ "g_pas",
+ 5e-06,
+ 0.0001,
+ "uniform"
+ ],
+ [
+ "e_pas",
+ -80,
+ -65,
+ "linear_e_pas_apic"
+ ]
+ ],
+ "apical": [
+ [
+ "gbar_nax",
+ 0.02,
+ 0.08,
+ "uniform"
+ ],
+ [
+ "Ra",
+ 100,
+ 400,
+ "secvar"
+ ],
+ [
+ "g_pas",
+ 5e-06,
+ 0.0001,
+ "uniform"
+ ],
+ [
+ "e_pas",
+ -80,
+ -65,
+ "linear_e_pas_apic"
+ ]
+ ],
+ "alldend": [
+ [
+ "gkabar_kad",
+ 0.005,
+ 0.05,
+ "sigmoid_kad_apic"
+ ]
+ ]
+ }
+ }
+}
\ No newline at end of file
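The `distributions` entries are format-string expressions over `{distance}` and `{value}` that the optimizer substitutes and evaluates. A hedged illustration of how one of them behaves, evaluated by hand (the `eval`-based helper is for demonstration only):

```python
import math

sigmoid_kad_apic = "(15./(1. + math.exp((300-{distance})/50)))*{value}"

def evaluate(expr, distance, value):
    # demonstration only: substitute into the template and evaluate it
    return eval(expr.format(distance=distance, value=value))

# the kad conductance scales up with distance from the soma
for d in (0, 150, 300, 450):
    print(d, round(evaluate(sigmoid_kad_apic, distance=d, value=1.0), 4))
```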
diff --git a/hh_neuron_builder/settings.py b/hh_neuron_builder/settings.py
index a78eb526..bc688593 100755
--- a/hh_neuron_builder/settings.py
+++ b/hh_neuron_builder/settings.py
@@ -166,12 +166,13 @@
MEDIA_ROOT = conf.MEDIA_ROOT
HHF_TEMPLATE_DIR = os.path.join(BASE_DIR, 'hh_neuron_builder', 'config', 'hhf_template', 'hhf')
+HHF_PARAMETERS_TEMPLATE_DIR = os.path.join(BASE_DIR, 'hh_neuron_builder', 'config', 'hhf_template', 'parameters')
if not os.path.exists(HHF_TEMPLATE_DIR):
os.makedirs(HHF_TEMPLATE_DIR)
TMP_DIR = os.path.join(MEDIA_ROOT, 'hhnb', 'tmp')
if not os.path.exists(TMP_DIR):
- os.mkdir(TMP_DIR)
+ os.makedirs(TMP_DIR)
MODEL_CATALOG_FILTER = {
@@ -219,12 +220,15 @@
OIDC_OP_TOKEN_ENDPOINT = 'https://iam.ebrains.eu/auth/realms/hbp/protocol/openid-connect/token'
OIDC_OP_USER_ENDPOINT = 'https://iam.ebrains.eu/auth/realms/hbp/protocol/openid-connect/userinfo'
+
+# OIDC_TIMEOUT = 1
+
+
LOGIN_REDIRECT_URL = "/hh-neuron-builder"
LOGOUT_REDIRECT_URL = "/hh-neuron-builder"
LOGIN_URL = 'oidc_authentication_init'
-
# Logging
# Set LOG_ROOT_PATH for logging files
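The `os.mkdir` to `os.makedirs` change matters because `TMP_DIR` sits two levels below `MEDIA_ROOT` (`.../hhnb/tmp`): `mkdir` fails when the intermediate `hhnb` directory is missing, while `makedirs` creates the whole chain. A quick sketch under a throwaway directory:

```python
import os
import tempfile

root = tempfile.mkdtemp()
tmp_dir = os.path.join(root, 'hhnb', 'tmp')

try:
    os.mkdir(tmp_dir)            # fails: parent 'hhnb' does not exist yet
except FileNotFoundError as e:
    print('mkdir failed:', e)

os.makedirs(tmp_dir)             # creates 'hhnb' and 'tmp' in one call
print(os.path.isdir(tmp_dir))    # -> True
```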
diff --git a/hhnb/core/conf/__init__.py b/hhnb/core/conf/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/hhnb/core/conf/exec_files_conf.py b/hhnb/core/conf/exec_files_conf.py
index 65e91a8d..287bfcf3 100644
--- a/hhnb/core/conf/exec_files_conf.py
+++ b/hhnb/core/conf/exec_files_conf.py
@@ -1,4 +1,5 @@
import os
+import json
class ExecFileConf:
@@ -8,26 +9,32 @@ class ExecFileConf:
@staticmethod
- def write_nsg_exec(dst_dir, max_gen, offspring):
+ def write_nsg_exec(dst_dir, max_gen, offspring, job_name, mode='start'):
"""
- Write the excecution script for the NSG system.
+ Write the execution script for the NSG system.
Parameters
----------
dst_dir : str
- destination folder where the file will be writen.
+ destination folder where the file will be written.
max_gen : int
maximum number of generations used by opt_neuron.
offspring : int
- number of individuals in offspring useb by opt_neuron.
+ number of individuals in offspring used by opt_neuron.
+ job_name : str
+ the name of the job.
+ mode : str, optional, default='start'
+ select the action mode to run the job on the HPC. Can be 'start' or 'resume'.
"""
buffer = \
f"""
import os
+import json
-os.system('python3 opt_neuron.py --max_ngen={max_gen} --offspring_size={offspring} --start --checkpoint ./checkpoints/checkpoint.pkl')
-
+os.system('python3 opt_neuron.py --max_ngen={max_gen} --offspring_size={offspring} --{mode} --checkpoint ./checkpoints/checkpoint.pkl')
+with open('resume_job_settings.json', 'w') as fd:
+ json.dump({json.dumps({'offspring_size': offspring, 'max_gen': max_gen, 'job_name': job_name, 'hpc': 'nsg',})}, fd)
"""
try:
with open(os.path.join(dst_dir, 'init.py'), 'w') as fd:
@@ -35,22 +42,25 @@ def write_nsg_exec(dst_dir, max_gen, offspring):
except Exception as e:
raise e
-
@staticmethod
- def write_daint_exec(dst_dir, folder_name, offspring, max_gen, ):
+ def write_daint_exec(dst_dir, folder_name, offspring, max_gen, job_name, mode='start'):
"""
- Write the excecution script for the Piz-Daint (UNICORE) system.
+ Write the execution script for the Piz-Daint (UNICORE) system.
Parameters
----------
dst_dir : str
- destination folder where the file will be writen.
+ destination folder where the file will be written.
folder_name : str
the model root folder name.
max_gen : int
maximum number of generations used by opt_neuron.
offspring : int
- number of individuals in offspring useb by opt_neuron.
+ number of individuals in offspring used by opt_neuron.
+ job_name : str
+ the name of the job.
+ mode : str, optional, default='start'
+ select the action mode to run the job on the HPC. Can be 'start' or 'resume'.
"""
buffer_zipfolder = \
f"""
@@ -115,7 +125,8 @@ def zipdir(path, ziph):
sleep 30
srun ipengine --profile=${IPYTHON_PROFILE} &
CHECKPOINTS_DIR="checkpoints"
-BLUEPYOPT_SEED=1 python opt_neuron.py --offspring_size={offspring} --max_ngen={max_gen} --start --checkpoint "${CHECKPOINTS_DIR}/checkpoint.pkl"
+BLUEPYOPT_SEED=1 python opt_neuron.py --offspring_size={offspring} --max_ngen={max_gen} --{mode} --checkpoint "${CHECKPOINTS_DIR}/checkpoint.pkl"
+echo '{json.dumps({'offspring_size': offspring, 'max_gen': max_gen, 'job_name': job_name, 'hpc': 'cscs'})}' > resume_job_settings.json
python zipfolder.py
"""
try:
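To see what the reworked NSG buffer actually emits, here is a hedged rendering of the generated `init.py` for sample arguments (all values illustrative): the script runs `opt_neuron.py` with the chosen mode and drops a `resume_job_settings.json` that later re-submissions can read back.

```python
import json

max_gen, offspring, job_name, mode = 100, 10, 'my_job', 'start'

# same f-string shape as ExecFileConf.write_nsg_exec; the inner json.dumps
# is rendered at generation time, so the emitted script contains a literal dict
buffer = f"""
import os
import json

os.system('python3 opt_neuron.py --max_ngen={max_gen} --offspring_size={offspring} --{mode} --checkpoint ./checkpoints/checkpoint.pkl')

with open('resume_job_settings.json', 'w') as fd:
    json.dump({json.dumps({'offspring_size': offspring, 'max_gen': max_gen, 'job_name': job_name, 'hpc': 'nsg'})}, fd)
"""
print(buffer)
```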
diff --git a/hhnb/core/job_handler.py b/hhnb/core/job_handler.py
index b5303c2d..24b8163b 100644
--- a/hhnb/core/job_handler.py
+++ b/hhnb/core/job_handler.py
@@ -70,7 +70,7 @@ def is_job_expired(job_details):
class JobHandler:
"""
- Useful class to easly handle jobs, and the relative files,
+ Useful class to easily handle jobs, and their related files,
on the selected HPC system. This class is intended
to be used by calling its static methods; for this reason
it is not recommended to instantiate a JobHandler object
@@ -91,14 +91,13 @@ class HPCException(Exception):
def __init__(self):
self._SA_ROOT_URL = 'https://bspsa.cineca.it/'
- self._SA_DAINT_JOB_URL = self._SA_ROOT_URL + 'jobs/pizdaint/hhnb_daint_cscs/'
- self._SA_DAINT_FILES_URL = self._SA_ROOT_URL + 'files/pizdaint/hhnb_daint_cscs/'
- self._SA_NSG_JOB_URL = self._SA_ROOT_URL + 'jobs/nsg/hhnb_nsg/'
- self._SA_NSG_FILES_URL = self._SA_ROOT_URL + 'files/nsg/hhnb_nsg/'
+ self._SA_JOBS_URL = self._SA_ROOT_URL + 'jobs/{}/{}/'
+ self._SA_FILES_URL = self._SA_ROOT_URL + 'files/{}/{}/'
self._NSG_URL = 'https://nsgr.sdsc.edu:8443/cipresrest/v1'
self._DAINT_URL = 'https://brissago.cscs.ch:8080/DAINT-CSCS/rest/core'
self._DAINT_CSCS = 'DAINT-CSCS'
+ self._SA = 'SA'
self._SA_CSCS = 'SA-CSCS'
self._NSG = 'NSG'
self._SA_NSG = 'SA-NSG'
@@ -167,8 +166,7 @@ def _submit_on_nsg(self, username, password, zip_file, settings):
hhnb.core.response.ResponseUtil
the result of the submission.
"""
- zip_name = os.path.split(zip_file)[1]
- payload = self._get_nsg_payload(job_name=zip_name.split('.')[0],
+ payload = self._get_nsg_payload(job_name=settings['job_name'],
core_num=settings['core-num'],
node_num=settings['node-num'],
runtime=settings['runtime'])
@@ -198,7 +196,7 @@ def _submit_on_nsg(self, username, password, zip_file, settings):
if not r.status_code == 200:
return ResponseUtil.ko_response(r.text)
- return ResponseUtil.ok_response(messages.JOB_SUBMITTED.format('NSG'))
+ return ResponseUtil.ok_response(messages.JOB_SUBMITTED.format('NSG'))
return ResponseUtil.ko_response(r.text)
@@ -216,7 +214,7 @@ def _get_nsg_jobs(self, username, password):
Returns
-------
dict
- a dictionaire of all submitted jobs.
+ a dictionary of all submitted jobs.
Raises
------
@@ -315,7 +313,7 @@ def _get_nsg_job_results(self, username, password, job_id):
logger.debug(f'requests: {r.url} with headers: {r.headers}')
if r.status_code != 200:
logger.error(f'CODE: {r.status_code}, CONTENT: {r.content}')
- raise self.HPCException(messages.JOB_RESULTS_FETCH_ERRROR)
+ raise self.HPCException(messages.JOB_RESULTS_FETCH_ERROR)
file_list = {}
root = xml.etree.ElementTree.fromstring(r.text)
@@ -351,7 +349,7 @@ def _get_unicore_job_description(self, command, job_name, node_num,
Returns the UNICORE job description.
The job description is a payload that is formed by the command
to run, the name of the job, and the resources list to
- reserve for the job that are substracted from the project.
+ reserve for the job that are subtracted from the project.
Parameters
----------
@@ -455,7 +453,7 @@ def _submit_on_unicore(self, hpc, token, zip_file, settings):
zip_name = os.path.split(zip_file)[1]
job_description = self._get_unicore_job_description(
command=self._get_unicore_command(zip_name),
- job_name=zip_name.split('.')[0],
+ job_name=settings['job_name'],
node_num=settings['node-num'],
core_num=settings['core-num'],
runtime=settings['runtime'],
@@ -464,7 +462,60 @@ def _submit_on_unicore(self, hpc, token, zip_file, settings):
client = self._initialize_unicore_client(hpc, token)
job = client.new_job(job_description=job_description, inputs=[zip_file])
logger.info(f'job submitted on UNICORE Client: {job}')
- return ResponseUtil.ok_response(messages.JOB_SUBMITTED.format(hpc))
+ return ResponseUtil.ok_response(messages.JOB_SUBMITTED.format(hpc))
+
+ def _reoptimize_model_on_unicore(self, job_id, job_name, hpc, max_gen,
+ node_num, core_num, runtime, token):
+ """
+ Submit job to reoptimize model on unicore. The job executes a dedicated script on
+ CSCS-PizDAINT that make a copy of the output of the previous optimization and then
+ resume the optimization starting from its checkpoint.
+
+ Args:
+ job_id : str
+ the job id.
+ job_name : str
+ the job name.
+ hpc : str
+ the HPC in which submit the job.
+ max_gen : int
+ the new generation parameter for the optimization.
+ node_num : int
+ the node number allocated for the optimization.
+ core_num : int
+ the core number allocated for the optimization.
+ runtime : str
+ the maximum amount of time for the job to be completed.
+ token : str
+ the EBRAINS user token.
+
+ Returns:
+ Response : (status_code, content)
+ the response of the submission request.
+ """
+ job_description = {
+ 'User precommand': f'cp -r /scratch/snx3000/unicore/FILESPACE/{job_id}/{job_name}/ .',
+ 'Executable': f'/apps/hbp/ich002/cnr-software-utils/hhnb/reoptimize_model.sh {job_name} {max_gen}',
+ 'User postcommand': f'mv {job_name} reopt_{job_name}; cd reopt_{job_name}; ' + \
+ f'sed -i "s/{job_name}/reopt_{job_name}/" zipfolder.py; ' + \
+ f'python ./zipfolder.py',
+ 'Name': 'reopt_' + job_name,
+ 'Resources': {
+ 'Nodes': node_num,
+ 'CPUsPerNode': core_num,
+ 'Runtime': runtime,
+ 'NodeConstraints': 'mc',
+ 'Project': 'ich002'
+ },
+ 'Tags': self._TAGS,
+ "haveClientStageIn": "false",
+ }
+ client = self._initialize_unicore_client(hpc, token)
+ job = client.new_job(job_description=job_description)
+ job.start()
+ logger.info(f'reoptimize model job {job_id} submitted on UNICORE Client')
+ return ResponseUtil.ok_response(messages.JOB_SUBMITTED.format(hpc))
+
def _get_unicore_jobs(self, hpc, token):
"""
@@ -582,7 +633,7 @@ def _get_service_account_headers(self, token, zip_name=None, payload=None):
headers.update({'payload': json.dumps(payload)})
return headers
- def _submit_on_service_account(self, hpc, token, zip_file, settings):
+ def _submit_on_service_account(self, hpc, project, token, zip_file, settings):
"""
Submit a job behind the Service Account in the selected HPC
system. To submit a job, an available HPC system must be chosen
@@ -610,35 +661,33 @@ def _submit_on_service_account(self, hpc, token, zip_file, settings):
"""
zip_name = os.path.split(zip_file)[1]
payload = self._get_service_account_payload(
- command=self._get_unicore_command(zip_name) if hpc=='SA-CSCS' else None,
- tool=self._NSG_TOOL if hpc=='SA-NSG' else None,
+ command=self._get_unicore_command(zip_name) if hpc=='pizdaint' else None,
+ tool=self._NSG_TOOL if hpc=='nsg' else None,
node_num=settings['node-num'],
core_num=settings['core-num'],
runtime=settings['runtime'],
- title=zip_name.split('.')[0]
+ title=settings['job_name']
)
headers = self._get_service_account_headers(token, zip_name, payload)
job_file = {'file': open(zip_file, 'rb')}
- if hpc == self._SA_CSCS:
- sa_endpoint = self._SA_DAINT_JOB_URL
- elif hpc == self._SA_NSG:
- sa_endpoint =self._SA_NSG_JOB_URL
+ sa_endpoint = self._SA_JOBS_URL.format(hpc, project)
+
r = requests.post(url=sa_endpoint, headers=headers, files=job_file)
logger.debug(f'requests: {r.url} with headers: {r.headers} and files: {job_file}')
+ print(f"SERVICE ACCOUNT RESPONSE: {r.status_code}, {r.content}")
if r.status_code >= 400:
logger.error(f'CODE: {r.status_code}, CONTENT: {r.content}')
+ if r.status_code == 500:
+ raise self.ServiceAccountException(r.content, r.status_code)
return ResponseUtil.ko_response(r.text)
- message = 'Job submitted'
- if hpc == self._SA_CSCS:
- message = messages.JOB_SUBMITTED.format('SA-CSCS')
- elif hpc == self._SA_NSG:
- message = messages.JOB_SUBMITTED.format('SA-NSG')
+ message = messages.JOB_SUBMITTED.format(
+ hpc.upper() + ' using the Service Account project ' + project)
return ResponseUtil.ok_response(message)
- def _get_service_account_jobs(self, hpc, token):
+ def _get_service_account_jobs(self, hpc, project, token):
"""
Returns a list of all jobs submitted using the Service Account
by the user to the selected HPC system.
@@ -661,10 +710,7 @@ def _get_service_account_jobs(self, hpc, token):
if something happens on the Service Account side
"""
headers = self._get_service_account_headers(token)
- if hpc == self._SA_CSCS:
- sa_endpoint = self._SA_DAINT_JOB_URL
- elif hpc == self._SA_NSG:
- sa_endpoint = self._SA_NSG_JOB_URL
+ sa_endpoint = self._SA_JOBS_URL.format(hpc, project)
r = requests.get(url=sa_endpoint, headers=headers)
logger.debug(f'requests: {r.url} with headers: {r.headers}')
if r.status_code != 200:
@@ -672,7 +718,7 @@ def _get_service_account_jobs(self, hpc, token):
raise self.ServiceAccountException(r.content, r.status_code)
return r.json()
- def _get_service_account_job_results(self, hpc, token, job_id):
+ def _get_service_account_job_results(self, hpc, project, token, job_id):
"""
Returns a list of the files once the job ends.
The list is formed by a json object per file
@@ -697,17 +743,14 @@ def _get_service_account_job_results(self, hpc, token, job_id):
self.JobsFilesNotFound
if the job is not found for the selected HPC system.
self.ServiceAccountException
- if something happends on the Service Account side or
+ if something happens on the Service Account side or
if it is down.
"""
headers = self._get_service_account_headers(token)
- if hpc == self._SA_CSCS:
- sa_endpoint = self._SA_DAINT_FILES_URL + job_id + '/'
- elif hpc == self._SA_NSG:
- sa_endpoint = self._SA_NSG_FILES_URL + job_id + '/'
+ sa_endpoint = self._SA_FILES_URL.format(hpc, project)
- r = requests.get(url=sa_endpoint, headers=headers)
+ r = requests.get(url=sa_endpoint + job_id + '/', headers=headers)
logger.debug(f'requests: {r.url} with headers: {r.headers}')
if r.status_code >= 400:
@@ -719,9 +762,9 @@ def _get_service_account_job_results(self, hpc, token, job_id):
file_list = []
for f in r.json():
- if hpc == self._SA_CSCS:
+ if hpc == 'pizdaint':
file_list.append({'id': f, 'name': f})
- elif hpc == self._SA_NSG:
+ elif hpc == 'nsg':
file_list.append({'id': f['fileid'], 'name': f['filename']})
return file_list
@@ -729,8 +772,8 @@ def _get_service_account_job_results(self, hpc, token, job_id):
@classmethod
def submit_job(cls, user, zip_file, settings):
"""
- A static method to easly submit a job in the HPC system.
- A settings dictionaire must be provided with the "hpc" key
+ A static method to easily submit a job in the HPC system.
+ A settings dictionary must be provided with the "hpc" key
that indicates in which HPC system the job should be submitted.
A ResponseUtil object will be returned.
@@ -757,15 +800,15 @@ def submit_job(cls, user, zip_file, settings):
elif settings['hpc'] == job_handler._DAINT_CSCS:
return job_handler._submit_on_unicore(job_handler._DAINT_CSCS, user.get_token(),
zip_file, settings)
- elif settings['hpc'] == job_handler._SA_CSCS or settings['hpc'] == job_handler._SA_NSG:
- return job_handler._submit_on_service_account(settings['hpc'], user.get_token(),
- zip_file, settings)
+ elif settings['hpc'] == job_handler._SA:
+ return job_handler._submit_on_service_account(settings['sa-hpc'], settings['sa-project'],
+ user.get_token(), zip_file, settings)
return ResponseUtil.ko_response(messages.GENERAL_ERROR)
@classmethod
- def fetch_jobs_list(cls, hpc, user):
+ def fetch_jobs_list(cls, hpc, user, sa_hpc=None, sa_project=None):
"""
- A static method to easly fetch the jobs list from the selected HPC system.
+ A static method to easily fetch the jobs list from the selected HPC system.
Parameters
----------
@@ -773,11 +816,17 @@ def fetch_jobs_list(cls, hpc, user):
the HPC system according to the available ones
user : hhnb.core.user.HhnbUser
the user who wants to fetch the jobs
-
+ sa_hpc : str, optional
+ select which HPC will be used behind the Service Account. Only works
+ with the "hpc" parameter set to "SA"
+ sa_project : str, optional
+ select which Service Account project will be used to fetch jobs. Only
+ works with the "hpc" parameter set to "SA"
+
Returns
-------
dict
- a dictionaire containing the list of job
+ a dictionary containing the list of jobs
ordered by the date from the most recent
"""
logger.info(LOG_ACTION.format(user, 'fetch %s jobs list' % hpc))
@@ -800,8 +849,8 @@ def fetch_jobs_list(cls, hpc, user):
}
jobs.update(job)
- elif hpc == job_handler._SA_CSCS or hpc == job_handler._SA_NSG:
- raw_jobs = job_handler._get_service_account_jobs(hpc, user.get_token())
+ elif hpc == job_handler._SA:
+ raw_jobs = job_handler._get_service_account_jobs(sa_hpc, sa_project, user.get_token())
for raw_job in raw_jobs:
job = {raw_job['job_id']: {
'workflow_id': raw_job['title'],
@@ -815,18 +864,19 @@ def fetch_jobs_list(cls, hpc, user):
ordered_jobs = OrderedDict(sorted(jobs.items(),
key=lambda x: x[1]['date'],
reverse=True))
+
logger.info(LOG_ACTION.format(user, 'jobs list: %s' % ordered_jobs))
return {'jobs': ordered_jobs}
@classmethod
- def fetch_job_files(cls, hpc, job_id, user):
+ def fetch_job_files(cls, hpc, job_id, user, sa_hpc=None, sa_project=None):
"""
- A static method to easly fetch the job's files that are produced
+ A static method to easily fetch the job's files that are produced
once the HPC finishes processing the job. To fetch the list,
the parameters that must be passed are the HPC in which the job
was submitted, the job id to identify the correct job, and the
- user as owner of the job. The result will be a dictionaire
+ user as owner of the job. The result will be a dictionary
containing the "root_url" that is required to download the files,
the list of files and, optionally, a header that must be passed
in the download request to correctly identify the user when the HPC
@@ -840,14 +890,21 @@ def fetch_job_files(cls, hpc, job_id, user):
the job id
user : hhnb.core.user.HhnbUser
the owner of the job
+ sa_hpc : str, optional
+ select which HPC will be used behind the Service Account. Only
+ works with the "hpc" parameter set to "SA"
+ sa_project : str, optional
+ select the job's project where the job was submitted. Only works
+ with the "hpc" parameter set to "SA"
Returns
-------
dict
- a dictionaire containing all required information to download the files
+ a dictionary containing all required information to download the files
"""
logger.info(LOG_ACTION.format(user, 'fetch files of job: %s in %s' % (job_id, hpc)))
job_handler = cls()
+ file_list = None
if hpc == job_handler._NSG:
raw_file_list = job_handler._get_nsg_job_results(user.get_nsg_user().get_username(),
user.get_nsg_user().get_password(),
@@ -860,22 +917,22 @@ def fetch_job_files(cls, hpc, job_id, user):
'password': user.get_nsg_user().get_password()
}
- if hpc == job_handler._DAINT_CSCS:
+ elif hpc == job_handler._DAINT_CSCS:
raw_file_list = job_handler._get_unicore_job_results(hpc, user.get_token(), job_id)
file_list = {
'root_url': 'unicore',
'file_list': raw_file_list
}
- if hpc == job_handler._SA_CSCS or hpc == job_handler._SA_NSG:
- raw_file_list = job_handler._get_service_account_job_results(hpc, user.get_token(), job_id)
- if hpc == job_handler._SA_CSCS:
- root_url = job_handler._SA_DAINT_FILES_URL + job_id
- elif hpc == job_handler._SA_NSG:
- root_url = job_handler._SA_NSG_FILES_URL + job_id + '/'
+ elif hpc == job_handler._SA:
+ raw_file_list = job_handler._get_service_account_job_results(sa_hpc, sa_project, user.get_token(), job_id)
+ root_url = job_handler._SA_FILES_URL.format(sa_hpc, sa_project) + job_id
+ if sa_hpc == 'nsg':
+ root_url += '/'
file_list = {
'root_url': root_url,
'file_list': raw_file_list,
'headers': {'Authorization': 'Bearer ' + user.get_token()}
}
+
return file_list
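With the per-endpoint URLs collapsed into two templates and the `SA-CSCS`/`SA-NSG` split replaced by a single `SA` route, callers now select the Service Account target through the settings dict. A sketch of the new shapes; the project name `hhnb_daint_cscs` comes from the old hard-coded URL, the remaining values are illustrative:

```python
SA_ROOT_URL = 'https://bspsa.cineca.it/'
SA_JOBS_URL = SA_ROOT_URL + 'jobs/{}/{}/'

# the old fixed endpoint is now just one instantiation of the template
print(SA_JOBS_URL.format('pizdaint', 'hhnb_daint_cscs'))
# -> https://bspsa.cineca.it/jobs/pizdaint/hhnb_daint_cscs/

# illustrative settings payload for JobHandler.submit_job
settings = {
    'hpc': 'SA',                    # route through the Service Account
    'sa-hpc': 'pizdaint',           # which HPC sits behind it
    'sa-project': 'hhnb_daint_cscs',
    'job_name': 'my_optimization',
    'node-num': 2,
    'core-num': 24,
    'runtime': '4h',
    'gen-max': 100,
    'offspring': 10,
    'mode': 'start',
}
```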
diff --git a/hhnb/core/lib/exception/model_exception.py b/hhnb/core/lib/exception/model_exception.py
index a9471af5..8699b8e7 100644
--- a/hhnb/core/lib/exception/model_exception.py
+++ b/hhnb/core/lib/exception/model_exception.py
@@ -14,4 +14,10 @@ class MorphologyConfigError(Exception):
pass
class InvalidMorphologyDirectory(Exception):
+ pass
+
+class InvalidMorphologyFile(Exception):
+ pass
+
+class InvalidMechanismFile(Exception):
pass
\ No newline at end of file
diff --git a/hhnb/core/lib/exception/workflow_exception.py b/hhnb/core/lib/exception/workflow_exception.py
index 2363cd62..0077e39e 100644
--- a/hhnb/core/lib/exception/workflow_exception.py
+++ b/hhnb/core/lib/exception/workflow_exception.py
@@ -19,8 +19,13 @@ class EmptyWorkflow(Exception):
pass
-class MechanismsProcessError(Exception): # CalledProcessError):
+class MechanismsProcessError(CalledProcessError):
pass
-class AnalysisProcessError(Exception): # CalledProcessError):
+
+class AnalysisProcessError(CalledProcessError):
pass
+
+
+class UnknownParametersTemplate(Exception):
+ pass
\ No newline at end of file
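Re-parenting `MechanismsProcessError` and `AnalysisProcessError` onto `subprocess.CalledProcessError` changes their contract: they must now be raised with a return code and a command, and they inherit the standard `output`/`stderr` attributes. A minimal sketch of the new usage:

```python
from subprocess import CalledProcessError

class MechanismsProcessError(CalledProcessError):
    pass

try:
    # illustrative failure of the mechanisms compilation step
    raise MechanismsProcessError(1, 'nrnivmodl', stderr=b'mod file error')
except MechanismsProcessError as e:
    print(e.returncode, e.cmd, e.stderr)  # -> 1 nrnivmodl b'mod file error'
```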
diff --git a/hhnb/core/lib/model.py b/hhnb/core/lib/model.py
index 9ca36f5c..353fe8cf 100644
--- a/hhnb/core/lib/model.py
+++ b/hhnb/core/lib/model.py
@@ -1,8 +1,6 @@
-""" This package provide some useful classes and methods to easly handle a model instance. """
+""" This package provide some useful classes and methods to easily handle a model instance. """
-from typing import Type
from .exception.model_exception import *
-from multipledispatch import dispatch
import os
import json
@@ -37,10 +35,7 @@ def __str__(self):
"""
String representation of the Features instance.
"""
- print('', sep='\n')
+ return f''
def set_features(self, features):
"""
@@ -123,7 +118,7 @@ class Morphology:
"""
An easy way to handle the morphology of the model.
"""
-
+
def __init__(self, morphology=None, key=None):
"""
Instantiate a Morphology object.
@@ -145,10 +140,7 @@ def __str__(self):
"""
String representation of the Features instance.
"""
- print('', sep='\n')
+ return f''
def set_morphology(self, morphology, key=None):
"""
@@ -162,7 +154,7 @@ def set_morphology(self, morphology, key=None):
the morphology file path. The morphology must be in ".asc" format.
key : str, optional
the model global key to generate the config file, if set to None
- a temporarely key will be used.
+ a temporary key will be used.
Raises
------
@@ -172,9 +164,12 @@ def set_morphology(self, morphology, key=None):
if the path is pointing a directory.
"""
if not os.path.exists(morphology):
- raise FileNotFoundError('%s not found' % morphology)
+ raise FileNotFoundError('%s not found.' % morphology)
if not os.path.isfile(morphology):
- raise IsADirectoryError('%s is a directory' % morphology)
+ raise IsADirectoryError('%s is a directory.' % morphology)
+ if not morphology.endswith('.asc'):
+ raise InvalidMorphologyFile('%s is not in the ".asc" morphology format.' % morphology)
+
self._morphology = morphology
self._MORPHOLOGY = True
if not key:
@@ -245,12 +240,13 @@ def __init__(self, key, **kwargs):
self._key = key
self.set_features(Features())
self.set_morphology(Morphology())
+
if 'features' in kwargs and kwargs['features']:
self.set_features(features=kwargs['features'])
if 'morphology' in kwargs and kwargs['morphology']:
self.set_morphology(kwargs['morphology'])
if 'protocols' in kwargs and kwargs['protocols']:
- self.set_protocols(protocols=kwargs['protocols'])
+ self.set_features(protocols=kwargs['protocols'])
if 'parameters' in kwargs and kwargs['parameters']:
self.set_parameters(kwargs['parameters'])
if 'mechanisms' in kwargs and kwargs['mechanisms']:
@@ -259,7 +255,7 @@ def __init__(self, key, **kwargs):
def set_features(self, *args, **kwargs):
"""
Set the model features and protocols.
- This method accepts a unique Features instance as argoument,
+ This method accepts a single Features instance as argument,
or can be called by passing a "features" and/or "protocols" kwargs with the relative file
Raises
@@ -276,7 +272,7 @@ def set_features(self, *args, **kwargs):
for k in kwargs.keys():
if k != 'features' and k != 'protocols':
raise TypeError('set_features() got an unexpected\
- keyword argoument %s' % k)
+ keyword argument %s' % k)
if 'features' in kwargs.keys():
self._features.set_features(kwargs['features'])
if 'protocols' in kwargs.keys():
@@ -302,7 +298,7 @@ def set_morphology(self, morphology):
def set_mechanisms(self, mechanisms):
"""
Set the mechanisms files for the model.
- The mechanisms file can be a list of file, a directory cotaining the mechanisms file or a list of files.
+ The mechanisms can be passed as a list of files, a single file path, or a directory containing the mechanism files.
Parameters
----------
@@ -316,20 +312,27 @@ def set_mechanisms(self, mechanisms):
IsADirectoryError
if it tries to access a file but is a directory.
"""
+ self._mechanisms = []
+
if type(mechanisms) == list:
self._mechanisms = mechanisms
- if type(mechanisms) == str:
+
+ elif type(mechanisms) == str:
if os.path.isdir(mechanisms):
self._mechanisms = [
os.path.join(mechanisms, m) for m in os.listdir(mechanisms)
]
else:
- self._mechanisms = mechanisms
+ self._mechanisms.append(mechanisms)
+
for m in self._mechanisms:
if not os.path.exists(m):
raise FileNotFoundError('%s not found' % m)
if not os.path.isfile(m):
raise IsADirectoryError('%s is a directory' % m)
+ if not m.endswith('.mod'):
+ raise InvalidMechanismFile('%s is not a valid mechanism file. '
+ 'Only the ".mod" extension is accepted.' % m)
if not self._mechanisms:
self._MECHANISMS = False
else:
@@ -364,7 +367,7 @@ def get_features(self):
def get_mechanisms(self):
""" Get the mechanism file list. """
- return self._mechansisms
+ return self._mechanisms
def get_parameters(self):
""" Get the parameters file path. """
diff --git a/hhnb/core/model.py b/hhnb/core/model.py
index a44f3c00..64514975 100644
--- a/hhnb/core/model.py
+++ b/hhnb/core/model.py
@@ -5,7 +5,6 @@
"""
-import datetime
from hhnb.core.lib.exception.model_exception import *
from hhnb.core.lib.model import *
@@ -41,7 +40,7 @@ def _write_file_to_directory(src_file, dst_dir, dst_file=None):
The destination file can be also a json file and in this case,
the output will be formatted as json. In the other case the
source file will be treated as a binary file. Furthermore
- the copy can be explicitely named using the "dst_file" parameter
+ the copy can be explicitly named using the "dst_file" parameter
otherwise the source file name will be used.
Parameters
@@ -84,7 +83,7 @@ def __init__(self, model_dir, **kwargs):
"""
Initialize the Model object by reading all the files present
in the "model_dir" folder.
- For keyword argouments read the hhnb.core.model.Model doc.
+ For keyword arguments read the hhnb.core.model.Model doc.
Parameters
----------
@@ -100,7 +99,6 @@ def __init__(self, model_dir, **kwargs):
super().__init__(key, **kwargs)
self._ETRACES = False
-
def _check_if_file_exists(self, **kwargs):
"""
Private method that checks if a file already exists
@@ -114,12 +112,12 @@ def _check_if_file_exists(self, **kwargs):
Raises
------
TypeError
- if the argoument passed is wrong
+ if the argument passed is wrong
"""
if len(kwargs) > 1:
raise TypeError(f'{__name__} takes only 1 keyword argument')
- keyword_list = ['paramters', 'protocols', 'features', 'morphology']
+ keyword_list = ['parameters', 'protocols', 'features', 'morphology']
key = list(kwargs.keys())[0]
if key not in keyword_list:
@@ -138,49 +136,10 @@ def _check_if_file_exists(self, **kwargs):
return False
return c
- # def set_features(self, features=None, protocols=None):
- # f = self._features.get_features()
- # if not self._check_if_file_exists(features=f):
- # shutil.copy(f)
- # super().set_features(features=features, protocols=protocols)
-
- # def set_features(self, features=None, protocols=None):
- # if not features and not protocols:
- # if os
-
- # def set_morphology(self, morphology=None):
- # if not morphology:
- # morphology = os.path.join(
- # self._model_dir,
- # os.listdir(os.path.join(self._model_dir, 'morphology'))[0]
- # )
-
- # super().set_morphology(morphology)
-
- # def set_mechanisms(self, mechansisms=None):
- # if not mechansisms:
- # mechansisms = os.path.join(self._model_dir, 'mechanisms')
- # super().set_mechanisms(mechansisms=mechansisms)
-
- # def set_parameters(self, parameters=None):
- # if not parameters:
- # parameters = os.path.join(self._model_dir, 'config', 'parameters.json')
- # super().set_parameters(parameters=parameters)
-
-
- # @classmethod
- # def from_zip(cls, zip_model):
- # model = cls()
- # # TODO: to be complete
- # tmp_unzip_model = os.path.join(TMP_DIR, datetime.datetime.now())
- # shutil.unpack_archive(zip_model, tmp_unzip_model)
- # pass
-
-
@classmethod
def from_dir(cls, model_dir, key):
"""
- Thie method is used to initialize automatically a Model object
+ This method is used to automatically initialize a Model object
by passing the model root folder as parameter, reads all the
files and the structure of the folder subtree and returns a
Model object with the "key" set as the model global key.
@@ -261,9 +220,9 @@ def from_dir(cls, model_dir, key):
def update_optimization_files(self, model_dir):
"""
This method updates the current model optimization files using
- the new ones in the "model_dir" folder passed as argoument.
+ the new ones in the "model_dir" folder passed as argument.
- With "optimization files" is intendend any files that belog to
+ With "optimization files" is intended any files that belong to
the following categories: ["parameters", "morphology", "mechanisms"].
Parameters
@@ -276,30 +235,60 @@ def update_optimization_files(self, model_dir):
FileNotFoundError
if any optimization file is not found
"""
+
+ src_config_dir = os.path.join(model_dir, 'config')
+ src_morph_dir = os.path.join(model_dir, 'morphology')
+ src_mechans_dir = os.path.join(model_dir, 'mechanisms')
+
+ dst_config_dir = os.path.join(self._model_dir, 'config')
+ dst_morph_dir = os.path.join(self._model_dir, 'morphology')
+ dst_mechans_dir = os.path.join(self._model_dir, 'mechanisms')
+
try:
# parameters
- parameters = shutil.copy(os.path.join(model_dir, 'config', 'parameters.json'),
- os.path.join(self._model_dir, 'config'))
- self.set_parameters(parameters)
+ if os.path.exists(src_config_dir) and \
+ os.path.exists(os.path.join(src_config_dir, 'parameters.json')):
+ parameters = shutil.copy(os.path.join(src_config_dir, 'parameters.json'),
+ dst_config_dir)
+ self.set_parameters(parameters)
+
# morphology
- for m in os.listdir(os.path.join(model_dir, 'morphology')):
- morph = shutil.copy(os.path.join(model_dir, 'morphology', m),
- os.path.join(self._model_dir, 'morphology'))
- conf_file = shutil.copy(os.path.join(model_dir, 'config', 'morph.json'),
- os.path.join(self._model_dir, 'config'))
- self.set_morphology(morphology=morph)
+ if os.path.exists(src_morph_dir) and len(os.listdir(src_morph_dir)) == 1:
+ for m in os.listdir(dst_morph_dir):
+ os.remove(os.path.join(dst_morph_dir, m))
+ try:
+ os.remove(os.path.join(dst_config_dir, 'morph.json'))
+ except FileNotFoundError:
+ pass
+
+ for m in os.listdir(src_morph_dir):
+ morph = shutil.copy(os.path.join(src_morph_dir, m),
+ os.path.join(dst_morph_dir, m))
+
+ self.set_morphology(morph)
+ with open(os.path.join(dst_config_dir, 'morph.json'), 'w') as morph_conf:
+ json.dump(self.get_morphology().get_config(), morph_conf)
+
+
# mechanisms
- for m in os.listdir(os.path.join(model_dir, 'mechanisms')):
- shutil.copy(os.path.join(model_dir, 'mechanisms', m),
- os.path.join(self._model_dir, 'mechanisms'))
- self.set_mechanisms(os.path.join(self._model_dir, 'mechanisms'))
- # python's files
+ if os.path.exists(src_mechans_dir) and len(os.listdir(src_mechans_dir)) > 0:
+ for m in os.listdir(dst_mechans_dir):
+ os.remove(os.path.join(dst_mechans_dir, m))
+
+ for m in os.listdir(src_mechans_dir):
+ shutil.copy(os.path.join(src_mechans_dir, m),
+ os.path.join(dst_mechans_dir, m))
+
+ self.set_mechanisms(dst_mechans_dir)
+
except FileNotFoundError as e:
raise FileNotFoundError(e)
def get_optimization_files_raw_status(self):
"""
- Returns a dictionaire with the optimization files as keys and
+ Returns a dictionary with the optimization files as keys and
their status (True if present, False otherwise) as a boolean value.
"""
return {
@@ -310,7 +299,7 @@ def get_optimization_files_raw_status(self):
def get_optimization_files_status(self):
"""
- Returns a dictionaire with the optimization files as keys and
+ Returns a dictionary with the optimization files as keys and
their status as a message.
"""
return {
@@ -338,7 +327,7 @@ class ModelUtil:
@staticmethod
def clone(model):
"""
- Static method that clone a Model object passed as argoument.
+ Static method that clones a Model object passed as argument.
Parameters
----------
@@ -350,7 +339,7 @@ def clone(model):
hhnb.core.model.Model
a new Model object with the same files and properties of the cloned one
"""
- return Model(
+ return ModelBase(
features=model.get_features(),
parameters=model.get_parameters(),
morphology=model.get_morphology(),
@@ -358,6 +347,29 @@ def clone(model):
key=model.get_key()
)
+ @staticmethod
+ def create_model_tree(model_dir):
+ """
+ Create model tree folder by passing the model_dir path.
+
+ Parameters
+ ----------
+ model_dir : str
+ the path where the model tree will be created.
+
+ Raises
+ ------
+ FileExistsError
+ if any subfolder already exists.
+ """
+
+ if not os.path.exists(model_dir):
+ os.mkdir(model_dir)
+ os.mkdir(os.path.join(model_dir, 'config'))
+ os.mkdir(os.path.join(model_dir, 'morphology'))
+ os.mkdir(os.path.join(model_dir, 'mechanisms'))
+ os.mkdir(os.path.join(model_dir, 'template'))
+
@staticmethod
def write_to_workflow(model, workflow_id):
"""
@@ -385,8 +397,8 @@ def write_to_workflow(model, workflow_id):
raise FileNotFoundError('%s path not found' % workflow_id)
model_dir = os.path.join(workflow_id, 'model')
if os.path.exists(model_dir):
- os.rmtree(model_dir)
- ModelUtil.create_model_subdir(model_dir)
+ shutil.rmtree(model_dir)
+ ModelUtil.create_model_tree(model_dir)
_write_file_to_directory(model.get_features().get_features(),
os.path.join(model_dir, 'config'), 'features.json')
_write_file_to_directory(model.get_features().get_protocols(),
@@ -463,7 +475,7 @@ def zip_model(model, dst_dir=None, zip_name=None):
def update_key(model, key=None):
"""
This static method updates the key of all Model's files with the
- new one passed as argoument and then it will be set as the Model
+ new one passed as argument and then it will be set as the Model
global key. Otherwise the files' keys are updated using the current
Model global key.
@@ -477,7 +489,7 @@ def update_key(model, key=None):
Raises
------
TypeError
- if the model object passed is not an istance of hhnb.core.model.Model
+ if the model object passed is not an instance of hhnb.core.model.Model
shutil.Error
if any error occurred when trying to update the files' key
"""
diff --git a/hhnb/core/response.py b/hhnb/core/response.py
index edceec9b..db2dec5a 100644
--- a/hhnb/core/response.py
+++ b/hhnb/core/response.py
@@ -3,13 +3,8 @@
"""
-from dis import dis
-from genericpath import isfile
-from os import stat
-from typing import Tuple
from django.http.response import JsonResponse, HttpResponse, FileResponse, HttpResponseNotAllowed
from multipledispatch import dispatch
-import json
import os
@@ -26,7 +21,7 @@ def _json_response(status_code, data, safe=False):
This private function takes the status code, some data and
return a JsonResponse object. Optionally the safe flag can
be set True (False by default) to prevent the serialization
- of non "dict" obejcts.
+ of non "dict" objects.
"""
return JsonResponse(data=data, status=status_code, safe=safe,
content_type='application/json')
@@ -162,7 +157,7 @@ def ko_json_response(data):
@dispatch(int, str)
def ko_json_response(code, data):
"""
- Returns an error JsonResponse with a custom erro code and json.
+ Returns a JsonResponse with a custom error code and json.
"""
return _json_response(code, data)
@@ -170,7 +165,7 @@ def ko_json_response(code, data):
@dispatch(int, dict)
def ko_json_response(code, data):
"""
- Returns an error JsonResponse with a custom erro code and json.
+ Returns a JsonResponse with a custom error code and json.
"""
return _json_response(code, data)
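`ko_json_response` is overloaded on argument types via `multipledispatch`, which is why the two docstrings above are nearly identical. A self-contained sketch of the pattern with simplified return values (tuples instead of the real `JsonResponse`):

```python
from multipledispatch import dispatch

@dispatch(int, str)
def ko_json_response(code, data):
    # str payloads are wrapped in a message object (illustrative)
    return (code, {'message': data})

@dispatch(int, dict)
def ko_json_response(code, data):
    # dict payloads are passed through unchanged
    return (code, data)

print(ko_json_response(400, 'bad request'))      # str overload
print(ko_json_response(404, {'error': 'gone'}))  # dict overload
```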
diff --git a/hhnb/core/security.py b/hhnb/core/security.py
index 8508c1be..f32e63da 100644
--- a/hhnb/core/security.py
+++ b/hhnb/core/security.py
@@ -70,7 +70,7 @@ def decrypt(cipher_text, at_time=None):
class Sign:
"""
This class is used to sign a file and to verify the integrity of the file
- by verifing its sign.
+ by verifying its sign.
"""
def __init__(self):
@@ -110,7 +110,7 @@ def get_data_sign(cls, data):
@classmethod
def verify_data_sign(cls, sign, data):
"""
- Verify if the sign is valide for the data.
+ Verify if the sign is valid for the data.
Parameters
----------
diff --git a/hhnb/core/user.py b/hhnb/core/user.py
index 23c390b7..a07d3faf 100644
--- a/hhnb/core/user.py
+++ b/hhnb/core/user.py
@@ -48,7 +48,7 @@ def __str__(self):
def _get_user_info(self):
"""
- Returns a json containing all the user informations.
+ Returns a json containing all the user information.
If any error occurred an UserInfoError will be raised.
"""
r = requests.get(url=OIDC_OP_USER_ENDPOINT,
@@ -154,7 +154,6 @@ def validate_credentials(self):
auth=(self._username, self._password),
headers={'cipres-appkey': NSG_KEY},
verify=False)
- print(r.status_code, r.content)
if r.status_code == 200:
return True
return False
diff --git a/hhnb/core/workflow.py b/hhnb/core/workflow.py
index d67d3a72..813d0b26 100644
--- a/hhnb/core/workflow.py
+++ b/hhnb/core/workflow.py
@@ -2,7 +2,7 @@
Workspace utils classes
"""
-from hh_neuron_builder.settings import MEDIA_ROOT, HHF_TEMPLATE_DIR, TMP_DIR, LOG_ROOT_PATH
+from hh_neuron_builder.settings import MEDIA_ROOT, HHF_TEMPLATE_DIR, HHF_PARAMETERS_TEMPLATE_DIR, TMP_DIR, LOG_ROOT_PATH
from hhnb.core.conf.exec_files_conf import ExecFileConf
@@ -13,6 +13,7 @@
from pyunicore.client import PathFile as UnicorePathFile
from datetime import datetime
from sys import prefix as env_prefix
+from pathlib import Path
import shutil
import os
@@ -51,6 +52,7 @@ def __init__(self, user_sub, workflow_id):
self._optimization_settings = os.path.join(self._workflow_path,
'optimization_settings.json')
+
if os.path.exists(self._model_dir) and any(os.scandir(self._model_dir)):
self._model = Model.from_dir(self._model_dir, key=workflow_id)
@@ -160,7 +162,7 @@ def generate_user_workflow_from_zip(cls, user_sub, zip_file):
"""
Create a new workflow by generating a workflow id from
the current timestamp and unpacking all files from the
- zip passed as argoument.
+ zip passed as argument.
Parameters
----------
@@ -203,7 +205,7 @@ def generate_user_workflow_from_path(cls, user_sub, path_to_clone):
@classmethod
def get_user_workflow_by_id(cls, user_sub, workflow_id):
"""
- Get a worfklow object by specifing the user sub id and
+ Get a workflow object by specifying the user sub id and
the workflow id.
This classmethod method is useful when the folders are
already written on the disk but it is needed to reinitialize
@@ -309,7 +311,7 @@ def make_workflow_dirs(self):
for sub_path in [os.path.join(self._model_dir, f) for f in sub_dir]:
if not os.path.exists(sub_path):
os.mkdir(sub_path)
-
+
os.mkdir(self._results_dir)
os.mkdir(self._analysis_dir)
os.mkdir(self._tmp_dir)
@@ -401,41 +403,90 @@ def load_model_zip(self, model_zip):
if os.path.exists(unzipped_tmp_model_dir):
shutil.rmtree(unzipped_tmp_model_dir)
os.mkdir(unzipped_tmp_model_dir)
- shutil.unpack_archive(model_zip, unzipped_tmp_model_dir, 'zip')
- if len(os.listdir(unzipped_tmp_model_dir)) == 1:
- unzipped_tmp_model_dir = os.path.join(unzipped_tmp_model_dir,
- os.listdir(unzipped_tmp_model_dir)[0])
+ shutil.unpack_archive(model_zip, unzipped_tmp_model_dir)
+
+ for f in os.listdir(unzipped_tmp_model_dir):
+ if 'output' in f:
+ shutil.unpack_archive(os.path.join(unzipped_tmp_model_dir, f),
+ unzipped_tmp_model_dir)
+
+ # drop any loose files so that only the model folder remains
+ for model_folder in os.listdir(unzipped_tmp_model_dir):
+ if not os.path.isdir(os.path.join(unzipped_tmp_model_dir, model_folder)):
+ os.remove(os.path.join(unzipped_tmp_model_dir, model_folder))
+
+ unzipped_tmp_model_dir = os.path.join(unzipped_tmp_model_dir,
+ os.listdir(unzipped_tmp_model_dir)[0])
+
+ shutil.rmtree(self._model_dir)
+ if not os.path.exists(self._model_dir):
+ os.mkdir(self._model_dir)
+ shutil.copytree(unzipped_tmp_model_dir, self._model_dir, dirs_exist_ok=True)
+
+ # handle the case of an uploaded original model
self._model.update_optimization_files(unzipped_tmp_model_dir)
ModelUtil.update_key(model=self._model)
+
+ def _set_optimization_settings(self, optimization_settings):
+ """
+ Create the optimization settings file for the current workflow.
+
+ Parameters
+ ----------
+ optimization_settings : dict
+ the optimization settings
+ """
+ with open(self._optimization_settings, 'w') as fd:
+ json.dump(optimization_settings, fd, indent=4)
def get_optimization_settings(self):
"""
- Returns the workflow optimization settings as json if presents,
- otherwise a FileNotFoundError will be raised.
+ Returns the optimization settings of the current workflow if present,
+ otherwise an empty dict.
"""
try:
with open(self._optimization_settings, 'r') as fd:
try:
- return json.load(fd)
+ return json.load(fd)
except JSONDecodeError:
return {}
except FileNotFoundError:
- raise FileNotFoundError("%s not found" % self._optimization_settings)
+ return {}
- def set_optimization_settings(self, optimization_settings, job_submitted_flag=False):
+ def add_optimization_settings(self, update_json):
"""
- Write/Overwrite the workflow optimization settings in the current workflow.
+ Update the optimization settings of the current workflow
+ with the new settings provided in the "update_json" dict.
Parameters
----------
- optimization_settings : dict
- the workflow optimization settings
- job_submitted_flag : bool, optional
- this flag tells if the job was already submitted or not, by default False
+ update_json : dict
+ the settings to merge into the current ones.
"""
- optimization_settings.update({'job_submitted': job_submitted_flag})
- with open(self._optimization_settings, 'w') as fd:
- json.dump(optimization_settings, fd, indent=4)
+ settings = self.get_optimization_settings()
+ settings.update(update_json)
+ self._set_optimization_settings(settings)
+
+ def get_resume_settings(self):
+ """
+ Returns the resume settings for the current workflow if present,
+ otherwise an empty dict.
+
+ Returns
+ -------
+ dict
+ the resume settings, or an empty dict if none are found.
+ """
+ try:
+ with open(os.path.join(self._model_dir, 'resume_job_settings.json'),
+ 'r') as fd:
+ return json.load(fd)
+ except FileNotFoundError:
+ pass
+ except JSONDecodeError:
+ pass
+ return {}
def remove_file(self, file_path):
"""
@@ -447,42 +498,45 @@ def remove_file(self, file_path):
----------
file_path : str
the file to delete, can be also "some_path/*" to
- delete all files insede the "some_path" folder
+ delete all files inside the "some_path" folder
Raises
------
PermissionError
- Raised if the file path passed as argoument points to a
+ Raised if the file path passed as argument points to a
file that does not belong to the application
FileNotFoundError
Raised if the file is not found
"""
directory, filename = os.path.split(file_path)
-
target_dir = os.path.abspath(os.path.join(self._model_dir, directory))
if os.path.commonpath([os.path.abspath(self._workflow_path), target_dir]) != \
os.path.abspath(self._workflow_path):
raise PermissionError('Can\'t delete files on %s' % target_dir)
- if not os.path.exists(target_dir):
- raise FileNotFoundError('%s directory not exists' % directory)
-
if filename == '*':
shutil.rmtree(target_dir)
os.mkdir(target_dir)
+
+ elif not filename:
+ if os.path.isdir(target_dir):
+ shutil.rmtree(target_dir)
+ elif directory.endswith("*"):
+ d = directory[:-1] # strip the trailing "*"
+ for dd in os.listdir(self._model_dir):
+ if dd.startswith(d):
+ shutil.rmtree(os.path.join(self._model_dir, dd))
else:
full_file_path = os.path.join(self._model_dir, file_path)
if os.path.exists(full_file_path):
os.remove(full_file_path)
+ if not os.path.exists(target_dir):
+ raise FileNotFoundError('%s directory does not exist' % directory)
+
def get_properties(self):
""" Returns the workflow properties. """
- try:
- job_submitted = self.get_optimization_settings()['job_submitted']
- except FileNotFoundError:
- job_submitted = False
-
analysis_flag = False
if len(os.listdir(self._analysis_dir)) == 1:
analysis_model_dir = os.path.join(self._analysis_dir,
@@ -491,14 +545,30 @@ def get_properties(self):
['mechanisms', 'morphology', 'checkpoints']:
analysis_flag = True
+ optset_flag = (False, 'Optimization parameters NOT set')
+ if os.path.exists(self._optimization_settings):
+ with open(self._optimization_settings, 'r') as fd:
+ jj = json.load(fd)
+ if jj.get('hpc') == 'SA':
+ if jj.get('sa-hpc') and jj.get('sa-project'):
+ optset_flag = (True, '')
+ elif jj.get('hpc') == 'DAINT-CSCS':
+ if jj.get('project'):
+ optset_flag = (True, '')
+ elif jj.get('hpc') == 'NSG':
+ if not jj.get('username_submit') or not jj.get('password_submit'):
+ optset_flag = (False, 'NSG credentials required')
+ else:
+ optset_flag = (True, '')
+
props = {
'id': self._id,
'model': self._model.get_properties(),
- 'optimization_settings': os.path.exists(self._optimization_settings),
+ 'optimization_settings': optset_flag,
'etraces': any(os.scandir(self._etraces_dir)),
- 'job_submitted': job_submitted,
+ 'job_submitted': self.get_optimization_settings().get('job_submitted', False),
'results': any(os.scandir(self._results_dir)),
- # 'analysis': any(os.scandir(self._analysis_dir)),
+ 'resume': os.path.exists(os.path.join(self._model_dir, 'checkpoints', 'checkpoint.pkl')),
'analysis': analysis_flag
}
return props
@@ -533,10 +603,11 @@ def set_model_key(workflow, key=None):
workflow : hhnb.core.workflow.Workflow
the workflow
key : str, optional
- the new global model key, by default the current model key is choosen
+ the new global model key, by default the current model key is chosen
"""
ModelUtil.update_key(workflow.get_model(), key)
+ # deprecated method
@staticmethod
def set_default_parameters(workflow):
"""
@@ -576,7 +647,7 @@ def clone_workflow(workflow):
@staticmethod
def make_archive(workflow, zip_name, dir_name, file_list):
"""
- Create a zip archive of the listed file containend in the workflow.
+ Create a zip archive of the listed file contained in the workflow.
Parameters
----------
@@ -643,7 +714,7 @@ def make_workflow_archive(workflow):
@staticmethod
def make_features_archive(workflow):
"""
- Create a zip archive contaning the features file of the workflow.
+ Create a zip archive containing the features file of the workflow.
Parameters
----------
@@ -761,7 +832,7 @@ def make_optimization_model(workflow):
dst=os.path.join(workflow.get_tmp_dir(), 'opt_model',
workflow.get_model().get_key()))
- # craeting directories and script
+ # creating directories and script
try:
os.mkdir(os.path.join(tmp_model_dir, 'checkpoints'))
os.mkdir(os.path.join(tmp_model_dir, 'figures'))
@@ -769,22 +840,28 @@ def make_optimization_model(workflow):
pass
settings = workflow.get_optimization_settings()
- if settings['hpc'] == 'NSG' or settings['hpc'] == 'SA-NSG':
+ if settings['hpc'] == 'NSG' or \
+ (settings['hpc'] == 'SA' and settings['sa-hpc'] == 'nsg'):
ExecFileConf.write_nsg_exec(dst_dir=tmp_model_dir,
max_gen=settings['gen-max'],
- offspring=settings['offspring'])
- elif settings['hpc'] == 'DAINT-CSCS' or settings['hpc'] == 'SA-CSCS':
+ offspring=settings['offspring'],
+ mode=settings['mode'],
+ job_name=settings['job_name'])
+ elif settings['hpc'] == 'DAINT-CSCS' or \
+ (settings['hpc'] == 'SA' and settings['sa-hpc'] == 'pizdaint'):
ExecFileConf.write_daint_exec(dst_dir=tmp_model_dir,
folder_name=workflow.get_model().get_key(),
offspring=settings['offspring'],
- max_gen=settings['gen-max'])
+ max_gen=settings['gen-max'],
+ mode=settings['mode'],
+ job_name=settings['job_name'])
return tmp_model_dir
@staticmethod
def download_from_hhf(workflow, hhf_dict):
"""
- Download all files containend in the "hhf_dict" dictionaire in
+ Download all files contained in the "hhf_dict" dictionary in
the workflow that comes from the HippocampusHub.
Parameters
@@ -792,7 +869,7 @@ def download_from_hhf(workflow, hhf_dict):
workflow : hhnb.core.workflow.Workflow
the workflow in which to download the files
hhf_dict : dict
- the dictionaire containing all files that
+ the dictionary containing all files that
comes from the HippocampusHub
"""
WorkflowUtil.set_default_parameters(workflow)
@@ -812,7 +889,7 @@ def download_from_hhf(workflow, hhf_dict):
for chunk in r.iter_content(chunk_size=4096):
fd.write(chunk)
workflow.get_model().set_morphology(morphology=file_path)
-
+
for etrace in etraces:
file_path = os.path.join(workflow.get_etraces_dir(), etrace['name'])
r = requests.get(url=etrace['url'], verify=False)
@@ -836,8 +913,8 @@ def download_from_hhf(workflow, hhf_dict):
@staticmethod
def list_model_files(workflow):
"""
- Returns a dictionaire containing all the model's files
- within the workflow. The keys of the dictionaire
+ Returns a dictionary containing all the model's files
+ within the workflow. The keys of the dictionary
represent the type of the model's files and can be one of
the following values "config", "morphology", "model" and
"root" that contains the root model folder path.
@@ -848,7 +925,7 @@ def list_model_files(workflow):
Returns
-------
dict
- a dictionaire containing the type of files as the key
+ a dictionary containing the type of files as the key
and the path of the files as value.
"""
model_files = {}
@@ -899,7 +976,7 @@ def download_job_result_files(workflow, data):
workflow : hhnb.core.workflow.Workflow
the workflow
data : dict
- the data object containing all the informations about
+ the data object containing all the information about
the job result files to download from the HPC system
Raises
@@ -957,36 +1034,41 @@ def run_analysis(workflow, job_output):
Parameters
----------
workflow : hhnb.core.workflow.Workflow
- the workflow that will contain the analysis
+ the workflow that will contain the analysis.
job_output : str
- the path where the job to analyse is placed
+ the path where the job to analyse is placed.
Raises
------
FileNotFoundError
- if any required files is not found
+ if any required file is not found.
AnalysisProcessError
if the "opt_neuron.py" script ends with an error
or if the script can't be run due to a required
- file that is not found
+ file that is not found.
MechanismsProcessError
- if the "nrnivmodl" program ends with an error
+ if the "nrnivmodl" program ends with an error.
"""
analysis_dir = workflow.get_analysis_dir()
shutil.unpack_archive(job_output, analysis_dir)
+
+ output_dir = None
for f in [os.path.join(analysis_dir, f) for f in os.listdir(analysis_dir)]:
if os.path.isdir(f):
output_dir = f
else:
os.remove(f)
+ if not output_dir:
+ raise FileNotFoundError('Output folder')
+
analysis_file = os.path.join(output_dir, 'model', 'analysis.py')
if not os.path.exists(analysis_file):
- raise FileNotFoundError('"analysis.py" not found')
+ raise FileNotFoundError('analysis.py')
evaluator_file = os.path.join(output_dir, 'model', 'evaluator.py')
if not os.path.exists(evaluator_file):
- raise FileNotFoundError('"evaluator.py" not found')
+ raise FileNotFoundError('evaluator.py')
figures_dir = os.path.join(output_dir, 'figures')
if os.path.exists(figures_dir):
@@ -1002,7 +1084,7 @@ def run_analysis(workflow, job_output):
os.rename(os.path.join(checkpoint_dir, f),
os.path.join(checkpoint_dir, 'checkpoint.pkl'))
else:
- raise AnalysisProcessError('Checkpoints folder not found! Maybe the optimization process failed.')
+ raise FileNotFoundError('checkpoint.pkl')
opt_neuron_file = os.path.join(output_dir, 'opt_neuron.py')
with open(opt_neuron_file, 'r') as fd:
@@ -1020,33 +1102,27 @@ def run_analysis(workflow, job_output):
compiled_mods_dir = os.path.join(output_dir, 'x86_64')
if os.path.exists(compiled_mods_dir):
shutil.rmtree(compiled_mods_dir)
+ curr_dir = os.getcwd()
- curr_dir = os.getcwd()
-
log_file_path = os.path.join(LOG_ROOT_PATH, 'analysis', workflow.get_user())
if not os.path.exists(log_file_path):
os.makedirs(log_file_path)
- log_file = os.path.join(log_file_path, workflow.get_id() + '.log')
+ log_file = os.path.join(log_file_path, workflow.get_id() + '.log')
+ Path(log_file).touch()
+
+ os.chdir(output_dir)
build_mechanisms_command = f'source {env_prefix}/bin/activate; nrnivmodl mechanisms > {log_file}'
- opt_neuron_analysis_command = f'source {env_prefix}/bin/activate; python ./opt_neuron.py --analyse --checkpoint ./checkpoints > {log_file}'
-
- os.chdir(output_dir)
- p0 = subprocess.call(build_mechanisms_command, shell=True, executable='/bin/bash')
- p1 = subprocess.call(opt_neuron_analysis_command, shell=True, executable='/bin/bash')
+ opt_neuron_analysis_command = f'source {env_prefix}/bin/activate; python ./opt_neuron.py --analyse --checkpoint ./checkpoints > {log_file}'
+ p0 = subprocess.run(build_mechanisms_command, shell=True, executable='/bin/bash', capture_output=True, text=True)
+ p1 = subprocess.run(opt_neuron_analysis_command, shell=True, executable='/bin/bash', capture_output=True, text=True)
+
os.chdir(curr_dir)
- if p0 > 0:
- raise MechanismsProcessError()#p0.returncode, build_mechanisms_command, stderr=p0.stderr)
- if p1 > 0:
- error = 'Can\'t identify the error.'
- for f in os.listdir(os.path.join(output_dir, 'checkpoints')):
- if not f.endswith('.pkl'):
- error = 'Checkpoint not found! Maybe the optimization process failed.'
- break
- raise AnalysisProcessError(error)#p1.returncode, opt_neuron_analysis_command, stderr=p1.stderr)
-
-
+ if p0.returncode > 0:
+ raise MechanismsProcessError(p0.returncode, build_mechanisms_command, stderr=p0.stderr)
+ if p1.returncode > 0:
+ raise AnalysisProcessError(p1.returncode, opt_neuron_analysis_command, stderr=p1.stderr)
@staticmethod
def make_naas_archive(workflow):
@@ -1078,10 +1154,13 @@ def make_naas_archive(workflow):
os.remove(f_path)
# rename .hoc file
+ hoc_file = None
checkpoints_dir = os.path.join(tmp_analysis_dir, 'checkpoints')
for f in os.listdir(checkpoints_dir):
if f.endswith('.hoc'):
hoc_file = f
+ if not hoc_file:
+ raise FileNotFoundError('".hoc" file not found')
os.rename(
src=os.path.join(checkpoints_dir, hoc_file),
dst=os.path.join(checkpoints_dir, 'cell.hoc')
@@ -1097,3 +1176,51 @@ def make_naas_archive(workflow):
# moving naas archive to tmp dir
return shutil.move(naas_archive, workflow.get_tmp_dir())
+
+ @staticmethod
+ def load_parameters_template(workflow, template_type):
+ """
+ Load the chosen parameters template in the current workflow.
+
+ Parameters
+ ----------
+ workflow : hhnb.core.workflow.Workflow
+ the workflow in which to load the parameters template.
+ template_type : str
+ the parameters template type. Can be "pyramidal" or "interneuron".
+
+ Raises
+ ------
+ UnknownParametersTemplate
+ if the "template_type" is of an unknown type.
+ """
+ if template_type not in ['pyramidal', 'interneuron']:
+ raise UnknownParametersTemplate
+
+ shutil.copy(
+ os.path.join(HHF_PARAMETERS_TEMPLATE_DIR, template_type, 'parameters.json'),
+ os.path.join(workflow.get_model_dir(), 'config')
+ )
+
+ @staticmethod
+ def clean_model(workflow):
+ """
+ Remove all unnecessary folders and files from the workflow's model folder.
+
+ Parameters
+ ----------
+ workflow : hhnb.core.workflow.Workflow
+ the workflow of the model to clean.
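+
+ Example
+ -------
+ >>> WorkflowUtil.clean_model(workflow)  # "workflow" as assumed above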
+ """
+ try:
+ shutil.rmtree(os.path.join(workflow.get_model_dir(), 'checkpoints'))
+ shutil.rmtree(os.path.join(workflow.get_model_dir(), 'tools'))
+ shutil.rmtree(os.path.join(workflow.get_model_dir(), 'figures'))
+ for dd in os.listdir(workflow.get_model_dir()):
+ if dd.startswith('r_seed'):
+ shutil.rmtree(os.path.join(workflow.get_model_dir(), dd))
+ shutil.rmtree(os.path.join(workflow.get_model_dir(), 'mod_nsgportal'))
+ shutil.rmtree(os.path.join(workflow.get_model_dir(), 'x86_64'))
+ except FileNotFoundError:
+ pass
+
diff --git a/hhnb/urls.py b/hhnb/urls.py
index 858bceff..bcd8a3e7 100644
--- a/hhnb/urls.py
+++ b/hhnb/urls.py
@@ -33,11 +33,8 @@
register_converter(converters.JobIdConverter, 'jobid')
register_converter(converters.FolderConverter, 'folder')
register_converter(converters.ConfigFileConverter, 'config_file')
-
-
register_converter(converters.FileTypeConverter, 'file_type')
-
urlpatterns = [
# session refresh
path('session-refresh/', views.session_refresh),
@@ -102,4 +99,9 @@
path('hhf-get-model-key/', views.hhf_get_model_key),
path('hhf-apply-model-key/', views.hhf_apply_model_key),
path('hhf-save-config-file/<folder:folder>/<config_file:config_file>/', views.hhf_save_config_file),
+
+ path('hhf-load-parameters-template/', views.hhf_load_parameters_template),
+
+ # get service-account content
+ path('get-service-account-content/', views.get_service_account_content),
]
diff --git a/hhnb/utils/converters.py b/hhnb/utils/converters.py
index ef4d264b..d2af438f 100755
--- a/hhnb/utils/converters.py
+++ b/hhnb/utils/converters.py
@@ -13,7 +13,7 @@ def to_url(self, value):
class CtxConverter:
"""
- Unsed due to uuid built-in converter
+ Unused due to uuid built-in converter
"""
regex = '[0-9a-zA-Z]{8}-[0-9a-zA-Z]{4}-[0-9a-zA-Z]{4}-[0-9a-zA-Z]{4}-[0-9a-zA-Z]{12}'
@@ -144,4 +144,13 @@ def to_python(self, value):
def to_url(self, value):
return value
-
\ No newline at end of file
+
+
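+# Illustrative registration, mirroring the converters registered in urls.py
+# (the 'job_mode' converter name is an assumption, not taken from the code):
+#   register_converter(converters.JobModeConverters, 'job_mode')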
+class JobModeConverters:
+ regex = 'start|resume'
+
+ def to_python(self, value):
+ return value
+
+ def to_url(self, value):
+ return value
\ No newline at end of file
diff --git a/hhnb/utils/messages.py b/hhnb/utils/messages.py
index d62a7d52..d40b63a2 100644
--- a/hhnb/utils/messages.py
+++ b/hhnb/utils/messages.py
@@ -1,21 +1,23 @@
# General error messages
-CRITICAL_ERROR = 'Critical Error !<br>Please contact the EBRAINS support if the problem persists.'
-MODEL_CATALOG_CREDENTIALS_NOT_FOUND = 'Error !<br>Invalid Model Catalog credentials. Set them in your configuraztion file under "/hh_neuron_builder/conf".'
+CRITICAL_ERROR = 'Critical Error !<br>Please contact the EBRAINS support if the problem persists.'
+MODEL_CATALOG_CREDENTIALS_NOT_FOUND = 'Error !<br>Invalid Model Catalog credentials. Set them in your configuration file under "/hh_neuron_builder/conf".'
GENERAL_ERROR = 'Error !'
+UNABLE_TO_FETCH_FILES = 'Something went wrong !<br>Unable to fetch the files you chose in the HippocampusHub from their source. Please try again later and if the problem persists over the day please contact the EBRAINS support.'
+
# Model Catalog error messages
-MODEL_CATALOG_INVALID_CREDENTIALS = 'Error !<br>Invalid Model Catalog credentials.<br>Please, contanct the EBRAINS support.'
+MODEL_CATALOG_INVALID_CREDENTIALS = 'Error !<br>Invalid Model Catalog credentials.<br>Please, contact the EBRAINS support.'
MODEL_CATALOG_NOT_AVAILABLE = 'The Model Catalog is temporarily not available. Please, try again later.'
-# Files error messsages
+# Files error messages
NO_FILE_UPLOADED = 'No file was uploaded.'
NO_MORE_THEN = 'You can upload only {}.'
-WRONG_UPLAODED_FILE = 'The uploaded file/s is wrong.'
+WRONG_UPLOADED_FILE = 'The uploaded file/s is/are wrong.'
ONLY_ACCEPTED_FILE = 'You can upload only a {} file.'
INVALID_FILE = 'Invalid file !<br>Upload a correct {} archive.'
INVALID_SIGNATURE = 'Invalid signature !<br>Uploaded {} archive is corrupted or was modified.'
-MARLFORMED_FILE = 'Error !<br>Malformed {} file.'
+MALFORMED_FILE = 'Error !<br>Malformed "{}" file.'
NO_FILE_TO_DOWNLOAD = 'No file selected to download.'
NO_FILE_TO_DELETE = 'No file to delete.'
FILE_NOT_FOUND_ERROR = 'Error !<br>File not found.'
@@ -30,27 +32,28 @@
# HPC error messages
NO_HPC_SELECTED = 'No HPC was selected.'
NO_JOB_SELECTED = 'No job was selected.'
-JOB_FETCH_ERROR = 'Error !<br>Some error occurred while fetching jobs on {}. Please contact the EBRAINS support if the problem persists.'
-JOB_RESULTS_FETCH_ERRROR = 'Error !<br>Error while fetching job results.'
+JOB_FETCH_ERROR = 'Error !<br>Some error occurred while fetching jobs on {}. Please contact the EBRAINS support if the problem persists.'
+JOB_RESULTS_FETCH_ERROR = 'Error !<br>Error while fetching job results.'
JOB_SUBMITTED = 'Job submitted on {}.'
JOB_EXPIRED = 'Job {} has expired and no file is present'
-HPC_NOT_AVAILABLE = '{} system not available at this moment. Please, try again later...<br>If the problem persists contact the EBRAINS support.'
+HPC_NOT_AVAILABLE = 'HPC not available<br>{} system not available at this moment. Please, try again later...<br>If the problem persists contact the EBRAINS support.'
# Analysis error messages
-ANALYSIS_ERROR = 'Error !<br>Something went wrong while the analysis process was running. Please contact the EBRAINS support if the problem persists.'
-MECHANISMS_PROCESS_ERROR = 'Error !<br>Something went wrong while mechanisms file were building. Please, take a look on the files and if the problem persists contact the EBRAINS support.'
-ANALYSIS_PROCESS_ERROR = 'Error !<br>Analysis process was stopped due to the following error:<br>{}<br>If the problem persists contact the EBRAINS support.'
+ANALYSIS_ERROR = 'Analysis Error !<br>Something went wrong while the analysis process was running. Please contact the EBRAINS support if the problem persists.'
+MECHANISMS_PROCESS_ERROR = 'Analysis Error !<br>The mechanisms building process was stopped due to the following error:<br>{}<br>If the problem persists contact the EBRAINS support.'
+ANALYSIS_PROCESS_ERROR = 'Analysis Error !<br>The analysis process was stopped due to the following error:<br>{}<br>If the problem persists contact the EBRAINS support.'
+ANALYSIS_FILE_NOT_FOUND_ERROR = 'Analysis Error !<br>The analysis process couldn\'t start due to the following missing file:<br>{}<br>Download the optimization, check the stderr file, fix the optimization error and run the optimization again to proceed.<br>If the problem persists contact the EBRAINS support.'
BLUE_NAAS_NOT_AVAILABLE = 'The BlueNaas is temporarily not available. Please, try again later.'
-MODEL_SUCCESSFULLY_REGISTERED = 'Congratulation, the model was successfully registered in the Model Catalog !<br>Once the page is opened in a new tab, if a welcome message is displayed instead of the model instance, please click on the "Authorize" button if requested.<br>Leave the current tab open in case you need to recollect the model url.'
+MODEL_SUCCESSFULLY_REGISTERED = 'Congratulations, the model was successfully registered in the Model Catalog !<br>Once the page is opened in a new tab, if a welcome message is displayed instead of the model instance, please click on the "Authorize" button if requested.<br>Leave the current tab open in case you need to retrieve the model url.'
MODEL_ALREADY_EXISTS = 'Model already exists on the Model Catalog.'
SIGNATURE_README_DESCRIPTION = \
"""
The "signature" file provides the ".zip" file sign generated from the Hodgikin-Huxley NeuronBuilder.
-This signature is unique and is generated using the ".zip" file readed bitwise.
+This signature is unique and is generated using the ".zip" file read bitwise.
If you want to upload a previously downloaded ".zip" file, you must provide the original "signature.txt" otherwise the file will be rejected.
"""
diff --git a/hhnb/utils/misc.py b/hhnb/utils/misc.py
index 78de1e7c..cf402545 100644
--- a/hhnb/utils/misc.py
+++ b/hhnb/utils/misc.py
@@ -6,7 +6,7 @@
import os
import shutil
import time
-
+import json
class InvalidArchiveError(Exception):
@@ -31,14 +31,14 @@ def validate_archive(archive):
"""
Validate a zip file previously download from the Hodgkin-Huxley
Neuron Builder. This function ensures that the file is not
- corrupted or modified with a malevolus intenction by verifing
+ corrupted or modified with a malicious intention by verifying
its sign.
- The validation process is done by recalcuting the hash of the
- original zip archive. If it matchs with the provided one within
+ The validation process is done by recalculating the hash of the
+ original zip archive. If it matches with the provided one within
the archive, then the archive will be accepted and its path
will be returned from the function, otherwise an
- InvlidSign error will be generated.
+ InvalidSign error will be generated.
Parameters
----------
@@ -66,7 +66,7 @@ def validate_archive(archive):
if f.endswith('.zip'):
archive_name = f
- # check if the archive zip is malfomed
+ # check if the archive zip is malformed
if not archive_name:
raise InvalidArchiveError(f'{archive} not valid.')
@@ -82,11 +82,9 @@ def validate_archive(archive):
# read zip data and validate the sign
with open(archive_path, 'rb') as fd:
Sign.verify_data_sign(signature, fd.read())
-
return archive_path
-
def get_signed_archive(arch_file):
"""
Returns a new archive that include the archive itself and its sign.
@@ -131,3 +129,28 @@ def get_signed_archive(arch_file):
shutil.rmtree(tmp_dir)
return signed_archive
+
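+# Illustrative usage of the helper below (the path is an assumption):
+#   validate_json_file('/tmp/uploads/parameters.json')
+# returns True on success; malformed JSON raises json.JSONDecodeError.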
+def validate_json_file(file):
+    # check that the uploaded file contains valid json
+    full_path = os.path.abspath(file)
+    if file.endswith('.json'):
+        with open(full_path, 'r') as fd:
+            jj = json.load(fd)
+
+        if full_path.endswith('parameters.json'):
+            # check for a single wf_id key used as the main key
+            main_key = None
+            if len(jj.keys()) == 1:
+                main_key = list(jj.keys())[0]
+                jj = jj[main_key]
+            # check for "distributions" and add an empty field if missing
+            if 'distributions' not in jj:
+                jj.update({'distributions': {}})
+
+            # rewrite the file with the normalized content
+            with open(full_path, 'w') as fd:
+                json.dump({main_key: jj} if main_key else jj, fd)
+
+    # return True if everything is ok, otherwise an exception is raised
+    return True
\ No newline at end of file
diff --git a/hhnb/views.py b/hhnb/views.py
index 6d1c54ad..ea7d1e8b 100644
--- a/hhnb/views.py
+++ b/hhnb/views.py
@@ -6,6 +6,7 @@
from hh_neuron_builder.settings import MODEL_CATALOG_COLLAB_DIR, MODEL_CATALOG_COLLAB_URL, MODEL_CATALOG_CREDENTIALS, MODEL_CATALOG_FILTER, TMP_DIR
from hhnb.core.lib.exception.workflow_exception import AnalysisProcessError, MechanismsProcessError, WorkflowExists
+from hhnb.core.lib.exception.model_exception import *
from hhnb.core.response import ResponseUtil
from hhnb.core.workflow import Workflow, WorkflowUtil
from hhnb.core.user import *
@@ -19,7 +20,6 @@
from ebrains_drive.exceptions import ClientHttpError as EbrainsDriveClientError
import ebrains_drive
-from subprocess import CalledProcessError
import requests
import datetime
import os
@@ -27,13 +27,14 @@
import shutil
import logging
-from hhnb.utils.misc import InvalidArchiveError, get_signed_archive, validate_archive
+from hhnb.utils.misc import InvalidArchiveError, get_signed_archive, validate_archive, validate_json_file
logger = logging.getLogger(__name__)
-
LOG_ACTION = 'User: "{}"\t Action: {}'
+
+
def status(request):
""" Returns the status of the service if is up. """
return ResponseUtil.ok_json_response({'hh-neuron-builder-status': 1})
@@ -82,7 +83,7 @@ def index_docs(request):
def home_page(request):
"""
- By default the home page is redered, but if the "old_workflow_path"
+ By default the home page is rendered, but if the "old_workflow_path"
is found in the session stored keys, the old workflow is restored
and the workflow page is rendered.
"""
@@ -182,7 +183,7 @@ def upload_workflow(request):
fd.write(wf)
try:
- valide_wf_zip = validate_archive(wf_zip)
+ valid_wf_zip = validate_archive(wf_zip)
except InvalidArchiveError:
return ResponseUtil.ko_json_response({'response': 'KO',
'message': messages.INVALID_FILE.format(filename)})
@@ -196,7 +197,7 @@ def upload_workflow(request):
try:
workflow = Workflow.generate_user_workflow_from_zip(hhnb_user.get_sub(),
- valide_wf_zip)
+ valid_wf_zip)
except WorkflowExists as e:
logger.error(e)
return ResponseUtil.ko_json_response({'response': 'KO', 'message': str(e)})
@@ -214,14 +215,14 @@ def clone_workflow(request, exc):
return ResponseUtil.no_exc_code_response()
old_workflow, hhnb_user = get_workflow_and_user(request, exc)
- new_worfklow = WorkflowUtil.clone_workflow(old_workflow)
+ new_workflow = WorkflowUtil.clone_workflow(old_workflow)
new_exc = generate_exc_code(request)
logger.info(LOG_ACTION.format(
- hhnb_user, 'cloning old workflow %s to %s' % (old_workflow, new_worfklow))
+ hhnb_user, 'cloning old workflow %s to %s' % (old_workflow, new_workflow))
)
- request.session[new_exc]['workflow_id'] = new_worfklow.get_id()
+ request.session[new_exc]['workflow_id'] = new_workflow.get_id()
request.session.save()
return ResponseUtil.ok_json_response({'exc': new_exc})
@@ -255,7 +256,11 @@ def get_workflow_properties(request, exc):
workflow, _ = get_workflow_and_user(request, exc)
if request.session[exc].get('hhf_dict') and \
not request.session[exc]['hhf_already_downloaded']:
- WorkflowUtil.download_from_hhf(workflow, request.session[exc]['hhf_dict'])
+ try:
+ WorkflowUtil.download_from_hhf(workflow, request.session[exc]['hhf_dict'])
+ except requests.exceptions.ConnectionError as e:
+ logger.error(e)
+ return ResponseUtil.ko_response(messages.UNABLE_TO_FETCH_FILES)
request.session[exc]['hhf_already_downloaded'] = True
request.session.save()
@@ -375,7 +380,7 @@ def upload_features(request, exc):
for uploaded_file in uploaded_files:
if uploaded_file.name != 'features.json' and uploaded_file.name != 'protocols.json':
- return ResponseUtil.ko_response(messages.WRONG_UPLAODED_FILE)
+ return ResponseUtil.ko_response(messages.WRONG_UPLOADED_FILE)
if uploaded_file.name == 'features.json':
workflow.write_features(uploaded_file)
elif uploaded_file.name == 'protocols.json':
@@ -423,7 +428,7 @@ def upload_model(request, exc):
workflow.load_model_zip(zip_path)
except FileNotFoundError as e:
logger.error(e)
- return ResponseUtil.ko_response(messages.MARLFORMED_FILE.format('"model.zip"'))
+ return ResponseUtil.ko_response(messages.MALFORMED_FILE.format('model.zip'))
return ResponseUtil.ok_response()
@@ -497,7 +502,7 @@ def upload_analysis(request, exc):
print(e)
logger.error(e)
- return ResponseUtil.ko_response(messages.MARLFORMED_FILE.format(f'"{uploaded_file.name}"'))
+ return ResponseUtil.ko_response(messages.MALFORMED_FILE.format(f'"{uploaded_file.name}"'))
def upload_files(request, exc):
@@ -519,19 +524,7 @@ def upload_files(request, exc):
folder = request.POST.get('folder')
uploaded_file = request.FILES.get('file')
- if folder == 'morphology/':
- if not uploaded_file.endswith('.asc'):
- return ResponseUtil.ko_response('Morphology must be ".asc" file.')
- elif folder == 'mechanisms/':
- if not uploaded_file.endswith('.mod'):
- return ResponseUtil.ko_response('Mechanisms must be ".mod" file.')
- elif folder == 'config/':
- if not uploaded_file.name in ['features.json', 'protocols.json', 'parameters.json']:
- return ResponseUtil.ko_response(
- 'Config file must be one of the fallowing files: \
- "protocols.json", "features.json", "parameters.json"'
- )
-
+ # store the uploaded file
full_path = os.path.join(workflow.get_model_dir(), folder, uploaded_file.name)
with open(full_path, 'wb') as fd:
if uploaded_file.multiple_chunks(chunk_size=4096):
@@ -540,19 +533,46 @@ def upload_files(request, exc):
else:
fd.write(uploaded_file.read())
+ try:
+ if folder == 'morphology/':
+ workflow.get_model().set_morphology(full_path)
+ with open(os.path.join(workflow.get_model_dir(), 'config', 'morph.json'), 'w') as fd:
+ json.dump(workflow.get_model().get_morphology().get_config(), fd)
+
+ elif folder == 'mechanisms/':
+ if not uploaded_file.name.endswith('.mod'):
+ os.remove(full_path)  # discard the stored file on rejection
+ return ResponseUtil.ko_response('Mechanisms must be a ".mod" file.')
+
+ elif folder == 'config/':
+ if not uploaded_file.name in ['features.json', 'protocols.json', 'parameters.json']:
+ os.remove(full_path)
+ return ResponseUtil.ko_response(
+ 'Config file must be one of the following files: \
+ "protocols.json", "features.json", "parameters.json"'
+ )
+ validate_json_file(full_path)
+
+ except InvalidMorphologyFile as e:
+ os.remove(full_path)
+ return ResponseUtil.ko_response('File Format Error
";
+ }
+ return msg;
+}
+
+
+function showInfoAlert(msg, timeout=10000) {
+ showAlert(msg, "info", timeout, true, false);
+}
+
+function showSuccessAlert(msg, timeout=10000) {
+ showAlert(msg, "success", timeout, true, true);
+}
+
+function showErrorAlert(msg, timeout=10000) {
+ showAlert(msg, "danger", timeout, true, true);
+}
+
+function showWarningAlert(msg, timeout=10000) {
+ showAlert(msg, "warning", timeout, true, false);
+}
+
+function showHpcAuthAlert() {
+    console.log("showHpcAuthAlert() called.");
+    showAlert(
+        makeAlertText(
+            "",  // head (JS has no keyword arguments, so args are positional)
+            "You need to be logged in to use this HPC system !",
+            "Please, click \"Cancel\" and login with the button in the top right corner before doing this operation."
+        ),
+        "warning",
+        5000,
+        true,
+        false
+    );
+}
+
+function showJobsAuthAlert() {
+    console.log("showJobsAuthAlert()");
+    showAlert(
+        makeAlertText(
+            "",  // head
+            "You need to authenticate with Ebrains to fetch jobs on this HPC system !",
+            "Please, click \"Cancel\" and login with the button in the top right corner before doing this operation."
+        ),
+        "warning",
+        5000,
+        true,
+        false
+    );
+}
+
+function showServiceAccountAlert() {
+    showAlert(
+        makeAlertText(
+            "",  // head
+            "The Service Account is temporarily unreachable !",
+            "Please, try again later or contact the support if the problem persists."
+        ),
+        "warning",
+        5000,
+        true,
+        false
+    );
+}
+
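+// Splits a backend message of the assumed form "title<br>body", e.g.
+//   splitTitleAndMessage("Error !<br>File not found.")
+//   -> { title: "Error !", message: "File not found." }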
+function splitTitleAndMessage(message) {
+    let splittedMessage = message.split("<br>");
+    if (splittedMessage.length == 2) {
+        return {
+            title: splittedMessage[0],
+            message: splittedMessage[1]
+        };
+    }
+    return null;
+}
\ No newline at end of file
diff --git a/static/hhnb/js/workflow/workflow.js b/static/hhnb/js/workflow/workflow.js
index add8a492..76ff2136 100644
--- a/static/hhnb/js/workflow/workflow.js
+++ b/static/hhnb/js/workflow/workflow.js
@@ -7,7 +7,7 @@ const UPLOAD_FILES_BASE_URL = "/hh-neuron-builder/upload-files/";
function checkRefreshSession(response) {
- console.log(response);
+ Log.debug(response);
if (response.status === 403 && response.responseJSON.refresh_url) {
showLoadingAnimation("Session expired. Refreshing session automatically...");
$.ajax({
@@ -38,7 +38,7 @@ function disable(jObj) {
export default class Workflow {
#exc = null;
- #props = null;
+ #props = {};
#uploadFileType = null;
@@ -83,11 +83,12 @@ export default class Workflow {
if (error.status == 404) {
MessageDialog.openReloadDialog();
}
+ MessageDialog.openErrorDialog(error.responseText);
}
}).done(() => { this.updateUI() })
.fail((error) => {
if (error.status == 500) {
- return MessageDialog.openReloadDialog("/hh-neuron-builder", "A critical error occurred. Please restart the application and if the the problem persists contact us.");
+ return MessageDialog.openReloadDialog("A critical error occurred !