
Commit

More refactoring
tbarbette committed Dec 19, 2024
1 parent 0055bb7 commit 74ab6c1
Showing 52 changed files with 788 additions and 700 deletions.
integration/imports/run.sh (2 changes: 1 addition & 1 deletion)
@@ -1,5 +1,5 @@
#!/bin/bash
../../npf_compare.py \
../../npf.py \
local \
--force-retest \
--test main.npf \
integration/integration.sh (2 changes: 1 addition & 1 deletion)
@@ -83,7 +83,7 @@ fi


try integration/empty.npf $python
compare_raw npf_compare.py single $python --no-graph --no-graph-time --csv out.csv
compare_raw npf.py single $python --no-graph --no-graph-time --csv out.csv
diff out.csv integration/single.csv
if [ $? -ne 0 ] ; then
echo "single.csv changed !"
integration/single.csv (6 changes: 3 additions & 3 deletions)
@@ -1,4 +1,4 @@
index,build,test_index,y_D,run_index
0,Local,0,4.0,0
1,Local,0,4.0,1
2,Local,0,4.0,2
0,Click 2022,0,4.0,0
1,Click 2022,0,4.0,1
2,Click 2022,0,4.0,2
integration/test_unittest.py (18 changes: 10 additions & 8 deletions)
@@ -6,22 +6,24 @@

from npf.expdesign.zltexp import ZLTVariableExpander
from npf.output.grapher import Grapher
from npf.tests import repository
from npf.repo import repository
from npf.models.variables.RangeVariable import RangeVariable
from npf.models.variables.SimpleVariable import SimpleVariable
from npf.tests.build import Build
from npf.tests.test_driver import Comparator
from npf.cluster.node import *

from collections import OrderedDict

from npf.tests.repository import Repository
from npf.repo.repository import Repository
from npf.tests.test import Test
from npf.tests.variable import RangeVariable, SimpleVariable, dtype
from npf.types.dataset import Run
from npf.models.units import dtype
from npf.models.dataset import Run

import numpy as np
import logging

from npf.types.units import numeric_dict
from npf.models.units import numeric_dict

logger = logging.getLogger()
logger.level = logging.DEBUG
@@ -38,7 +40,7 @@ def get_args():
args = parser.parse_args(args = "")
args.tags = {}
npf.globals.set_args(args)
npf.parse_nodes(args)
npf.parsing.parse_nodes(args)
return args

def test_args():
@@ -155,8 +157,8 @@ def test_core():

full_args = ["--test", "integration/sections.npf",'--force-retest']
args = parser.parse_args(full_args)
npf.initialize(args)
npf.create_local()
npf.parsing.initialize(args)
npf.parsing.create_local()

repo_list = [repository.Repository.get_instance("local", options=args)]

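The hunks in this file are pure import-path moves. As a reading aid, the imports below restate the new paths added above next to the old ones they replace; this is taken only from this diff and not checked against the rest of the tree:

from npf.repo import repository                                  # was: from npf.tests import repository
from npf.repo.repository import Repository                       # was: from npf.tests.repository import Repository
from npf.models.variables.RangeVariable import RangeVariable     # was part of: from npf.tests.variable import ...
from npf.models.variables.SimpleVariable import SimpleVariable   # was part of: from npf.tests.variable import ...
from npf.models.units import dtype, numeric_dict                 # was: npf.tests.variable (dtype) and npf.types.units (numeric_dict)
from npf.models.dataset import Run                               # was: from npf.types.dataset import Run
# Calls: npf.parse_nodes / npf.initialize / npf.create_local -> npf.parsing.parse_nodes / .initialize / .create_local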
npf.py (6 changes: 4 additions & 2 deletions)
@@ -12,7 +12,9 @@
import npf
import npf.cmdline
import npf.osutils
from npf.tests import get_default_repository
import npf.parsing
from npf.repo.factory import get_default_repository
from npf.repo.factory import get_default_repository
from npf.tests.test_driver import Comparator
from npf.output import generate_outputs
from npf.tests.regression import *
@@ -38,7 +40,7 @@ def main():
args = parser.parse_args()

# Parse the cluster options
npf.parse_nodes(args)
npf.parsing.parse_nodes(args)

# Parsing repo list and getting last_build
repo_list = []
npf/__init__.py (148 changes: 1 addition & 147 deletions)
@@ -1,154 +1,8 @@
import os

from typing import Dict

import regex

from npf.cluster.node import Node
from npf.globals import experiment_path, npf_root_path, roles, set_args, get_options
from npf.globals import experiment_path, npf_root_path, get_options
from npf.osutils import get_valid_filename

nodePattern = regex.compile(
"(?P<role>[a-zA-Z0-9]+)=(:?(?P<user>[a-zA-Z0-9]+)@)?(?P<addr>[a-zA-Z0-9.-]+)(:?[:](?P<path>[a-zA-Z0-9_./~-]+))?")

def nodes_for_role(role, self_role=None, self_node=None, default_role_map={}):
if role is None or role == '':
role = 'default'
if role == 'self':
if self_role:
role = self_role
if self_node:
return [self_node]
else:
raise Exception("Using self without a role context. Usually, this happens when self is used in a %file")
if role not in roles:
if role in default_role_map:
role = default_role_map[role]
return roles.get(role, roles['default'])


def executor(role, default_role_map):
"""
Return the executor for a given role as associated by the cluster configuration
:param role: A role name
:return: The executor
"""
return nodes_for_role(role, default_role_map)[0].executor

def initialize(args):
set_args(args)

#other random stuffs to do
if not get_options().build_folder is None:
if not os.access(get_options().build_folder, os.W_OK):
raise Exception("The provided build path is not writeable or does not exists : %s!" % get_options().build_folder)
get_options()._build_path = get_options().build_folder
else:
get_options()._build_path = npf_writeable_root_path()+'/build/'

if type(get_options().use_last) is not int:
if get_options().use_last:
get_options().use_last = 100

if not os.path.exists(experiment_path()):
raise Exception("The experiment root '%s' is not accessible ! Please explicitely define it with --experiment-path, and ensure that directory is writable !" % experiment_path())

if not os.path.isabs(get_options().experiment_folder):
get_options().experiment_folder = os.path.abspath(get_options().experiment_folder)


get_options().search_path = set(get_options().search_path)
for t in [get_options().test_files]:
get_options().search_path.add(os.path.dirname(t))

def create_local():
# Create the test file
os.close(os.open(experiment_path() + ".access_test" , os.O_CREAT))
local = Node.makeLocal()
#Delete the test file
os.unlink(experiment_path() + ".access_test")
roles['default'] = [local]
return local

def parse_nodes(args):
initialize(args)
local = create_local()

for val in get_options().cluster:

# Create the test file
os.close(os.open(experiment_path() + ".access_test" , os.O_CREAT))

variables : list[str] = val.split(',')
if len(variables) == 0:
raise Exception("Bad definition of cluster parameter : %s" % variables)
mapping: str =variables[0].strip()
match = nodePattern.match(mapping)
if not match:
raise Exception("Bad definition of node : %s" % mapping)

path = match.group('path')

del variables[0]

nfs = None
assert isinstance(variables, list)
for opts in variables:
assert isinstance(opts, str)
var,val = opts.split('=')
if var == "nfs":
nfs = int(val)
elif var == "path":
path = val
else:
continue
variables.remove(opts)

if match.group('addr') == 'localhost':
node = local
else:
node = Node.makeSSH(user=match.group('user'), addr=match.group('addr'), path=path, nfs=nfs)
role = match.group('role')
if role in roles:
roles[role].append(node)
print("Role %s has multiple nodes. The role will be executed by multiple machines. If this is not intended, fix your --cluster option." % role)
else:
roles[role] = [node]

for opts in variables:
var,val = opts.split('=')
if var == 'nic':
node.active_nics = [ int(v) for v in val.split('+') ]
elif var == "multi":
node.multi = int(val)
elif var == "mode":
node.mode = val
else:
raise Exception("Unknown cluster variable : %s" % var)

#Delete the test file if it still exists (if the remote is the local machine, it won't)
if os.path.exists(experiment_path() + ".access_test"):
os.unlink(experiment_path() + ".access_test")

def parse_variables(args_variables, tags, sec) -> Dict:
variables = {}
for variable in args_variables:
var, val, assign = sec.parse_variable(variable,tags)
if var:
val.assign = assign
variables[var] = val
return variables


def override(args, tests):
for test in tests:
overriden_variables = parse_variables(args.variables, test.tags, test.variables)
overriden_config = parse_variables(args.config, test.tags, test.config)
test.variables.override_all(overriden_variables)
test.config.override_all(overriden_config)
return tests


def npf_writeable_root_path():
path = npf_root_path()
if not os.access(path, os.W_OK):
npf/cluster/factory.py (43 changes: 43 additions & 0 deletions)
@@ -0,0 +1,43 @@
import os
import regex
from npf.cluster.node import Node
from npf.globals import experiment_path, roles


def nodes_for_role(role, self_role=None, self_node=None, default_role_map={}):
if role is None or role == '':
role = 'default'
if role == 'self':
if self_role:
role = self_role
if self_node:
return [self_node]
else:
raise Exception("Using self without a role context. Usually, this happens when self is used in a %file")
if role not in roles:
if role in default_role_map:
role = default_role_map[role]
return roles.get(role, roles['default'])


nodePattern = regex.compile(
"(?P<role>[a-zA-Z0-9]+)=(:?(?P<user>[a-zA-Z0-9]+)@)?(?P<addr>[a-zA-Z0-9.-]+)(:?[:](?P<path>[a-zA-Z0-9_./~-]+))?")


def executor(role, default_role_map):
"""
Return the executor for a given role as associated by the cluster configuration
:param role: A role name
:return: The executor
"""
return nodes_for_role(role, default_role_map)[0].executor


def create_local():
# Create the test file
os.close(os.open(experiment_path() + ".access_test" , os.O_CREAT))
local = Node.makeLocal()
#Delete the test file
os.unlink(experiment_path() + ".access_test")
roles['default'] = [local]
return local
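The helpers in this new file are lifted from npf/__init__.py (see the deletions in that file above). Below is a minimal usage sketch, assuming npf.globals has already been configured via set_args() so that experiment_path() points to a writable directory; the role name 'client' and the --cluster string are illustrative only and not part of this commit:

from npf.cluster.factory import create_local, nodes_for_role

# Register the local machine under the 'default' role, as parse_nodes does.
local = create_local()

# Roles fall back to roles['default'] when undefined, so this returns [local]
# unless a 'client' node was declared via --cluster.
client_nodes = nodes_for_role('client')

# nodePattern accepts --cluster mappings such as:
#   client=user@10.0.0.1:/home/user/npf
# (role 'client', user 'user', address '10.0.0.1', remote path '/home/user/npf')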
npf/cluster/node.py (6 changes: 4 additions & 2 deletions)
@@ -11,8 +11,8 @@
from npf.executor.localexecutor import LocalExecutor
from npf.executor.sshexecutor import SSHExecutor
from npf.executor.executor import Executor
from npf.tests.variable import Variable
from npf.types.units import get_bool
from npf.models.variables.variable import Variable
from npf.models.units import get_bool

class Node:
"""
@@ -254,3 +254,5 @@ def makeSSH(cls, user, addr, path, port=22, nfs=None):
except Exception as e:
print(f"Error while trying to look for NICs on {node.name} : ", e)
return node


npf/enoslib.py (6 changes: 4 additions & 2 deletions)
@@ -2,8 +2,10 @@
from typing import Dict, List
import enoslib as en

import npf.cluster.node
import npf.cmdline
import npf.globals
import npf.parsing
from npf.output import generate_outputs
from npf.output.grapher import Grapher
from npf.test_driver import Comparator, group_series
@@ -25,8 +27,8 @@ def run(npf_script, roles:Dict[str,en.Host], argsv:List[str]=[]):
full_args = ["--test", npf_script]
full_args.extend(argsv)
args = parser.parse_args(full_args)
npf.initialize(args)
npf.create_local()
npf.parsing.initialize(args)
npf.cluster.node.create_local()

#en.set_config(ansible_stdout="regular")

npf/expdesign/zltexp.py (4 changes: 2 additions & 2 deletions)
@@ -4,8 +4,8 @@

import numpy as np
from npf.expdesign.fullexp import FullVariableExpander
from npf.types.dataset import Run
from npf.tests.variable import Variable
from npf.models.dataset import Run
from npf.models.variables.variable import Variable

class OptVariableExpander(FullVariableExpander):
def __init__(self, vlist:Dict[str,Variable], results, overriden, input, margin, all=False):
File renamed without changes.
npf/types/dataset.py → npf/models/dataset.py (7 changes: 4 additions & 3 deletions)
@@ -4,7 +4,8 @@
from collections import OrderedDict
import sys

from npf.types.units import get_numeric, is_numeric
from npf import build_filename
from npf.models.units import get_numeric, is_numeric

if sys.version_info < (3, 7):
from orderedset import OrderedSet
@@ -13,7 +14,7 @@
import natsort
import csv

from npf.types.units import numeric_dict
from npf.models.units import numeric_dict
from npf.output.web.web import prepare_web_export

class Run:
@@ -277,7 +278,7 @@ def prepare_csvs(all_result_types, datasets, statics, run_list, options, kind=No
if result_type in csvs:
type_filename,csvfile,wr = csvs[result_type]
else:
type_filename = npf.build_filename(test, build, options.output if options.output != 'graph' else options.graph_filename, statics, 'csv', type_str=result_type, show_serie=(len(datasets) > 1 or options.show_serie), force_ext=True, data_folder=True, prefix = kind + '-' if kind else None)
type_filename = build_filename(test, build, options.output if options.output != 'graph' else options.graph_filename, statics, 'csv', type_str=result_type, show_serie=(len(datasets) > 1 or options.show_serie), force_ext=True, data_folder=True, prefix = kind + '-' if kind else None)
csvfile = open(type_filename, 'w')
wr = csv.writer(csvfile, delimiter=' ',
quotechar='"', quoting=csv.QUOTE_MINIMAL)
File renamed without changes.
npf/types/units.py → npf/models/units.py (10 changes: 10 additions & 0 deletions)
@@ -112,3 +112,13 @@ def numeric_dict(d):
return d


def dtype(v):
if is_numeric(v):
if is_integer(v):
return int
else:
return float
else:
return str
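A quick sketch of what the newly added dtype helper returns, assuming is_numeric and is_integer behave as their names suggest for string inputs (the example values are illustrative, not from the commit):

from npf.models.units import dtype

assert dtype("42") is int      # numeric and integer
assert dtype("3.14") is float  # numeric but not integer
assert dtype("fast") is str    # not numeric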

