Skip to content

Commit

Permalink
Merge branch 'master' into update-syntax/cp2k
Browse files Browse the repository at this point in the history
  • Loading branch information
teojgo authored Dec 20, 2021
2 parents d84fd35 + d4021cc commit ea82578
Show file tree
Hide file tree
Showing 13 changed files with 354 additions and 238 deletions.
4 changes: 2 additions & 2 deletions config/cscs.py
Original file line number Diff line number Diff line change
Expand Up @@ -291,7 +291,7 @@
},
{
'type': 'Singularity',
'modules': ['singularity/3.6.4-daint']
'modules': ['singularity/3.8.0-daint']
}
],
'modules': ['daint-gpu'],
Expand Down Expand Up @@ -327,7 +327,7 @@
},
{
'type': 'Singularity',
'modules': ['singularity/3.6.4-daint']
'modules': ['singularity/3.8.0-daint']
}
],
'modules': ['daint-mc'],
Expand Down
98 changes: 58 additions & 40 deletions cscs-checks/apps/cpmd/cpmd_check.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,62 +3,80 @@
#
# SPDX-License-Identifier: BSD-3-Clause

import contextlib
import reframe as rfm
import reframe.utility.sanity as sn


@rfm.simple_test
class CPMDCheck(rfm.RunOnlyRegressionTest):
    '''CPMD check (C4H6 metadynamics).

    Runs a small (9-task) or large (16-task) CPMD metadynamics job on the
    GPU partitions and checks both the final classical energy and the
    wall-clock time reported by CPMD.
    '''

    scale = parameter(['small', 'large'])
    descr = 'CPMD check (C4H6 metadynamics)'
    maintainers = ['AJ', 'LM']
    tags = {'production'}
    valid_systems = ['daint:gpu']
    num_tasks_per_node = 1
    valid_prog_environs = ['builtin']
    modules = ['CPMD']
    executable = 'cpmd.x'
    executable_opts = ['ana_c4h6.in > stdout.txt']
    readonly_files = ['ana_c4h6.in', 'C_MT_BLYP', 'H_MT_BLYP']
    use_multithreading = True
    strict_check = False
    extra_resources = {
        'switches': {
            'num_switches': 1
        }
    }

    # Reference wall-clock times keyed by task count (as a string) and GPU
    # architecture; the tuples are ReFrame (value, lower, upper, unit)
    # references.
    allref = {
        '9': {
            'p100': {
                'time': (285.5, None, 0.20, 's')
            },
        },
        '16': {
            'p100': {
                'time': (245.0, None, 0.59, 's')
            }
        }
    }

    @run_after('init')
    def setup_by_scale(self):
        '''Select systems and task count according to the ``scale`` parameter.'''
        if self.scale == 'small':
            self.valid_systems += ['dom:gpu']
            self.num_tasks = 9
        else:
            self.num_tasks = 16

    @run_before('performance')
    def set_perf_reference(self):
        '''Pick the performance reference matching this partition.

        Falls back to no reference (suppressed ``KeyError``) when the
        task-count/architecture combination is not listed in ``allref``.
        '''
        proc = self.current_partition.processor
        pname = self.current_partition.fullname
        if pname in ('daint:gpu', 'dom:gpu'):
            # Both Daint and Dom GPU partitions carry P100 cards.
            arch = 'p100'
        else:
            arch = proc.arch

        with contextlib.suppress(KeyError):
            # ``allref`` keys are strings and its leaves are keyed by the
            # 'time' metric, matching the ``time`` performance function below.
            self.reference = {
                pname: self.allref[str(self.num_tasks)][arch]
            }

    @sanity_function
    def assert_energy_diff(self):
        '''Check that the final classical energy is close to the reference.'''
        # OpenMP version of CPMD segfaults
        # self.variables = { 'OMP_NUM_THREADS' : '8' }
        energy = sn.extractsingle(
            r'CLASSICAL ENERGY\s+-(?P<result>\S+)',
            'stdout.txt', 'result', float)
        energy_reference = 25.81
        energy_diff = sn.abs(energy - energy_reference)
        return sn.assert_lt(energy_diff, 0.26)

    @performance_function('s')
    def time(self):
        '''Total wall-clock time reported on CPMD's timing summary line.'''
        return sn.extractsingle(r'^ cpmd(\s+[\d\.]+){3}\s+(?P<perf>\S+)',
                                'stdout.txt', 'perf', float)
180 changes: 102 additions & 78 deletions cscs-checks/apps/lammps/lammps_check.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,100 +9,137 @@
import reframe.utility.sanity as sn


class LAMMPSCheck(rfm.RunOnlyRegressionTest):
    '''Base class for the LAMMPS Lennard-Jones benchmark checks.

    Subclasses select the executable and per-partition references; this
    class provides the common environment setup, the sanity check on the
    final energy and the timesteps/s performance metric.
    '''

    scale = parameter(['small', 'large'])
    variant = parameter(['maint', 'prod'])
    modules = ['cray-python', 'LAMMPS']
    tags = {'scs', 'external-resources'}
    maintainers = ['LM']
    strict_check = False
    extra_resources = {
        'switches': {
            'num_switches': 1
        }
    }

    @run_after('init')
    def setup_by_system(self):
        '''Pick the input location and programming environment per system.'''
        # Reset sources dir relative to the SCS apps prefix
        self.sourcesdir = os.path.join(self.current_system.resourcesdir,
                                       'LAMMPS')
        if self.current_system.name in ['eiger', 'pilatus']:
            self.valid_prog_environs = ['cpeGNU']
        else:
            self.valid_prog_environs = ['builtin']

    @performance_function('timesteps/s')
    def perf(self):
        '''Simulation throughput as reported by LAMMPS.'''
        return sn.extractsingle(r'\s+(?P<perf>\S+) timesteps/s',
                                self.stdout, 'perf', float)

    @sanity_function
    def assert_energy_diff(self):
        '''Check run completion and the energy at timestep 500000.'''
        energy_reference = -4.6195
        energy = sn.extractsingle(
            r'\s+500000(\s+\S+){3}\s+(?P<energy>\S+)\s+\S+\s\n',
            self.stdout, 'energy', float)
        energy_diff = sn.abs(energy - energy_reference)
        return sn.all([
            sn.assert_found(r'Total wall time:', self.stdout),
            sn.assert_lt(energy_diff, 6e-4)
        ])


@rfm.simple_test
class LAMMPSGPUCheck(LAMMPSCheck):
    '''GPU flavour of the LAMMPS Lennard-Jones benchmark.'''

    valid_systems = ['daint:gpu']
    executable = 'lmp_mpi'
    executable_opts = ['-sf gpu', '-pk gpu 1', '-in in.lj.gpu']
    variables = {'CRAY_CUDA_MPS': '1'}
    num_gpus_per_node = 1

    # Performance references keyed by test variant and scale; the tuples
    # are ReFrame (value, lower, upper, unit) references.
    references_by_variant = {
        'maint': {
            'small': {
                'dom:gpu': {'perf': (3457, -0.10, None, 'timesteps/s')},
                'daint:gpu': {'perf': (2524, -0.10, None, 'timesteps/s')}
            },
            'large': {
                'daint:gpu': {'perf': (3832, -0.05, None, 'timesteps/s')}
            }
        },
        'prod': {
            'small': {
                'dom:gpu': {'perf': (3132, -0.05, None, 'timesteps/s')},
                'daint:gpu': {'perf': (2400, -0.40, None, 'timesteps/s')}
            },
            'large': {
                'daint:gpu': {'perf': (3260, -0.50, None, 'timesteps/s')}
            }
        },
    }

    @run_after('init')
    def setup_by_variant(self):
        '''Set description, job size, references and tags per variant.'''
        self.descr = (f'LAMMPS GPU check (version: {self.scale}, '
                      f'{self.variant})')
        if self.scale == 'small':
            self.valid_systems += ['dom:gpu']
            self.num_tasks = 12
            self.num_tasks_per_node = 2
        else:
            self.num_tasks = 32
            self.num_tasks_per_node = 2

        self.reference = self.references_by_variant[self.variant][self.scale]
        self.tags |= {
            'maintenance' if self.variant == 'maint' else 'production'
        }


@rfm.simple_test
class LAMMPSCPUCheck(LAMMPSCheck):
valid_systems = ['daint:mc', 'eiger:mc', 'pilatus:mc']
references_by_variant = {
'maint': {
'small': {
'dom:mc': {'perf': (4394, -0.05, None, 'timesteps/s')},
'daint:mc': {'perf': (3824, -0.10, None, 'timesteps/s')},
'eiger:mc': {'perf': (4500, -0.10, None, 'timesteps/s')},
'pilatus:mc': {'perf': (5000, -0.10, None, 'timesteps/s')}
},
'prod': {
'small': {
'dom:gpu': {'perf': (3132, -0.05, None, 'timesteps/s')},
'daint:gpu': {'perf': (2400, -0.40, None, 'timesteps/s')}
},
'large': {
'daint:gpu': {'perf': (3260, -0.50, None, 'timesteps/s')}
}
'large': {
'daint:mc': {'perf': (5310, -0.65, None, 'timesteps/s')},
'eiger:mc': {'perf': (6500, -0.10, None, 'timesteps/s')},
'pilatus:mc': {'perf': (7500, -0.10, None, 'timesteps/s')}
}
},
'prod': {
'small': {
'dom:mc': {'perf': (4394, -0.05, None, 'timesteps/s')},
'daint:mc': {'perf': (3824, -0.10, None, 'timesteps/s')},
'eiger:mc': {'perf': (4500, -0.10, None, 'timesteps/s')},
'pilatus:mc': {'perf': (5000, -0.10, None, 'timesteps/s')}
},
'large': {
'daint:mc': {'perf': (5310, -0.65, None, 'timesteps/s')},
'eiger:mc': {'perf': (6500, -0.10, None, 'timesteps/s')},
'pilatus:mc': {'perf': (7500, -0.10, None, 'timesteps/s')}
}
}
self.reference = references[variant][scale]
self.tags |= {'maintenance' if variant == 'maint' else 'production'}
}


@rfm.parameterized_test(*([s, v]
for s in ['small', 'large']
for v in ['prod']))
class LAMMPSCPUCheck(LAMMPSBaseCheck):
def __init__(self, scale, variant):
super().__init__()
self.valid_systems = ['daint:mc', 'eiger:mc', 'pilatus:mc']
@run_after('init')
def setup_by_variant(self):
self.descr = (f'LAMMPS CPU check (version: {self.scale}, '
f'{self.variant})')
if self.current_system.name in ['eiger', 'pilatus']:
self.executable = 'lmp_mpi'
self.executable_opts = ['-in in.lj.cpu']
else:
self.executable = 'lmp_omp'
self.executable_opts = ['-sf omp', '-pk omp 1', '-in in.lj.cpu']

self.scale = scale
if scale == 'small':
if self.scale == 'small':
self.valid_systems += ['dom:mc']
self.num_tasks = 216
self.num_tasks_per_node = 36
Expand All @@ -114,20 +151,7 @@ def __init__(self, scale, variant):
self.num_tasks_per_node = 128
self.num_tasks = 256 if self.scale == 'small' else 512

references = {
'prod': {
'small': {
'dom:mc': {'perf': (4394, -0.05, None, 'timesteps/s')},
'daint:mc': {'perf': (3824, -0.10, None, 'timesteps/s')},
'eiger:mc': {'perf': (4500, -0.10, None, 'timesteps/s')},
'pilatus:mc': {'perf': (5000, -0.10, None, 'timesteps/s')}
},
'large': {
'daint:mc': {'perf': (5310, -0.65, None, 'timesteps/s')},
'eiger:mc': {'perf': (6500, -0.10, None, 'timesteps/s')},
'pilatus:mc': {'perf': (7500, -0.10, None, 'timesteps/s')}
}
},
self.reference = self.references_by_variant[self.variant][self.scale]
self.tags |= {
'maintenance' if self.variant == 'maint' else 'production'
}
self.reference = references[variant][scale]
self.tags |= {'maintenance' if variant == 'maint' else 'production'}
Loading

0 comments on commit ea82578

Please sign in to comment.