Only benchmark small configs
eivindjahren committed Dec 19, 2024
1 parent 8e6570e commit 096032f
Showing 1 changed file with 22 additions and 7 deletions.
29 changes: 22 additions & 7 deletions tests/ert/performance_tests/test_obs_and_responses_performance.py
@@ -360,14 +360,15 @@ class _Benchmark:
 @pytest.fixture(
     params=[
         (
+            b.alias,
             b.config,
             b.expected_join_performance,
         )
         for b in _BenchMarks
     ],
 )
 def setup_benchmark(tmp_path, request):
-    config, expected_performance = request.param
+    alias, config, expected_performance = request.param
     info = create_experiment_args(
         config.num_parameters,
         config.num_gen_data_keys,
@@ -397,6 +398,7 @@ def setup_benchmark(tmp_path, request):
         ens.save_response("gen_data", info.gen_data_responses.clone(), real)

     yield (
+        alias,
         ens,
         experiment.observation_keys,
         np.array(range(config.num_realizations)),
@@ -407,7 +409,7 @@
 def test_memory_performance_of_joining_observations_and_responses(
     setup_benchmark, tmp_path
 ):
-    ens, observation_keys, mask, expected_performance = setup_benchmark
+    _, ens, observation_keys, mask, expected_performance = setup_benchmark

     with memray.Tracker(tmp_path / "memray.bin"):
         ens.get_observations_and_responses(observation_keys, mask)
@@ -420,7 +422,10 @@ def test_memory_performance_of_joining_observations_and_responses(
 def test_time_performance_of_joining_observations_and_responses(
     setup_benchmark, benchmark
 ):
-    ens, observation_keys, mask, _ = setup_benchmark
+    alias, ens, observation_keys, mask, _ = setup_benchmark
+
+    if alias not in ["small", "medium"]:
+        pytest.skip()

     def run():
         ens.get_observations_and_responses(observation_keys, mask)
@@ -431,14 +436,15 @@ def run():
 @pytest.fixture(
     params=[
         (
+            b.alias,
             b.config,
             b.expected_update_performance,
         )
         for b in _BenchMarks
     ],
 )
 def setup_es_benchmark(tmp_path, request):
-    config, expected_performance = request.param
+    alias, config, expected_performance = request.param
     info = create_experiment_args(
         config.num_parameters,
         config.num_gen_data_keys,
@@ -483,11 +489,17 @@ def setup_es_benchmark(tmp_path, request):
         iteration=1,
     )

-    yield prior, posterior, info.gen_kw_config.name, expected_performance
+    yield (
+        alias,
+        prior,
+        posterior,
+        info.gen_kw_config.name,
+        expected_performance,
+    )


 def test_memory_performance_of_doing_es_update(setup_es_benchmark, tmp_path):
-    prior, posterior, gen_kw_name, expected_performance = setup_es_benchmark
+    _, prior, posterior, gen_kw_name, expected_performance = setup_es_benchmark
     with memray.Tracker(tmp_path / "memray.bin"):
         smoother_update(
             prior,
@@ -502,7 +514,10 @@ def test_memory_performance_of_doing_es_update(setup_es_benchmark, tmp_path):


 def test_speed_performance_of_doing_es_update(setup_es_benchmark, benchmark):
-    prior, posterior, gen_kw_name, _ = setup_es_benchmark
+    alias, prior, posterior, gen_kw_name, _ = setup_es_benchmark
+
+    if alias not in ["small", "medium"]:
+        pytest.skip()

     def run():
         smoother_update(
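For readers outside the repository, the pattern this commit applies is a parametrized fixture that carries a human-readable alias for each benchmark config, plus an early pytest.skip() in the timing tests for everything except the small configs. Below is a minimal, self-contained sketch of that pattern; the _Benchmark fields, the concrete sizes, and the summing workload are illustrative assumptions rather than code from ert, and the benchmark fixture assumes pytest-benchmark is installed.

from dataclasses import dataclass

import pytest


@dataclass
class _Benchmark:
    alias: str
    num_items: int  # illustrative stand-in for the real config fields


_BenchMarks = [
    _Benchmark("small", 100),
    _Benchmark("medium", 10_000),
    _Benchmark("large", 1_000_000),
]


@pytest.fixture(params=[(b.alias, b.num_items) for b in _BenchMarks])
def setup_benchmark(request):
    alias, num_items = request.param
    # Expensive setup runs for every parametrization; the alias is yielded
    # alongside the data so each test can decide whether to run.
    yield alias, list(range(num_items))


def test_memory_performance(setup_benchmark):
    # Memory-style checks still cover every config.
    _, data = setup_benchmark
    assert sum(data) >= 0


def test_time_performance(setup_benchmark, benchmark):
    alias, data = setup_benchmark
    # Timing benchmarks only run for the small configs, mirroring the commit.
    if alias not in ["small", "medium"]:
        pytest.skip()
    benchmark(sum, data)

Skipping inside the test, rather than dropping the parameters from the fixture, keeps the memory tests covering all configs while only the timing benchmarks are restricted.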
