Skip to content

Commit

Permalink
Split everserver functionality between starting server and submitting experiment
Browse files Browse the repository at this point in the history
  • Loading branch information
frode-aarstad committed Dec 12, 2024
1 parent d875982 commit cfa67d4
Show file tree
Hide file tree
Showing 11 changed files with 650 additions and 445 deletions.
16 changes: 10 additions & 6 deletions src/ert/run_models/everest_run_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,14 @@
from collections import defaultdict
from collections.abc import Callable, Mapping
from dataclasses import dataclass
from enum import IntEnum
from pathlib import Path
from types import TracebackType
from typing import (
TYPE_CHECKING,
Any,
Literal,
Callable,
Mapping,
Protocol,
)

Expand Down Expand Up @@ -147,6 +149,10 @@ def from_seba_optimal_result(
)


class EverestRunModelExitCode(IntEnum):
    # Exit codes specific to EverestRunModel, used alongside OptimizerExitCode
    # in the run model's `_exit_code` / `exit_code` union type.
    # The optimization stopped because the configured maximum number of
    # batches was reached (replaces the old "max_batch_num_reached" literal).
    MAX_BATCH_NUM_REACHED = 1


class EverestRunModel(BaseRunModel):
def __init__(
self,
Expand Down Expand Up @@ -175,9 +181,7 @@ def __init__(
)
self._display_all_jobs = display_all_jobs
self._result: OptimalResult | None = None
self._exit_code: Literal["max_batch_num_reached"] | OptimizerExitCode | None = (
None
)
self._exit_code: EverestRunModelExitCode | OptimizerExitCode | None = None
self._max_batch_num_reached = False
self._simulator_cache: SimulatorCache | None = None
if (
Expand Down Expand Up @@ -285,7 +289,7 @@ def run_experiment(
)

self._exit_code = (
"max_batch_num_reached"
EverestRunModelExitCode.MAX_BATCH_NUM_REACHED
if self._max_batch_num_reached
else optimizer_exit_code
)
Expand Down Expand Up @@ -431,7 +435,7 @@ def description(cls) -> str:
@property
def exit_code(
self,
) -> Literal["max_batch_num_reached"] | OptimizerExitCode | None:
) -> EverestRunModelExitCode | OptimizerExitCode | None:
return self._exit_code

@property
Expand Down
14 changes: 12 additions & 2 deletions src/everest/bin/everest_script.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,12 @@
import threading
from functools import partial

from ert.run_models.everest_run_model import EverestRunModel
from everest.config import EverestConfig, ServerConfig
from everest.detached import (
ServerStatus,
everserver_status,
server_is_running,
start_experiment,
start_server,
wait_for_server,
)
Expand Down Expand Up @@ -114,7 +114,11 @@ async def run_everest(options):
except ValueError as exc:
raise SystemExit(f"Config validation error: {exc}") from exc

if EverestRunModel.create(options.config).check_if_runpath_exists():
if (
options.config.simulation_dir is not None
and os.path.exists(options.config.simulation_dir)
and any(os.listdir(options.config.simulation_dir))
):
warn_user_that_runpath_is_nonempty()

try:
Expand All @@ -128,6 +132,12 @@ async def run_everest(options):
print("Waiting for server ...")
wait_for_server(options.config.output_dir, timeout=600)
print("Everest server found!")

start_experiment(
server_context=ServerConfig.get_server_context(options.config.output_dir),
config=options.config,
)

run_detached_monitor(
server_context=ServerConfig.get_server_context(options.config.output_dir),
optimization_output_dir=options.config.optimization_output_dir,
Expand Down
20 changes: 20 additions & 0 deletions src/everest/detached/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
OPT_PROGRESS_ID,
SIM_PROGRESS_ENDPOINT,
SIM_PROGRESS_ID,
START_ENDPOINT,
STOP_ENDPOINT,
)

Expand All @@ -52,6 +53,25 @@
# everest.log file instead


def start_experiment(
    server_context: Tuple[str, str, Tuple[str, str]],
    config: EverestConfig,
) -> None:
    """Ask an already-running Everest server to start the experiment.

    Posts the full Everest configuration (as a dict) to the server's start
    endpoint and raises if the server rejects it or cannot be reached.

    Args:
        server_context: ``(url, cert_path, (username, password))`` as returned
            by ``ServerConfig.get_server_context``.
        config: The Everest configuration describing the experiment to run.

    Raises:
        ValueError: If the request fails for any reason (connection error,
            non-2xx response, malformed context). The original exception is
            attached as ``__cause__`` for diagnosis.
    """
    try:
        url, cert, auth = server_context
        start_endpoint = "/".join([url, START_ENDPOINT])
        response = requests.post(
            start_endpoint,
            verify=cert,
            auth=auth,
            proxies=PROXY,  # type: ignore
            json=config.to_dict(),
        )
        # Surface HTTP-level failures (4xx/5xx) as exceptions.
        response.raise_for_status()
    except Exception as err:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate; chain the cause (instead of `from None`) so the
        # underlying failure is visible in tracebacks. Callers still catch
        # ValueError, so the external contract is unchanged.
        raise ValueError("Failed to start experiment") from err


async def start_server(config: EverestConfig, debug: bool = False) -> Driver:
"""
Start an Everest server running the optimization defined in the config
Expand Down
Loading

0 comments on commit cfa67d4

Please sign in to comment.