Skip to content

Commit

Permalink
(dev) write csv for easier inspection
Browse files Browse the repository at this point in the history
  • Loading branch information
yngve-sk committed Nov 18, 2024
1 parent c563dcd commit d1888a8
Showing 1 changed file with 44 additions and 5 deletions.
49 changes: 44 additions & 5 deletions src/everest/everest_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,9 @@ class EverestStorageDataFrames:
nonlinear_constraints: Optional[polars.DataFrame] = None
realization_weights: Optional[polars.DataFrame] = None

def write_to_experiment(self, experiment: _OptimizerOnlyExperiment):
def write_to_experiment(
self, experiment: _OptimizerOnlyExperiment, write_csv=False
):
# Stored in ensembles instead
# self.objective_for_batch.write_parquet(path / "objective_data.parquet")
# self.gradient_evaluation.write_parquet(path / "gradient_evaluation.parquet")
Expand Down Expand Up @@ -160,8 +162,35 @@ def write_to_experiment(self, experiment: _OptimizerOnlyExperiment):
)

for batch_data in self.batches:
ensemble = experiment.get_ensemble_by_name(f"batch_{batch_data.batch_id}")
for df_key, df in batch_data.existing_dataframes.items():
df.write_parquet(experiment.optimizer_mount_point / f"{df_key}.parquet")
df.write_parquet(ensemble.optimizer_mount_point / f"{df_key}.parquet")

if write_csv:
self.initial_values.write_csv(
experiment.optimizer_mount_point / "initial_values.csv"
)

self.objective_functions.write_csv(
experiment.optimizer_mount_point / "objective_functions.csv"
)

if self.nonlinear_constraints is not None:
self.nonlinear_constraints.write_csv(
experiment.optimizer_mount_point / "nonlinear_constraints.csv"
)

if self.realization_weights is not None:
self.realization_weights.write_csv(
experiment.optimizer_mount_point / "realization_weights.csv"
)

for batch_data in self.batches:
ensemble = experiment.get_ensemble_by_name(
f"batch_{batch_data.batch_id}"
)
for df_key, df in batch_data.existing_dataframes.items():
df.write_csv(ensemble.optimizer_mount_point / f"{df_key}.csv")

def read_from_experiment(self, experiment: _OptimizerOnlyExperiment) -> None:
self.initial_values = polars.read_parquet(
Expand Down Expand Up @@ -250,11 +279,20 @@ def __init__(
self._control_ensemble_id = 0
self._gradient_ensemble_id = 0

self._output_dir = output_dir
self._merit_file = merit_file

self._experiment = _OptimizerOnlyExperiment(output_dir)
self._dataframes = EverestStorageDataFrames()

def write_to_output_dir(self) -> None:
    """Persist all stored optimizer dataframes under the output directory.

    Opens (or creates) the optimizer-only experiment rooted at
    ``self._output_dir`` and delegates the actual serialization to
    the dataframes container.
    """
    experiment = _OptimizerOnlyExperiment(self._output_dir)

    # CSV copies are emitted alongside the parquet files purely for
    # dev/debugging/quick manual inspection.
    self._dataframes.write_to_experiment(experiment, write_csv=True)

def read_from_output_dir(self) -> None:
    """Load previously persisted optimizer dataframes from the output directory.

    Reconstructs the optimizer-only experiment rooted at ``self._output_dir``
    and populates ``self._dataframes`` from it in place.
    """
    experiment = _OptimizerOnlyExperiment(self._output_dir)
    self._dataframes.read_from_experiment(experiment)

def observe_optimizer(self, optimizer: BasicOptimizer) -> None:
# Q: Do these observers have to be explicitly disconnected/destroyed?
optimizer.add_observer(EventType.START_OPTIMIZER_STEP, self._initialize)
Expand Down Expand Up @@ -782,7 +820,8 @@ def _handle_finished_event(self, event):
polars.lit(merit_value).alias("merit_value")
)

self._dataframes.write_to_experiment(self.experiment)
self.write_to_output_dir()
print("yo")

def get_optimal_result(self) -> Optional[OptimalResult]:
# Only used in tests, not super important
Expand Down

0 comments on commit d1888a8

Please sign in to comment.