
Commit 149180d: "Remove plotting for now"
jacobbieker committed Dec 5, 2023 (parent: 66de58b)
Showing 1 changed file with 9 additions and 9 deletions.
pvnet/models/base_model.py (18 changes: 9 additions & 9 deletions)

@@ -428,9 +428,9 @@ def _training_accumulate_log(self, batch, batch_idx, losses, y_hat):
 
         # We only create the figure every 8 log steps
         # This was reduced as it was creating figures too often
-        if grad_batch_num % (8 * self.trainer.log_every_n_steps) == 0:
-            fig = plot_batch_forecasts(batch, y_hat, batch_idx, quantiles=self.output_quantiles)
-            fig.savefig("latest_logged_train_batch.png")
+        #if grad_batch_num % (8 * self.trainer.log_every_n_steps) == 0:
+        #    fig = plot_batch_forecasts(batch, y_hat, batch_idx, quantiles=self.output_quantiles)
+        #    fig.savefig("latest_logged_train_batch.png")
 
     def training_step(self, batch, batch_idx):
         """Run training step"""
@@ -485,13 +485,13 @@ def validation_step(self, batch: dict, batch_idx):
         y_hat = self._val_y_hats.flush()
         batch = self._val_batches.flush()
 
-        fig = plot_batch_forecasts(batch, y_hat, quantiles=self.output_quantiles)
+        #fig = plot_batch_forecasts(batch, y_hat, quantiles=self.output_quantiles)
 
-        self.logger.experiment.log(
-            {
-                f"val_forecast_samples/batch_idx_{accum_batch_num}": wandb.Image(fig),
-            }
-        )
+        #self.logger.experiment.log(
+        #    {
+        #        f"val_forecast_samples/batch_idx_{accum_batch_num}": wandb.Image(fig),
+        #    }
+        #)
         del self._val_y_hats
         del self._val_batches
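
For context, below is a minimal, hypothetical sketch of how this figure logging could be gated behind an opt-in flag instead of being commented out. The quantiles argument, the wandb logging key, and the logger.experiment.log / wandb.Image usage mirror the lines removed above; the helper name, the plot_fn parameter, and the enabled flag are assumptions and not part of the repository.

import wandb


def maybe_log_forecast_figure(
    logger, plot_fn, batch, y_hat, quantiles, accum_batch_num, enabled=False
):
    """Log a forecast figure to wandb only when explicitly enabled.

    Sketch only: `plot_fn` stands in for plot_batch_forecasts from the diff
    above, and `enabled` would come from model or trainer configuration.
    """
    if not enabled:
        return
    # Build the forecast figure and send it to the wandb run behind the logger.
    fig = plot_fn(batch, y_hat, quantiles=quantiles)
    logger.experiment.log(
        {f"val_forecast_samples/batch_idx_{accum_batch_num}": wandb.Image(fig)}
    )

Called from validation_step, a guard like this keeps the plotting path in the code and lets it be switched back on from configuration rather than by uncommenting.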
