Skip to content

Commit

Permalink
Merge pull request #5 from addelany/master
Browse files Browse the repository at this point in the history
add new configurations
  • Loading branch information
addelany authored Feb 29, 2024
2 parents c5caac8 + 8a5d95e commit 52489ad
Show file tree
Hide file tree
Showing 6 changed files with 132 additions and 29 deletions.
22 changes: 12 additions & 10 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,10 +1,21 @@
flare_tempdir/*
forecasts/*
data_processed/*
data_raw/*
drivers/*
analysis/*
automation/*.log
restart/*
targets/*
scores/*


# History files
.Rhistory
.Rapp.history

# Session Data files
.RData
.RDataTmp

# User-specific files
.Ruserdata
Expand Down Expand Up @@ -38,12 +49,3 @@ vignettes/*.pdf

# R Environment Variables
.Renviron

# pkgdown site
docs/

# translation temp files
po/*~

# RStudio Connect folder
rsconnect/
Empty file added UntitledR.R
Empty file.
27 changes: 12 additions & 15 deletions configuration/default/configure_flare_feea.yml
Original file line number Diff line number Diff line change
@@ -1,25 +1,22 @@
s3:
drivers:
endpoint: s3.flare-forecast.org
bucket: drivers/noaa/gefs-v12-reprocess
inflow_drivers:
endpoint: s3.flare-forecast.org
bucket: drivers/inflow/feea/parquet
endpoint: renc.osn.xsede.org
bucket: bio230121-bucket01/flare/drivers/met/gefs-v12
targets:
endpoint: s3.flare-forecast.org
bucket: targets/feea
endpoint: renc.osn.xsede.org
bucket: bio230121-bucket01/flare/targets/ccre # NOTE(review): this file configures site feea, but the targets bucket points at ccre — likely a copy-paste from the CCRE config; confirm and change to .../targets/feea if unintended.
forecasts:
endpoint: s3.flare-forecast.org
bucket: forecasts
endpoint: renc.osn.xsede.org
bucket: bio230121-bucket01/flare/forecasts/netcdf
forecasts_parquet:
endpoint: s3.flare-forecast.org
bucket: forecasts/parquet
endpoint: renc.osn.xsede.org
bucket: bio230121-bucket01/flare/forecasts/parquet
warm_start:
endpoint: s3.flare-forecast.org
bucket: restart/feea
endpoint: renc.osn.xsede.org
bucket: bio230121-bucket01/flare/restart
scores:
endpoint: s3.flare-forecast.org
bucket: scores/parquet
endpoint: renc.osn.xsede.org
bucket: bio230121-bucket01/flare/scores/parquet

location:
site_id: feea
Expand Down
6 changes: 3 additions & 3 deletions configuration/default/configure_run_feea.yml
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
restart_file: .na
start_datetime: 2023-06-15 00:00:00
start_datetime: 2024-02-01 00:00:00
end_datetime: .na
forecast_start_datetime: 2023-08-15 00:00:00
forecast_start_datetime: 2024-02-27 00:00:00
forecast_horizon: 34.0
sim_name: feea_test2
sim_name: glm_flare_v1
configure_flare: configure_flare_feea.yml
configure_obs: observation_processing_feea.yml
use_s3: TRUE
86 changes: 86 additions & 0 deletions memprof_flare.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
# Memory-profiling driver for the FLARE forecast workflow: wraps the full
# targets -> forecast loop in a function so lineprof can capture a
# line-by-line allocation profile (see the lineprof() call at file bottom).
library(lineprof)

library(tidyverse)
library(lubridate)
# Project root; all relative paths below (configs, workflows/, targets/)
# resolve against it.
lake_directory <- here::here()
# NOTE(review): setwd() in a script is generally discouraged; kept because the
# source() call inside flare_test() relies on paths relative to the root.
setwd(lake_directory)
# Site identifier used to build configuration file names ("feea").
# NOTE(review): c() around a single string is unnecessary but harmless.
forecast_site <- c("feea")

flare_test <- function(){
  # Run the full FLARE cycle — generate targets, then repeatedly run the
  # forecast and roll the run configuration forward one day — so the whole
  # workflow can be profiled as a single call.
  # Relies on globals defined above: forecast_site, lake_directory.
  configure_run_file <- paste0("configure_run_",forecast_site,".yml")
  config_set_name <- "default"

  config <- FLAREr::set_configuration(configure_run_file,lake_directory, config_set_name = config_set_name)

  # Generate the targets (side effect: writes cleaned target files on disk)
  source('workflows/default/generate_targets.R')
  # Read in the targets
  # Map each modeled depth to an integer bin label so raw observations can be
  # snapped onto the model's depth grid via the join below.
  cuts <- tibble::tibble(cuts = as.integer(factor(config$model_settings$modeled_depths)),
                         depth = config$model_settings$modeled_depths)

  cleaned_insitu_file <- file.path(lake_directory, "targets", config$location$site_id, config$da_setup$obs_filename)
  # Bin observations to modeled depths: keep midnight (hour == 0) records,
  # average duplicates within a depth bin, then overwrite the cleaned targets
  # file in place with the binned result.
  readr::read_csv(cleaned_insitu_file, show_col_types = FALSE) |>
    dplyr::mutate(cuts = cut(depth, breaks = config$model_settings$modeled_depths, include.lowest = TRUE, right = FALSE, labels = FALSE)) |>
    dplyr::filter(lubridate::hour(datetime) == 0) |>
    dplyr::group_by(cuts, variable, datetime, site_id) |>
    dplyr::summarize(observation = mean(observation, na.rm = TRUE), .groups = "drop") |>
    dplyr::left_join(cuts, by = "cuts") |>
    dplyr::select(site_id, datetime, variable, depth, observation) |>
    write_csv(cleaned_insitu_file)

  # Move targets to s3 bucket

  message("Successfully generated targets")

  FLAREr::put_targets(site_id = config$location$site_id,
                      cleaned_insitu_file = cleaned_insitu_file,
                      cleaned_met_file = NA,
                      cleaned_inflow_file = NA,
                      use_s3 = config$run_config$use_s3,
                      config = config)

  if(config$run_config$use_s3){
    message("Successfully moved targets to s3 bucket")
  }

  # Forecast loop. NOTE(review): the first iteration always runs before any
  # NOAA-availability check; confirm that is intended.
  noaa_ready <- TRUE
  while(noaa_ready){

    # Re-read the run config, which update_run_config2() below rewrites at the
    # end of each iteration — this is how the loop advances day by day.
    config <- FLAREr::set_configuration(configure_run_file,lake_directory, config_set_name = config_set_name)

    # Run FLARE
    output <- FLAREr::run_flare(lake_directory = lake_directory,
                                configure_run_file = configure_run_file,
                                config_set_name = config_set_name)

    # Advance the forecast window one day; start_datetime trails by one day.
    forecast_start_datetime <- lubridate::as_datetime(config$run_config$forecast_start_datetime) + lubridate::days(1)
    start_datetime <- lubridate::as_datetime(config$run_config$forecast_start_datetime) - lubridate::days(1)
    # Restart file is named for the cycle just completed:
    # as_date(forecast_start_datetime) - 1 day == the old forecast date.
    restart_file <- paste0(config$location$site_id,"-", (lubridate::as_date(forecast_start_datetime)- days(1)), "-",config$run_config$sim_name ,".nc")

    # Persist the advanced dates and restart file (locally and, when use_s3 is
    # TRUE, to the warm_start bucket) so the next iteration resumes from here.
    FLAREr::update_run_config2(lake_directory = lake_directory,
                               configure_run_file = configure_run_file,
                               restart_file = restart_file,
                               start_datetime = start_datetime,
                               end_datetime = NA,
                               forecast_start_datetime = forecast_start_datetime,
                               forecast_horizon = config$run_config$forecast_horizon,
                               sim_name = config$run_config$sim_name,
                               site_id = config$location$site_id,
                               configure_flare = config$run_config$configure_flare,
                               configure_obs = config$run_config$configure_obs,
                               use_s3 = config$run_config$use_s3,
                               bucket = config$s3$warm_start$bucket,
                               endpoint = config$s3$warm_start$endpoint,
                               use_https = TRUE)

    # Disabled healthcheck ping (healthchecks.io); left for reference.
    #RCurl::url.exists("https://hc-ping.com/551392ce-43f3-49b1-8a57-6a60bad1c377", timeout = 5)

    # Continue only while NOAA driver data for the next cycle is present.
    noaa_ready <- FLAREr::check_noaa_present_arrow(lake_directory,
                                                   configure_run_file,
                                                   config_set_name = config_set_name)
  }
}


a <- lineprof(flare_test())
20 changes: 19 additions & 1 deletion workflows/default/forecast_workflow.R
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,31 @@ lake_directory <- here::here()
setwd(lake_directory)
forecast_site <- c("feea")

# S3 credentials/region for the OSN-hosted buckets. The region "renc" plus
# endpoint "osn.xsede.org" presumably compose to renc.osn.xsede.org as used in
# the flare configs — TODO confirm against the S3 client's conventions.
Sys.setenv("AWS_DEFAULT_REGION" = "renc",
"AWS_S3_ENDPOINT" = "osn.xsede.org",
"USE_HTTPS" = TRUE)

# Build the per-site run-config filename from the forecast_site global.
configure_run_file <- paste0("configure_run_",forecast_site,".yml")
config_set_name <- "default"

config <- FLAREr::set_configuration(configure_run_file,lake_directory, config_set_name = config_set_name)

# Generate the targets
source('workflows/default/generate_targets.R')
config_obs <- FLAREr::initialize_obs_processing(lake_directory,
observation_yml = paste0("observation_processing_",forecast_site,".yml"),
config_set_name = config_set_name)

# Ensure the site's targets directory exists before writing into it.
dir.create(file.path(lake_directory, "targets", config$location$site_id), showWarnings = FALSE)

# NOTE(review): despite the name, this holds a tibble (the downloaded
# observations), not a file path — consider renaming to avoid confusion with
# the path variable of the same name used elsewhere in this repo.
cleaned_insitu_file <- read_csv("https://raw.githubusercontent.com/RicardoDkIT/observations_feea/main/Observations_feea.csv")


# Stage the downloaded observations as <site>-targets-insitu.csv locally.
write_csv(cleaned_insitu_file,file.path(lake_directory,"targets",
config$location$site_id,
paste0(config$location$site_id,"-targets-insitu.csv")))



# Read in the targets
# Map each modeled depth to an integer bin label (same binning scheme as the
# downstream observation processing).
cuts <- tibble::tibble(cuts = as.integer(factor(config$model_settings$modeled_depths)),
depth = config$model_settings$modeled_depths)
Expand Down

0 comments on commit 52489ad

Please sign in to comment.