
Merge pull request #12 from addelany/main
add flare v3 workflow
addelany authored Sep 6, 2024
2 parents 3a13249 + 0dad9a3 commit d8cec41
Showing 32 changed files with 1,725 additions and 1,383 deletions.
22 changes: 22 additions & 0 deletions .github/workflows/run_flare_v3.yaml
@@ -0,0 +1,22 @@
on:
workflow_dispatch:
schedule:
- cron: "0 12 * * *"
jobs:
build:
runs-on: ubuntu-latest
container:
image: rqthomas/flare-rocker:4.3.2
steps:
- name: Checkout repo
uses: actions/checkout@v3
with:
fetch-depth: 0

# Point to the right path, run the right Rscript command
- name: Run automatic prediction file
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
run: |
/usr/local/bin/r $GITHUB_WORKSPACE/workflows/glm_flare_v3/combined_forecast.R
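For debugging outside GitHub Actions, the same entry point could presumably be run from an R session inside the flare-rocker container, provided the AWS credentials are set in the environment. A minimal sketch (the credential values are placeholders, not real secrets; the script path is the one the workflow step invokes):

# Sketch of what the workflow step does, run locally from the repository root.
Sys.setenv(AWS_ACCESS_KEY_ID = "<key-id-placeholder>",
           AWS_SECRET_ACCESS_KEY = "<secret-placeholder>")
source("workflows/glm_flare_v3/combined_forecast.R")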
2 changes: 2 additions & 0 deletions .gitignore
@@ -8,6 +8,8 @@ automation/*.log
restart/*
targets/*
scores/*
scoring/*
plots/*


# History files
21 changes: 0 additions & 21 deletions Dockerfile

This file was deleted.

47 changes: 47 additions & 0 deletions R/generate_forecast_score_arrow.R
@@ -0,0 +1,47 @@
#' Score a forecast using the score4cast package and arrow
#' @param targets_df data frame of observations (targets)
#' @param forecast_df data frame of forecast output
#' @param use_s3 logical; if TRUE, write scores to an S3 bucket, otherwise to a local directory
#' @param bucket S3 bucket for the scored output (required when use_s3 = TRUE)
#' @param endpoint S3 endpoint override (required when use_s3 = TRUE)
#' @param local_directory local directory for the scored output (required when use_s3 = FALSE)
#' @param variable_types variable types to score (default "state")
#' @return called for its side effect: writes a parquet dataset of scores partitioned by site_id, model_id, and reference_date
#' @export
generate_forecast_score_arrow <- function(targets_df,
forecast_df,
use_s3 = FALSE,
bucket = NULL,
endpoint = NULL,
local_directory = NULL,
variable_types = "state"){


if(use_s3){
if(is.null(bucket) | is.null(endpoint)){
stop("scoring function needs bucket and endpoint if use_s3=TRUE")
}
vars <- arrow_env_vars()
output_directory <- arrow::s3_bucket(bucket = bucket,
endpoint_override = endpoint)
unset_arrow_vars(vars)
}else{
if(is.null(local_directory)){
stop("scoring function needs local_directory if use_s3=FALSE")
}
output_directory <- arrow::SubTreeFileSystem$create(local_directory)
}


df <- forecast_df |>
dplyr::filter(variable_type %in% variable_types) |>
dplyr::mutate(family = as.character(family)) |>
score4cast::crps_logs_score(targets_df, extra_groups = c('depth')) |>
dplyr::mutate(horizon = datetime-lubridate::as_datetime(reference_datetime)) |>
dplyr::mutate(horizon = as.numeric(lubridate::as.duration(horizon),
units = "seconds"),
horizon = horizon / 86400)

df <- df |> dplyr::mutate(reference_date = lubridate::as_date(reference_datetime))

arrow::write_dataset(df, path = output_directory, partitioning = c("site_id","model_id","reference_date"))

}
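A hypothetical call, assuming targets and forecast data frames with the columns the function uses (variable_type, family, datetime, reference_datetime, depth, site_id, model_id) and writing scores locally rather than to S3 (object names and the output path below are illustrative, not from this commit):

# Illustrative usage of the scoring function added above.
generate_forecast_score_arrow(targets_df = targets,
                              forecast_df = forecast_df,
                              use_s3 = FALSE,
                              local_directory = "scores/parquet")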
11 changes: 11 additions & 0 deletions R/get_edi_file.R
@@ -0,0 +1,11 @@
get_edi_file <- function(edi_https, file, lake_directory){ #, curl_timeout = 60){

if(!file.exists(file.path(lake_directory, "data_raw", file))){
if(!dir.exists(dirname(file.path(lake_directory, "data_raw", file)))){
dir.create(dirname(file.path(lake_directory, "data_raw", file)))
}
url_download <- httr::RETRY("GET",edi_https, httr::timeout(1500), pause_base = 5, pause_cap = 20, pause_min = 5, times = 3, quiet = FALSE)
test_bin <- httr::content(url_download,'raw')
writeBin(test_bin, file.path(lake_directory, "data_raw", file))
}
}
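An illustrative call: the helper caches the download under <lake_directory>/data_raw and skips the request if the file is already present. The URL and file name below are placeholders; the real EDI package URL is not part of this commit:

# Hypothetical usage of get_edi_file() with placeholder arguments.
get_edi_file(edi_https = "https://portal.edirepository.org/...",
             file = "ccre-insitu-observations.csv",
             lake_directory = getwd())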
122 changes: 122 additions & 0 deletions configuration/glm_flare_v3/configure_flare.yml
@@ -0,0 +1,122 @@
s3:
drivers:
endpoint: renc.osn.xsede.org
bucket: bio230121-bucket01/flare/drivers/met
inflow_drivers:
endpoint: renc.osn.xsede.org
bucket: bio230121-bucket01/flare/drivers/inflow/ccre/lm_ar
outflow_drivers:
endpoint: renc.osn.xsede.org
bucket: bio230121-bucket01/flare/drivers/inflow/ccre/lm_ar
targets:
endpoint: renc.osn.xsede.org
bucket: bio230121-bucket01/flare/targets/ccre
forecasts:
endpoint: renc.osn.xsede.org
bucket: bio230121-bucket01/flare/forecasts/netcdf
forecasts_parquet:
endpoint: renc.osn.xsede.org
bucket: bio230121-bucket01/flare/forecasts/parquet
restart:
endpoint: renc.osn.xsede.org
bucket: bio230121-bucket01/flare/restart
scores:
endpoint: renc.osn.xsede.org
bucket: bio230121-bucket01/flare/scores/parquet
location:
site_id: ccre
name: Carvins Cove Reservoir
latitude: 37.3697 #Degrees North
longitude: -79.9580 #Degrees East (negative = west of the prime meridian)
da_setup:
da_method: enkf
par_fit_method: perturb
ensemble_size: 217
no_negative_states: TRUE
assimilate_first_step: FALSE
use_obs_constraint: TRUE
obs_filename: ccre-targets-insitu.csv
model_settings:
ncore: 4
model_name: glm
base_GLM_nml: glm3.nml
max_model_layers: 100
# modeled_depths: [0.00,0.25,0.50,0.75,
# 1.00,1.25,1.50,1.75,
# 2.00,2.25,2.50,2.75,
# 3.00,3.25,3.50,3.75,
# 4.00,4.25,4.50,4.75,
# 5.00,5.25,5.50,5.75,
# 6.00,6.25,6.50,6.75,
# 7.00,7.25,7.50,7.75,
# 8.00,8.25,8.50,8.75,
# 9.00,9.25,9.50,9.75,
# 10.00,10.25,10.50,10.75,
# 11.00,11.25,11.50,11.75,
# 12.00,12.25,12.50,12.75,
# 13.00,13.25,13.50,13.75,
# 14.00,14.25,14.50,14.75,
# 15.00,15.25,15.50,15.75,
# 16.00,16.25,16.50,16.75,
# 17.00,17.25,17.50,17.75,
# 18.00,18.25,18.50,18.75,
# 19.00,19.25,19.50,19.75,
# 20.00,20.25,20.50,20.75,
# 21.00]
modeled_depths: [0.00,1.00,2.00,3.00,4.00,
5.00,6.00,7.00,8.00,9.00,
10.00,12.00,14.00,16.00,
18.00,20.00,21.00]
par_config_file: parameter_calibration_config.csv
obs_config_file: observations_config.csv
states_config_file: states_config.csv
depth_model_sd_config_file: depth_model_sd.csv
default_init:
lake_depth: 22.8 #not a modeled state
temp: [20,20,20,20,20,20,
20,20,20,20,20,20,20,20]
temp_depths: [0.1,1.0,2.0,3.0,4.0,5.0,
6.0,7.0,8.0,9.0,10.0,11.0,15.0,19.0]
salinity: 0.0
snow_thickness: 0.0
white_ice_thickness: 0.0
blue_ice_thickness: 0.0
flows:
include_inflow: FALSE
include_outflow: FALSE
use_forecasted_inflow: FALSE
forecast_inflow_model: NA
observed_inflow_filename: NA
inflow_variables: NA
use_ler_vars: FALSE
historical_inflow_model: NA
future_inflow_model: NA
historic_outflow_model: NA
future_outflow_model: NA
local_inflow_directory: NA
local_outflow_directory: NA
use_flows_s3: NA
met:
future_met_model: 'gefs-v12/stage2'
historical_met_model: 'gefs-v12/stage3'
historical_met_use_s3: TRUE
future_met_use_s3: TRUE
use_openmeteo: FALSE
local_met_directory: 'drivers'
use_forecasted_met: TRUE
# use_observed_met: FALSE
# local_directory: .na
# observed_met_filename: .na
use_ler_vars: FALSE
forecast_lag_days: 1
uncertainty:
observation: TRUE
process: TRUE
weather: TRUE
initial_condition: TRUE
parameter: TRUE
met_downscale: TRUE
inflow: TRUE
output_settings:
diagnostics_names: [extc]
generate_plots: TRUE
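This configuration would typically be read into R as a nested list so that individual settings can be pulled out by name. A sketch, assuming the repository root as the working directory and the yaml package installed:

# Read the FLARE configuration shown above and inspect a few settings.
config <- yaml::read_yaml("configuration/glm_flare_v3/configure_flare.yml")
config$location$site_id        # "ccre"
config$da_setup$ensemble_size  # 217
config$model_settings$modeled_depths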
9 changes: 9 additions & 0 deletions configuration/glm_flare_v3/configure_run.yml
@@ -0,0 +1,9 @@
restart_file: .na
start_datetime: 2024-08-01 00:00:00
end_datetime: .na
forecast_start_datetime: 2024-09-03 00:00:00
forecast_horizon: 34.0
sim_name: glm_flare_v3
configure_flare: configure_flare.yml
use_s3: FALSE
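A quick check of the run window these settings imply, assuming (as is the FLARE convention) that forecast_horizon is in days:

# forecast_start_datetime plus the 34-day horizon gives the last forecasted date.
as.Date("2024-09-03") + 34
#> [1] "2024-10-07"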

16 changes: 16 additions & 0 deletions configuration/glm_flare_v3/depth_model_sd.csv
@@ -0,0 +1,16 @@
depth,temp
0,0.75
0.1,0.75
1,0.75
2,0.75
3,0.75
4,0.75
5,0.75
6,0.75
7,0.75
8,0.75
9,0.75
10,0.75
11,0.75
15,0.75
19,0.75
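The depth-specific model standard deviations above are a plain two-column table (depth in metres is the usual FLARE convention, with a temperature sd of 0.75 at every depth) and can be loaded directly:

# Read the depth-varying model sd table with base R.
depth_sd <- read.csv("configuration/glm_flare_v3/depth_model_sd.csv")
nrow(depth_sd)         # 15 depths, from 0 to 19
unique(depth_sd$temp)  # 0.75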
