From cfdc553dceb79e33713744380680cd29831a20e9 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 27 Oct 2024 00:20:21 +0000 Subject: [PATCH] update catalog --- .../Daily_Chlorophyll_a/collection.json | 30 +++--- .../Daily_Chlorophyll_a/models/USGSHABs1.json | 5 +- .../models/cb_prophet.json | 5 +- .../models/climatology.json | 5 +- .../models/persistenceRW.json | 5 +- .../models/procBlanchardMonod.json | 5 +- .../models/procCTMIMonod.json | 5 +- .../models/procEppleyNorbergMonod.json | 5 +- .../models/procEppleyNorbergSteele.json | 5 +- .../models/procHinshelwoodMonod.json | 5 +- .../models/procHinshelwoodSteele.json | 5 +- .../Daily_Chlorophyll_a/models/tg_arima.json | 5 +- .../Daily_Chlorophyll_a/models/tg_ets.json | 5 +- .../models/tg_humidity_lm.json | 5 +- .../models/tg_humidity_lm_all_sites.json | 5 +- .../Daily_Chlorophyll_a/models/tg_lasso.json | 5 +- .../models/tg_precip_lm.json | 5 +- .../models/tg_precip_lm_all_sites.json | 5 +- .../models/tg_randfor.json | 5 +- .../Daily_Chlorophyll_a/models/tg_tbats.json | 15 +-- .../models/tg_temp_lm.json | 5 +- .../models/tg_temp_lm_all_sites.json | 5 +- .../Daily_Dissolved_oxygen/collection.json | 36 ++++---- .../models/AquaticEcosystemsOxygen.json | 5 +- .../models/GLEON_lm_lag_1day.json | 5 +- .../models/air2waterSat_2.json | 5 +- .../models/cb_prophet.json | 5 +- .../models/climatology.json | 35 +++---- .../models/hotdeck.json | 15 +-- .../models/persistenceRW.json | 5 +- .../models/tg_arima.json | 5 +- .../Daily_Dissolved_oxygen/models/tg_ets.json | 5 +- .../models/tg_humidity_lm.json | 35 +++---- .../models/tg_humidity_lm_all_sites.json | 51 ++++++----- .../models/tg_lasso.json | 47 +++++----- .../models/tg_precip_lm.json | 5 +- .../models/tg_precip_lm_all_sites.json | 27 +++--- .../models/tg_randfor.json | 31 ++++--- .../models/tg_tbats.json | 51 ++++++----- .../models/tg_temp_lm.json | 5 +- .../models/tg_temp_lm_all_sites.json | 5 +- .../Daily_Water_temperature/collection.json | 48 +++++----- 
.../models/GAM_air_wind.json | 5 +- .../models/GLEON_JRabaey_temp_physics.json | 5 +- .../models/GLEON_lm_lag_1day.json | 5 +- .../models/GLEON_physics.json | 5 +- .../models/TSLM_seasonal_JM.json | 5 +- .../models/acp_fableLM.json | 5 +- .../models/air2waterSat_2.json | 5 +- .../models/baseline_ensemble.json | 5 +- .../models/bee_bake_RFModel_2024.json | 15 +-- .../models/cb_prophet.json | 5 +- .../models/climatology.json | 39 ++++---- .../models/fARIMA_clim_ensemble.json | 11 ++- .../models/fTSLM_lag.json | 5 +- .../models/flareGLM.json | 5 +- .../models/flareGLM_noDA.json | 5 +- .../models/flareGOTM_noDA.json | 5 +- .../models/flareSimstrat_noDA.json | 5 +- .../models/flare_ler.json | 5 +- .../models/flare_ler_baselines.json | 5 +- .../models/hotdeck.json | 11 ++- .../models/lm_AT_WTL_WS.json | 5 +- .../models/mkricheldorf_w_lag.json | 5 +- .../models/mlp1_wtempforecast_LF.json | 19 ++-- .../models/persistenceRW.json | 5 +- .../models/precip_mod.json | 5 +- .../models/tg_arima.json | 5 +- .../models/tg_ets.json | 5 +- .../models/tg_humidity_lm.json | 5 +- .../models/tg_humidity_lm_all_sites.json | 5 +- .../models/tg_lasso.json | 5 +- .../models/tg_precip_lm.json | 5 +- .../models/tg_precip_lm_all_sites.json | 5 +- .../models/tg_randfor.json | 5 +- .../models/tg_tbats.json | 5 +- .../models/tg_temp_lm.json | 5 +- .../models/tg_temp_lm_all_sites.json | 5 +- .../models/zimmerman_proj1.json | 5 +- .../models/tg_arima.json | 5 +- .../models/tg_ets.json | 5 +- .../models/tg_humidity_lm.json | 5 +- .../models/tg_humidity_lm_all_sites.json | 5 +- .../models/tg_lasso.json | 5 +- .../models/tg_precip_lm.json | 5 +- .../models/tg_precip_lm_all_sites.json | 5 +- .../models/tg_randfor.json | 5 +- .../models/tg_tbats.json | 5 +- .../models/tg_temp_lm.json | 5 +- .../models/tg_temp_lm_all_sites.json | 5 +- .../models/tg_arima.json | 5 +- .../models/tg_ets.json | 5 +- .../models/tg_humidity_lm.json | 5 +- .../models/tg_humidity_lm_all_sites.json | 5 +- .../models/tg_lasso.json 
| 5 +- .../models/tg_precip_lm.json | 5 +- .../models/tg_precip_lm_all_sites.json | 5 +- .../models/tg_randfor.json | 5 +- .../models/tg_tbats.json | 5 +- .../models/tg_temp_lm.json | 5 +- .../models/tg_temp_lm_all_sites.json | 5 +- .../collection.json | 10 +- .../models/ChlorophyllCrusaders.json | 5 +- .../models/PEG.json | 5 +- .../models/cb_prophet.json | 71 ++++++++------- .../models/climatology.json | 15 +-- .../models/persistenceRW.json | 79 ++++++++-------- .../models/tg_arima.json | 5 +- .../models/tg_ets.json | 83 ++++++++--------- .../models/tg_humidity_lm.json | 79 ++++++++-------- .../models/tg_humidity_lm_all_sites.json | 5 +- .../models/tg_lasso.json | 5 +- .../models/tg_precip_lm.json | 5 +- .../models/tg_precip_lm_all_sites.json | 5 +- .../models/tg_randfor.json | 5 +- .../models/tg_tbats.json | 63 ++++++------- .../models/tg_temp_lm.json | 75 +++++++-------- .../models/tg_temp_lm_all_sites.json | 5 +- .../collection.json | 28 +++--- .../models/PEG.json | 5 +- .../models/baseline_ensemble.json | 5 +- .../models/cb_prophet.json | 79 ++++++++-------- .../models/climatology.json | 15 +-- .../models/persistenceRW.json | 39 ++++---- .../models/tg_arima.json | 91 ++++++++++--------- .../models/tg_ets.json | 5 +- .../models/tg_humidity_lm.json | 5 +- .../models/tg_humidity_lm_all_sites.json | 5 +- .../models/tg_lasso.json | 39 ++++---- .../models/tg_precip_lm.json | 71 ++++++++------- .../models/tg_precip_lm_all_sites.json | 59 ++++++------ .../models/tg_randfor.json | 75 +++++++-------- .../models/tg_tbats.json | 27 +++--- .../models/tg_temp_lm.json | 5 +- .../models/tg_temp_lm_all_sites.json | 5 +- .../models/climatology.json | 5 +- .../models/climatology.json | 31 ++++--- .../models/USUNEEDAILY.json | 5 +- .../models/bookcast_forest.json | 5 +- .../models/cb_prophet.json | 5 +- .../models/climatology.json | 5 +- .../models/persistenceRW.json | 5 +- .../models/tg_arima.json | 5 +- .../models/tg_ets.json | 5 +- .../models/tg_humidity_lm.json | 5 +- 
.../models/tg_humidity_lm_all_sites.json | 5 +- .../models/tg_precip_lm.json | 5 +- .../models/tg_precip_lm_all_sites.json | 5 +- .../models/tg_randfor.json | 5 +- .../models/tg_tbats.json | 5 +- .../models/tg_temp_lm.json | 5 +- .../models/tg_temp_lm_all_sites.json | 5 +- .../Daily_latent_heat_flux/collection.json | 20 ++-- .../models/cb_prophet.json | 5 +- .../models/climatology.json | 5 +- .../models/tg_arima.json | 71 ++++++++------- .../Daily_latent_heat_flux/models/tg_ets.json | 5 +- .../models/tg_humidity_lm.json | 19 ++-- .../models/tg_humidity_lm_all_sites.json | 5 +- .../models/tg_precip_lm.json | 71 ++++++++------- .../models/tg_precip_lm_all_sites.json | 5 +- .../models/tg_randfor.json | 5 +- .../models/tg_tbats.json | 5 +- .../models/tg_temp_lm.json | 5 +- .../models/tg_temp_lm_all_sites.json | 5 +- .../models/tg_arima.json | 5 +- .../models/tg_ets.json | 5 +- .../models/tg_humidity_lm.json | 5 +- .../models/tg_humidity_lm_all_sites.json | 5 +- .../models/tg_lasso.json | 5 +- .../models/tg_precip_lm.json | 5 +- .../models/tg_precip_lm_all_sites.json | 5 +- .../models/tg_randfor.json | 5 +- .../models/tg_tbats.json | 5 +- .../models/tg_temp_lm.json | 5 +- .../models/tg_temp_lm_all_sites.json | 5 +- 176 files changed, 1288 insertions(+), 1118 deletions(-) diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/collection.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/collection.json index cd0b2681b1..3302c7e036 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/collection.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/collection.json @@ -8,21 +8,6 @@ ], "type": "Collection", "links": [ - { - "rel": "item", - "type": "application/json", - "href": "./models/tg_tbats.json" - }, - { - "rel": "item", - "type": "application/json", - "href": "./models/tg_temp_lm.json" - }, - { - "rel": "item", - "type": "application/json", - "href": "./models/tg_temp_lm_all_sites.json" - }, { "rel": "item", "type": "application/json", @@ -113,6 
+98,21 @@ "type": "application/json", "href": "./models/tg_randfor.json" }, + { + "rel": "item", + "type": "application/json", + "href": "./models/tg_tbats.json" + }, + { + "rel": "item", + "type": "application/json", + "href": "./models/tg_temp_lm.json" + }, + { + "rel": "item", + "type": "application/json", + "href": "./models/tg_temp_lm_all_sites.json" + }, { "rel": "parent", "type": "application/json", diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/USGSHABs1.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/USGSHABs1.json index a539300b63..915603c092 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/USGSHABs1.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/USGSHABs1.json @@ -17,7 +17,8 @@ "properties": { "title": "USGSHABs1", "description": "All summaries for the Daily_Chlorophyll_a variable for the USGSHABs1 model. Information for the model is provided as follows: NA.\n The model predicts this variable at the following sites: BLWA, TOMB, FLNT.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-05", + "updated": "2024-07-02", "start_datetime": "2023-11-12T00:00:00Z", "end_datetime": "2024-03-09T00:00:00Z", "providers": [ @@ -197,7 +198,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=USGSHABs1?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=USGSHABs1?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=USGSHABs1?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=USGSHABs1\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/cb_prophet.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/cb_prophet.json index fe819a5358..348e019158 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/cb_prophet.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/cb_prophet.json @@ -23,7 +23,8 @@ "properties": { "title": "cb_prophet", "description": "All summaries for the Daily_Chlorophyll_a variable for the cb_prophet model. Information for the model is provided as follows: The Prophet model is an empirical model, specifically a non-linear regression model that includes\nseasonality effects (Taylor & Letham, 2018). The model relies on Bayesian estimation with an additive\nwhite noise error term.\n The model predicts this variable at the following sites: BARC, BLWA, CRAM, FLNT, LIRO, PRLA, PRPO, SUGG, TOMB.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-06", + "updated": "2024-02-07", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-10T00:00:00Z", "providers": [ @@ -209,7 +210,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=cb_prophet\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/climatology.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/climatology.json index 6255962930..bdf7a7c2a9 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/climatology.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/climatology.json @@ -24,7 +24,8 @@ "properties": { "title": "climatology", "description": "All summaries for the Daily_Chlorophyll_a variable for the climatology model. Information for the model is provided as follows: Historical DOY mean and sd. Assumes normal distribution.\n The model predicts this variable at the following sites: BARC, BLWA, FLNT, SUGG, TOMB, CRAM, LIRO, PRPO, PRLA, TOOK, USGS-01427510, USGS-01463500, USGS-05543010, USGS-05553700, USGS-05558300, USGS-05586300, USGS-14181500, USGS-14211010, USGS-14211720.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2023-01-02T00:00:00Z", "end_datetime": "2024-09-26T00:00:00Z", "providers": [ @@ -220,7 +221,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=climatology?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=climatology\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/persistenceRW.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/persistenceRW.json index 26337af7df..03952da4b0 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/persistenceRW.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/persistenceRW.json @@ -24,7 +24,8 @@ "properties": { "title": "persistenceRW", "description": "All summaries for the Daily_Chlorophyll_a variable for the persistenceRW model. Information for the model is provided as follows: Random walk from the fable package with ensembles used to represent uncertainty.\n The model predicts this variable at the following sites: LIRO, PRLA, PRPO, SUGG, TOMB, TOOK, BARC, BLWA, CRAM, FLNT.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2023-11-15T00:00:00Z", "end_datetime": "2024-09-25T00:00:00Z", "providers": [ @@ -211,7 +212,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=persistenceRW\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procBlanchardMonod.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procBlanchardMonod.json index 1c22d0e446..e8f867a7e5 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procBlanchardMonod.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procBlanchardMonod.json @@ -21,7 +21,8 @@ "properties": { "title": "procBlanchardMonod", "description": "All summaries for the Daily_Chlorophyll_a variable for the procBlanchardMonod model. Information for the model is provided as follows: NA.\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-05", + "updated": "2024-02-07", "start_datetime": "2023-11-13T00:00:00Z", "end_datetime": "2024-03-06T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procBlanchardMonod?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procBlanchardMonod?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procBlanchardMonod?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=procBlanchardMonod\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procCTMIMonod.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procCTMIMonod.json index 30c87d6477..d887a099de 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procCTMIMonod.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procCTMIMonod.json @@ -21,7 +21,8 @@ "properties": { "title": "procCTMIMonod", "description": "All summaries for the Daily_Chlorophyll_a variable for the procCTMIMonod model. Information for the model is provided as follows: NA.\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-05", + "updated": "2024-02-07", "start_datetime": "2023-11-13T00:00:00Z", "end_datetime": "2024-03-06T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procCTMIMonod?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procCTMIMonod?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procCTMIMonod?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=procCTMIMonod\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procEppleyNorbergMonod.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procEppleyNorbergMonod.json index 34ca11a804..a7e05e68e0 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procEppleyNorbergMonod.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procEppleyNorbergMonod.json @@ -21,7 +21,8 @@ "properties": { "title": "procEppleyNorbergMonod", "description": "All summaries for the Daily_Chlorophyll_a variable for the procEppleyNorbergMonod model. Information for the model is provided as follows: NA.\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-05", + "updated": "2024-02-07", "start_datetime": "2023-11-13T00:00:00Z", "end_datetime": "2024-03-06T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procEppleyNorbergMonod?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procEppleyNorbergMonod?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procEppleyNorbergMonod?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=procEppleyNorbergMonod\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procEppleyNorbergSteele.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procEppleyNorbergSteele.json index 97099451cf..bb835f0848 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procEppleyNorbergSteele.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procEppleyNorbergSteele.json @@ -21,7 +21,8 @@ "properties": { "title": "procEppleyNorbergSteele", "description": "All summaries for the Daily_Chlorophyll_a variable for the procEppleyNorbergSteele model. Information for the model is provided as follows: NA.\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-05", + "updated": "2024-02-07", "start_datetime": "2023-11-13T00:00:00Z", "end_datetime": "2024-03-06T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procEppleyNorbergSteele?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procEppleyNorbergSteele?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procEppleyNorbergSteele?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=procEppleyNorbergSteele\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procHinshelwoodMonod.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procHinshelwoodMonod.json index 2d440bbe72..5324ffafa2 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procHinshelwoodMonod.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procHinshelwoodMonod.json @@ -21,7 +21,8 @@ "properties": { "title": "procHinshelwoodMonod", "description": "All summaries for the Daily_Chlorophyll_a variable for the procHinshelwoodMonod model. Information for the model is provided as follows: NA.\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-05", + "updated": "2024-02-07", "start_datetime": "2023-11-13T00:00:00Z", "end_datetime": "2024-03-06T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procHinshelwoodMonod?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procHinshelwoodMonod?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procHinshelwoodMonod?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=procHinshelwoodMonod\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procHinshelwoodSteele.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procHinshelwoodSteele.json index 880aab675f..ef6d99b712 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procHinshelwoodSteele.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/procHinshelwoodSteele.json @@ -21,7 +21,8 @@ "properties": { "title": "procHinshelwoodSteele", "description": "All summaries for the Daily_Chlorophyll_a variable for the procHinshelwoodSteele model. Information for the model is provided as follows: NA.\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-05", + "updated": "2024-02-07", "start_datetime": "2023-11-13T00:00:00Z", "end_datetime": "2024-03-06T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procHinshelwoodSteele?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procHinshelwoodSteele?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=procHinshelwoodSteele?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=procHinshelwoodSteele\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_arima.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_arima.json index 984fed5222..979210ea73 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_arima.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_arima.json @@ -24,7 +24,8 @@ "properties": { "title": "tg_arima", "description": "All summaries for the Daily_Chlorophyll_a variable for the tg_arima model. Information for the model is provided as follows: The tg_arima model is an AutoRegressive Integrated Moving Average (ARIMA) model fit using\nthe function auto.arima() from the forecast package in R (Hyndman et al. 2023; Hyndman et al., 2008).\nThis is an empirical time series model with no covariates.\n The model predicts this variable at the following sites: BARC, BLWA, CRAM, FLNT, LIRO, PRLA, PRPO, SUGG, TOMB, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-07-19", + "updated": "2024-08-23", "start_datetime": "2023-01-01T00:00:00Z", "end_datetime": "2024-08-18T00:00:00Z", "providers": [ @@ -211,7 +212,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_arima\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_ets.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_ets.json index e722c22bb6..6d9a3ca096 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_ets.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_ets.json @@ -24,7 +24,8 @@ "properties": { "title": "tg_ets", "description": "All summaries for the Daily_Chlorophyll_a variable for the tg_ets model. Information for the model is provided as follows: The tg_ets model is an Error, Trend, Seasonal (ETS) model fit using the function ets() from the\nforecast package in R (Hyndman et al. 2023; Hyndman et al., 2008). This is an empirical time series\nmodel with no covariates..\n The model predicts this variable at the following sites: BARC, BLWA, CRAM, FLNT, LIRO, PRLA, PRPO, SUGG, TOMB, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-07-19", + "updated": "2024-08-23", "start_datetime": "2023-01-01T00:00:00Z", "end_datetime": "2024-08-18T00:00:00Z", "providers": [ @@ -211,7 +212,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_ets\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_humidity_lm.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_humidity_lm.json index 22e58d4578..9d8fe295ef 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_humidity_lm.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_humidity_lm.json @@ -24,7 +24,8 @@ "properties": { "title": "tg_humidity_lm", "description": "All summaries for the Daily_Chlorophyll_a variable for the tg_humidity_lm model. Information for the model is provided as follows: The tg_humidity_lm model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity.\n The model predicts this variable at the following sites: BARC, BLWA, CRAM, FLNT, LIRO, PRLA, PRPO, SUGG, TOMB, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -211,7 +212,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_humidity_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_humidity_lm_all_sites.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_humidity_lm_all_sites.json index aac7f5f194..5ea2444e65 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_humidity_lm_all_sites.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_humidity_lm_all_sites.json @@ -24,7 +24,8 @@ "properties": { "title": "tg_humidity_lm_all_sites", "description": "All summaries for the Daily_Chlorophyll_a variable for the tg_humidity_lm_all_sites model. Information for the model is provided as follows: The tg_humidity_lm_all_sites model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity. 
This model was used to forecast water temperature and dissolved oxygen concentration at the\nseven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: BARC, BLWA, CRAM, FLNT, LIRO, PRLA, PRPO, SUGG, TOMB, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -211,7 +212,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_humidity_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_lasso.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_lasso.json index ce4b6da8cd..fc8074d46d 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_lasso.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_lasso.json @@ -24,7 +24,8 @@ "properties": { "title": "tg_lasso", "description": "All summaries for the Daily_Chlorophyll_a variable for the tg_lasso model. Information for the model is provided as follows: Lasso is a machine learning model implemented in the same workflow as tg_randfor, but with\ndifferent hyperparameter tuning. The model drivers are unlagged air temperature, air pressure, relative\nhumidity, surface downwelling longwave and shortwave radiation, precipitation, and northward and\neastward wind. Lasso regressions were fitted with the function glmnet() in\nthe package glmnet (Tay et al. 
2023), where the regularization hyperparameter (lambda) is tuned and\nselected with 10-fold cross validation..\n The model predicts this variable at the following sites: BARC, BLWA, CRAM, FLNT, LIRO, PRLA, PRPO, SUGG, TOMB, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -211,7 +212,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_lasso\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_precip_lm.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_precip_lm.json index 08072705dd..ad3cd925e8 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_precip_lm.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_precip_lm.json @@ -24,7 +24,8 @@ "properties": { "title": "tg_precip_lm", "description": "All summaries for the Daily_Chlorophyll_a variable for the tg_precip_lm model. Information for the model is provided as follows: The tg_precip_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only total precipitation used as a model covariate..\n The model predicts this variable at the following sites: BARC, BLWA, CRAM, FLNT, LIRO, PRLA, PRPO, SUGG, TOMB, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -211,7 +212,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_precip_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_precip_lm_all_sites.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_precip_lm_all_sites.json index aeec490641..5c8d4e82f9 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_precip_lm_all_sites.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_precip_lm_all_sites.json @@ -24,7 +24,8 @@ "properties": { "title": "tg_precip_lm_all_sites", "description": "All summaries for the Daily_Chlorophyll_a variable for the tg_precip_lm_all_sites model. Information for the model is provided as follows: The tg_precip_lm_all_sites model is a linear model fit using the function lm() in R. This is a very\nsimple model with only one covariate: total precipitation. y. 
This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together..\n The model predicts this variable at the following sites: BARC, BLWA, CRAM, FLNT, LIRO, PRLA, PRPO, SUGG, TOMB, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-16", + "updated": "2024-08-23", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-09-19T00:00:00Z", "providers": [ @@ -211,7 +212,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_precip_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_randfor.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_randfor.json index 4b8f6e1e39..d08444c664 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_randfor.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_randfor.json @@ -24,7 +24,8 @@ "properties": { "title": "tg_randfor", "description": "All summaries for the Daily_Chlorophyll_a variable for the tg_randfor model. Information for the model is provided as follows: Random Forest is a machine learning model that is fitted with the ranger() function in the ranger\nR package (Wright & Ziegler 2017) within the tidymodels framework (Kuhn & Wickham 2020). 
The\nmodel drivers are unlagged air temperature, air pressure, relative humidity, surface downwelling\nlongwave and shortwave radiation, precipitation, and northward and eastward wind.\n The model predicts this variable at the following sites: BARC, BLWA, CRAM, FLNT, LIRO, PRLA, PRPO, SUGG, TOMB, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-16", + "updated": "2024-08-23", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-09-18T00:00:00Z", "providers": [ @@ -211,7 +212,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_randfor\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_tbats.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_tbats.json index 7add233463..30fef4c75e 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_tbats.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_tbats.json @@ -9,6 +9,7 @@ "geometry": { "type": "MultiPoint", "coordinates": [ + [-82.0084, 29.676], [-87.7982, 32.5415], [-89.4737, 46.2097], [-84.4374, 31.1854], @@ -17,14 +18,14 @@ [-99.2531, 47.1298], [-82.0177, 29.6878], [-88.1589, 31.8534], - [-149.6106, 68.6307], - [-82.0084, 29.676] + [-149.6106, 68.6307] ] }, "properties": { "title": "tg_tbats", - "description": "All summaries for the Daily_Chlorophyll_a variable for the tg_tbats model. 
Information for the model is provided as follows: The tg_tbats model is a TBATS (Trigonometric seasonality, Box-Cox transformation, ARMA\nerrors, Trend and Seasonal components) model fit using the function tbats() from the forecast package in\nR (Hyndman et al. 2023; Hyndman et al., 2008). This is an empirical time series model with no\ncovariates..\n The model predicts this variable at the following sites: BLWA, CRAM, FLNT, LIRO, PRLA, PRPO, SUGG, TOMB, TOOK, BARC.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "description": "All summaries for the Daily_Chlorophyll_a variable for the tg_tbats model. Information for the model is provided as follows: The tg_tbats model is a TBATS (Trigonometric seasonality, Box-Cox transformation, ARMA\nerrors, Trend and Seasonal components) model fit using the function tbats() from the forecast package in\nR (Hyndman et al. 2023; Hyndman et al., 2008). This is an empirical time series model with no\ncovariates..\n The model predicts this variable at the following sites: BARC, BLWA, CRAM, FLNT, LIRO, PRLA, PRPO, SUGG, TOMB, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-07-19", + "updated": "2024-08-23", "start_datetime": "2023-01-01T00:00:00Z", "end_datetime": "2024-08-18T00:00:00Z", "providers": [ @@ -55,6 +56,7 @@ "chla", "Daily", "P1D", + "BARC", "BLWA", "CRAM", "FLNT", @@ -63,8 +65,7 @@ "PRPO", "SUGG", "TOMB", - "TOOK", - "BARC" + "TOOK" ], "table:columns": [ { @@ -211,7 +212,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_tbats\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_temp_lm.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_temp_lm.json index c65be41545..9fee336300 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_temp_lm.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_temp_lm.json @@ -24,7 +24,8 @@ "properties": { "title": "tg_temp_lm", "description": "All summaries for the Daily_Chlorophyll_a variable for the tg_temp_lm model. Information for the model is provided as follows: The tg_temp_lm model is a linear model fit using the function lm() in R. This is a very\nsimple model with only one covariate: total precipitation..\n The model predicts this variable at the following sites: BARC, BLWA, CRAM, FLNT, LIRO, PRLA, PRPO, SUGG, TOMB, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -211,7 +212,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_temp_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_temp_lm_all_sites.json b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_temp_lm_all_sites.json index 183aab2333..05a2a10d07 100644 --- a/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_temp_lm_all_sites.json +++ b/catalog/summaries/Aquatics/Daily_Chlorophyll_a/models/tg_temp_lm_all_sites.json @@ -24,7 +24,8 @@ "properties": { "title": "tg_temp_lm_all_sites", "description": "All summaries for the Daily_Chlorophyll_a variable for the tg_temp_lm_all_sites model. Information for the model is provided as follows: The tg_temp_lm_all_sites model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation.This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: BARC, BLWA, CRAM, FLNT, LIRO, PRLA, PRPO, SUGG, TOMB, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -211,7 +212,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Chlorophyll_a", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=chla/model_id=tg_temp_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/collection.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/collection.json index 1583085f45..5333ac6229 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/collection.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/collection.json @@ -11,92 +11,92 @@ { "rel": "item", "type": "application/json", - "href": "./models/GLEON_lm_lag_1day.json" + "href": "./models/tg_ets.json" }, { "rel": "item", "type": "application/json", - "href": "./models/air2waterSat_2.json" + "href": "./models/tg_humidity_lm.json" }, { "rel": "item", "type": "application/json", - "href": "./models/persistenceRW.json" + "href": "./models/tg_humidity_lm_all_sites.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_arima.json" + "href": "./models/tg_lasso.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_ets.json" + "href": "./models/tg_precip_lm.json" }, { "rel": "item", "type": 
"application/json", - "href": "./models/tg_tbats.json" + "href": "./models/tg_precip_lm_all_sites.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_temp_lm.json" + "href": "./models/tg_randfor.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_temp_lm_all_sites.json" + "href": "./models/tg_tbats.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_lasso.json" + "href": "./models/GLEON_lm_lag_1day.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_precip_lm.json" + "href": "./models/air2waterSat_2.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_precip_lm_all_sites.json" + "href": "./models/cb_prophet.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_randfor.json" + "href": "./models/climatology.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_humidity_lm_all_sites.json" + "href": "./models/persistenceRW.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_humidity_lm.json" + "href": "./models/tg_arima.json" }, { "rel": "item", "type": "application/json", - "href": "./models/climatology.json" + "href": "./models/tg_temp_lm.json" }, { "rel": "item", "type": "application/json", - "href": "./models/cb_prophet.json" + "href": "./models/tg_temp_lm_all_sites.json" }, { "rel": "item", "type": "application/json", - "href": "./models/hotdeck.json" + "href": "./models/AquaticEcosystemsOxygen.json" }, { "rel": "item", "type": "application/json", - "href": "./models/AquaticEcosystemsOxygen.json" + "href": "./models/hotdeck.json" }, { "rel": "parent", diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/AquaticEcosystemsOxygen.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/AquaticEcosystemsOxygen.json index 3fba5385f6..e921f71d8a 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/AquaticEcosystemsOxygen.json +++ 
b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/AquaticEcosystemsOxygen.json @@ -17,7 +17,8 @@ "properties": { "title": "AquaticEcosystemsOxygen", "description": "All summaries for the Daily_Dissolved_oxygen variable for the AquaticEcosystemsOxygen model. Information for the model is provided as follows: Used a Bayesian Dynamic Linear Model using the fit_dlm function from the ecoforecastR package.\n The model predicts this variable at the following sites: BARC, WLOU, ARIK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-07-07", + "updated": "2024-08-23", "start_datetime": "2024-04-03T00:00:00Z", "end_datetime": "2024-08-04T00:00:00Z", "providers": [ @@ -197,7 +198,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=AquaticEcosystemsOxygen?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=AquaticEcosystemsOxygen?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=AquaticEcosystemsOxygen?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=AquaticEcosystemsOxygen\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/GLEON_lm_lag_1day.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/GLEON_lm_lag_1day.json index 144f5899f6..7e19bff12f 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/GLEON_lm_lag_1day.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/GLEON_lm_lag_1day.json @@ -21,7 +21,8 @@ "properties": { "title": "GLEON_lm_lag_1day", "description": "All summaries for the Daily_Dissolved_oxygen variable for the GLEON_lm_lag_1day model. 
Information for the model is provided as follows: NA.\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2023-12-29", + "updated": "2024-01-01", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-02-02T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=GLEON_lm_lag_1day?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=GLEON_lm_lag_1day?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=GLEON_lm_lag_1day?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=GLEON_lm_lag_1day\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/air2waterSat_2.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/air2waterSat_2.json index dea3bbfc06..801fcd001b 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/air2waterSat_2.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/air2waterSat_2.json @@ -48,7 +48,8 @@ "properties": { "title": "air2waterSat_2", "description": "All summaries for the Daily_Dissolved_oxygen variable for the air2waterSat_2 model. 
Information for the model is provided as follows: The air2water model is a linear model fit using the function lm() in R and uses air temperature as\na covariate.\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, TOMB, TOOK, WALK, WLOU, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-01", + "updated": "2024-02-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=air2waterSat_2?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=air2waterSat_2?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=air2waterSat_2?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=air2waterSat_2\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/cb_prophet.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/cb_prophet.json index d6c5ec33db..3b062397f4 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/cb_prophet.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/cb_prophet.json @@ -46,7 +46,8 @@ "properties": { "title": "cb_prophet", "description": "All summaries for the Daily_Dissolved_oxygen variable for the cb_prophet model. Information for the model is provided as follows: The Prophet model is an empirical model, specifically a non-linear regression model that includes\nseasonality effects (Taylor & Letham, 2018). 
The model relies on Bayesian estimation with an additive\nwhite noise error term.\n The model predicts this variable at the following sites: BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, WALK, WLOU, ARIK, BARC, BIGC, BLDE.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-06", + "updated": "2024-02-07", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-10T00:00:00Z", "providers": [ @@ -255,7 +256,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=cb_prophet\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/climatology.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/climatology.json index ab178ecd4b..84d9134b54 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/climatology.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/climatology.json @@ -9,8 +9,11 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-87.7982, 32.5415], - [-147.504, 65.1532], + [-102.4471, 39.7582], + [-82.0084, 29.676], + [-119.2575, 37.0597], + [-110.5871, 44.9501], + [-96.6242, 34.4442], [-105.5442, 40.035], [-66.9868, 18.1135], [-84.4374, 31.1854], @@ -31,13 +34,10 @@ [-119.0274, 36.9559], [-84.2793, 35.9574], [-105.9154, 39.8914], - [-102.4471, 39.7582], - [-82.0084, 29.676], - [-119.2575, 37.0597], - [-110.5871, 44.9501], - [-96.6242, 34.4442], [-88.1589, 31.8534], + [-87.7982, 32.5415], [-89.4737, 46.2097], + [-147.504, 65.1532], [-89.7048, 45.9983], [-99.2531, 47.1298], [-99.1139, 47.1591], @@ -47,8 +47,9 
@@ }, "properties": { "title": "climatology", - "description": "All summaries for the Daily_Dissolved_oxygen variable for the climatology model. Information for the model is provided as follows: Historical DOY mean and sd. Assumes normal distribution.\n The model predicts this variable at the following sites: BLWA, CARI, COMO, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, MART, MAYF, MCDI, MCRA, POSE, PRIN, REDB, SUGG, SYCA, TECR, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, TOMB, CRAM, LIRO, PRPO, PRLA, TOOK, OKSR.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "description": "All summaries for the Daily_Dissolved_oxygen variable for the climatology model. Information for the model is provided as follows: Historical DOY mean and sd. Assumes normal distribution.\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, COMO, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, MART, MAYF, MCDI, MCRA, POSE, PRIN, REDB, SUGG, SYCA, TECR, WALK, WLOU, TOMB, BLWA, CRAM, CARI, LIRO, PRPO, PRLA, TOOK, OKSR.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2023-01-02T00:00:00Z", "end_datetime": "2024-09-26T00:00:00Z", "providers": [ @@ -79,8 +80,11 @@ "oxygen", "Daily", "P1D", - "BLWA", - "CARI", + "ARIK", + "BARC", + "BIGC", + "BLDE", + "BLUE", "COMO", "CUPE", "FLNT", @@ -101,13 +105,10 @@ "TECR", "WALK", "WLOU", - "ARIK", - "BARC", - "BIGC", - "BLDE", - "BLUE", "TOMB", + "BLWA", "CRAM", + "CARI", "LIRO", "PRPO", "PRLA", @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=climatology?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=climatology\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/hotdeck.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/hotdeck.json index d36daefcdc..6a772f579c 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/hotdeck.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/hotdeck.json @@ -11,14 +11,14 @@ "coordinates": [ [-82.0084, 29.676], [-82.0177, 29.6878], + [-89.4737, 46.2097], + [-89.7048, 45.9983], [-96.6038, 39.1051], [-111.5081, 33.751], [-110.5871, 44.9501], [-119.2575, 37.0597], [-122.1655, 44.2596], [-111.7979, 40.7839], - [-89.4737, 46.2097], - [-89.7048, 45.9983], [-97.7823, 33.3785], [-78.1473, 38.8943], [-87.4077, 32.9604], @@ -27,8 +27,9 @@ }, "properties": { "title": "hotdeck", - "description": "All summaries for the Daily_Dissolved_oxygen variable for the hotdeck model. Information for the model is provided as follows: Uses a hot deck approach: - Take the latest observation/forecast. - Past observations from around the same window of the season are collected. - Values close to the latest observation/forecast are collected. - One of these is randomly sampled. - Its \"tomorrow\" observation is used as the forecast. 
- Repeat until forecast at step h..\n The model predicts this variable at the following sites: BARC, SUGG, KING, SYCA, BLDE, BIGC, MCRA, REDB, CRAM, LIRO, PRIN, POSE, MAYF, LEWI.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "description": "All summaries for the Daily_Dissolved_oxygen variable for the hotdeck model. Information for the model is provided as follows: Uses a hot deck approach: - Take the latest observation/forecast. - Past observations from around the same window of the season are collected. - Values close to the latest observation/forecast are collected. - One of these is randomly sampled. - Its \"tomorrow\" observation is used as the forecast. - Repeat until forecast at step h..\n The model predicts this variable at the following sites: BARC, SUGG, CRAM, LIRO, KING, SYCA, BLDE, BIGC, MCRA, REDB, PRIN, POSE, MAYF, LEWI.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2024-04-05T00:00:00Z", "end_datetime": "2024-09-21T00:00:00Z", "providers": [ @@ -61,14 +62,14 @@ "P1D", "BARC", "SUGG", + "CRAM", + "LIRO", "KING", "SYCA", "BLDE", "BIGC", "MCRA", "REDB", - "CRAM", - "LIRO", "PRIN", "POSE", "MAYF", @@ -219,7 +220,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=hotdeck?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=hotdeck?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=hotdeck?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=hotdeck\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/persistenceRW.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/persistenceRW.json index 4e9ff1d534..bcf8507c71 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/persistenceRW.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/persistenceRW.json @@ -48,7 +48,8 @@ "properties": { "title": "persistenceRW", "description": "All summaries for the Daily_Dissolved_oxygen variable for the persistenceRW model. 
Information for the model is provided as follows: Random walk from the fable package with ensembles used to represent uncertainty.\n The model predicts this variable at the following sites: CARI, COMO, CRAM, CUPE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, MAYF, MCDI, MCRA, OKSR, POSE, BLUE, BLWA, WLOU, ARIK, BARC, BIGC, BLDE, TECR, TOMB, TOOK, WALK, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2023-11-15T00:00:00Z", "end_datetime": "2024-09-25T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=persistenceRW\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_arima.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_arima.json index 4ecbd2cead..662f727e58 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_arima.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_arima.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_arima", "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_arima model. Information for the model is provided as follows: The tg_arima model is an AutoRegressive Integrated Moving Average (ARIMA) model fit using\nthe function auto.arima() from the forecast package in R (Hyndman et al. 
2023; Hyndman et al., 2008).\nThis is an empirical time series model with no covariates.\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-07-19", + "updated": "2024-08-23", "start_datetime": "2023-01-01T00:00:00Z", "end_datetime": "2024-08-18T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_arima\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_ets.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_ets.json index 09947adecc..2d233e048a 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_ets.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_ets.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_ets", "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_ets model. Information for the model is provided as follows: The tg_ets model is an Error, Trend, Seasonal (ETS) model fit using the function ets() from the\nforecast package in R (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series\nmodel with no covariates..\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-07-19", + "updated": "2024-08-23", "start_datetime": "2023-01-01T00:00:00Z", "end_datetime": "2024-08-18T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_ets\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_humidity_lm.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_humidity_lm.json index 9697bae93e..e93cabd012 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_humidity_lm.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_humidity_lm.json @@ -9,6 +9,12 @@ "geometry": { "type": "MultiPoint", "coordinates": [ + [-111.5081, 33.751], + [-119.0274, 36.9559], + [-88.1589, 31.8534], + [-149.6106, 68.6307], + [-84.2793, 35.9574], + [-105.9154, 39.8914], [-102.4471, 39.7582], [-82.0084, 29.676], [-119.2575, 37.0597], @@ -36,19 +42,14 @@ [-99.1139, 47.1591], [-99.2531, 47.1298], [-111.7979, 40.7839], - [-82.0177, 29.6878], - [-111.5081, 33.751], - [-119.0274, 36.9559], - [-88.1589, 31.8534], - [-149.6106, 68.6307], - [-84.2793, 35.9574], - [-105.9154, 39.8914] + [-82.0177, 29.6878] ] }, "properties": { "title": "tg_humidity_lm", - "description": "All summaries for the Daily_Dissolved_oxygen 
variable for the tg_humidity_lm model. Information for the model is provided as follows: The tg_humidity_lm model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity.\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_humidity_lm model. Information for the model is provided as follows: The tg_humidity_lm model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity.\n The model predicts this variable at the following sites: SYCA, TECR, TOMB, TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-02-03", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -79,6 +80,12 @@ "oxygen", "Daily", "P1D", + "SYCA", + "TECR", + "TOMB", + "TOOK", + "WALK", + "WLOU", "ARIK", "BARC", "BIGC", @@ -106,13 +113,7 @@ "PRLA", "PRPO", "REDB", - "SUGG", - "SYCA", - "TECR", - "TOMB", - "TOOK", - "WALK", - "WLOU" + "SUGG" ], "table:columns": [ { @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org", - 
"description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_humidity_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_humidity_lm_all_sites.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_humidity_lm_all_sites.json index 6380324ffe..4283f31c23 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_humidity_lm_all_sites.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_humidity_lm_all_sites.json @@ -9,16 +9,6 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-122.1655, 44.2596], - [-149.143, 68.6698], - [-78.1473, 38.8943], - [-97.7823, 33.3785], - [-99.1139, 47.1591], - [-99.2531, 47.1298], - [-111.7979, 40.7839], - [-82.0177, 29.6878], - [-111.5081, 33.751], - [-119.0274, 36.9559], [-88.1589, 31.8534], [-149.6106, 68.6307], [-84.2793, 35.9574], @@ -42,13 +32,24 @@ [-89.7048, 45.9983], [-121.9338, 45.7908], [-87.4077, 32.9604], - [-96.443, 38.9459] + [-96.443, 38.9459], + [-122.1655, 44.2596], + [-149.143, 68.6698], + [-78.1473, 38.8943], + [-97.7823, 33.3785], + [-99.1139, 47.1591], + [-99.2531, 47.1298], + [-111.7979, 40.7839], + [-82.0177, 29.6878], + [-111.5081, 33.751], + [-119.0274, 36.9559] ] }, "properties": { "title": "tg_humidity_lm_all_sites", - "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_humidity_lm_all_sites model. Information for the model is provided as follows: The tg_humidity_lm_all_sites model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity. 
This model was used to forecast water temperature and dissolved oxygen concentration at the\nseven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_humidity_lm_all_sites model. Information for the model is provided as follows: The tg_humidity_lm_all_sites model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity. This model was used to forecast water temperature and dissolved oxygen concentration at the\nseven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: TOMB, TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-01-31", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -79,16 +80,6 @@ "oxygen", "Daily", "P1D", - "MCRA", - "OKSR", - "POSE", - "PRIN", - "PRLA", - "PRPO", - "REDB", - "SUGG", - "SYCA", - "TECR", "TOMB", "TOOK", "WALK", @@ -112,7 +103,17 @@ "LIRO", "MART", "MAYF", - "MCDI" + "MCDI", + "MCRA", + "OKSR", + "POSE", + "PRIN", + "PRLA", + "PRPO", + "REDB", + "SUGG", + "SYCA", + "TECR" ], "table:columns": [ { @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database 
Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_humidity_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_lasso.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_lasso.json index 9e0480e4bd..9a65265e61 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_lasso.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_lasso.json @@ -9,15 +9,6 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-122.1655, 44.2596], - [-149.143, 68.6698], - [-78.1473, 38.8943], - [-97.7823, 33.3785], - [-99.1139, 47.1591], - [-99.2531, 47.1298], - [-111.7979, 40.7839], - [-82.0177, 29.6878], - [-111.5081, 33.751], [-119.0274, 36.9559], [-88.1589, 31.8534], [-149.6106, 68.6307], @@ -42,13 +33,23 @@ [-89.7048, 45.9983], [-121.9338, 45.7908], [-87.4077, 32.9604], - [-96.443, 38.9459] + [-96.443, 38.9459], + [-122.1655, 44.2596], + [-149.143, 68.6698], + [-78.1473, 38.8943], + [-97.7823, 33.3785], + [-99.1139, 47.1591], + [-99.2531, 47.1298], + [-111.7979, 40.7839], + [-82.0177, 29.6878], + [-111.5081, 33.751] ] }, "properties": { "title": "tg_lasso", - "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_lasso model. Information for the model is provided as follows: Lasso is a machine learning model implemented in the same workflow as tg_randfor, but with\ndifferent hyperparameter tuning. 
The model drivers are unlagged air temperature, air pressure, relative\nhumidity, surface downwelling longwave and shortwave radiation, precipitation, and northward and\neastward wind. Lasso regressions were fitted with the function glmnet() in\nthe package glmnet (Tay et al. 2023), where the regularization hyperparameter (lambda) is tuned and\nselected with 10-fold cross validation..\n The model predicts this variable at the following sites: MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_lasso model. Information for the model is provided as follows: Lasso is a machine learning model implemented in the same workflow as tg_randfor, but with\ndifferent hyperparameter tuning. The model drivers are unlagged air temperature, air pressure, relative\nhumidity, surface downwelling longwave and shortwave radiation, precipitation, and northward and\neastward wind. Lasso regressions were fitted with the function glmnet() in\nthe package glmnet (Tay et al. 
2023), where the regularization hyperparameter (lambda) is tuned and\nselected with 10-fold cross validation..\n The model predicts this variable at the following sites: TECR, TOMB, TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-01-31", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-04T00:00:00Z", "providers": [ @@ -79,15 +80,6 @@ "oxygen", "Daily", "P1D", - "MCRA", - "OKSR", - "POSE", - "PRIN", - "PRLA", - "PRPO", - "REDB", - "SUGG", - "SYCA", "TECR", "TOMB", "TOOK", @@ -112,7 +104,16 @@ "LIRO", "MART", "MAYF", - "MCDI" + "MCDI", + "MCRA", + "OKSR", + "POSE", + "PRIN", + "PRLA", + "PRPO", + "REDB", + "SUGG", + "SYCA" ], "table:columns": [ { @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_lasso\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_precip_lm.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_precip_lm.json index ff6a74c50a..c1bcfe9802 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_precip_lm.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_precip_lm.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_precip_lm", "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_precip_lm model. Information for the model is provided as follows: The tg_precip_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only total precipitation used as a model covariate..\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_precip_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_precip_lm_all_sites.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_precip_lm_all_sites.json index 07f1656a08..d219c2d8e9 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_precip_lm_all_sites.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_precip_lm_all_sites.json @@ -9,6 +9,10 @@ "geometry": { "type": "MultiPoint", "coordinates": [ + [-88.1589, 31.8534], + [-149.6106, 68.6307], + [-84.2793, 35.9574], + [-105.9154, 39.8914], [-102.4471, 39.7582], [-82.0084, 29.676], [-119.2575, 37.0597], @@ -38,17 +42,14 @@ [-111.7979, 40.7839], [-82.0177, 29.6878], [-111.5081, 33.751], - [-119.0274, 36.9559], - [-88.1589, 31.8534], - [-149.6106, 68.6307], - [-84.2793, 35.9574], - [-105.9154, 39.8914] + [-119.0274, 36.9559] ] }, "properties": { "title": "tg_precip_lm_all_sites", - "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_precip_lm_all_sites 
model. Information for the model is provided as follows: The tg_precip_lm_all_sites model is a linear model fit using the function lm() in R. This is a very\nsimple model with only one covariate: total precipitation. y. This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together..\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_precip_lm_all_sites model. Information for the model is provided as follows: The tg_precip_lm_all_sites model is a linear model fit using the function lm() in R. This is a very\nsimple model with only one covariate: total precipitation. y. 
This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together..\n The model predicts this variable at the following sites: TOMB, TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-08-16", + "updated": "2024-08-23", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-09-19T00:00:00Z", "providers": [ @@ -79,6 +80,10 @@ "oxygen", "Daily", "P1D", + "TOMB", + "TOOK", + "WALK", + "WLOU", "ARIK", "BARC", "BIGC", @@ -108,11 +113,7 @@ "REDB", "SUGG", "SYCA", - "TECR", - "TOMB", - "TOOK", - "WALK", - "WLOU" + "TECR" ], "table:columns": [ { @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_precip_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_randfor.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_randfor.json index 07fcb4ffdb..480f7a8a0e 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_randfor.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_randfor.json @@ -9,6 +9,11 @@ "geometry": { "type": "MultiPoint", "coordinates": [ + [-119.0274, 36.9559], + [-88.1589, 31.8534], + [-149.6106, 68.6307], + [-84.2793, 35.9574], + [-105.9154, 39.8914], [-102.4471, 39.7582], [-82.0084, 29.676], [-119.2575, 37.0597], @@ -37,18 +42,14 @@ [-99.2531, 47.1298], [-111.7979, 40.7839], [-82.0177, 29.6878], - [-111.5081, 33.751], - [-119.0274, 36.9559], - [-88.1589, 31.8534], - [-149.6106, 68.6307], - [-84.2793, 35.9574], - [-105.9154, 39.8914] + [-111.5081, 33.751] ] }, "properties": { "title": "tg_randfor", - "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_randfor model. 
Information for the model is provided as follows: Random Forest is a machine learning model that is fitted with the ranger() function in the ranger\nR package (Wright & Ziegler 2017) within the tidymodels framework (Kuhn & Wickham 2020). The\nmodel drivers are unlagged air temperature, air pressure, relative humidity, surface downwelling\nlongwave and shortwave radiation, precipitation, and northward and eastward wind.\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_randfor model. Information for the model is provided as follows: Random Forest is a machine learning model that is fitted with the ranger() function in the ranger\nR package (Wright & Ziegler 2017) within the tidymodels framework (Kuhn & Wickham 2020). 
The\nmodel drivers are unlagged air temperature, air pressure, relative humidity, surface downwelling\nlongwave and shortwave radiation, precipitation, and northward and eastward wind.\n The model predicts this variable at the following sites: TECR, TOMB, TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-08-16", + "updated": "2024-08-23", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-09-18T00:00:00Z", "providers": [ @@ -79,6 +80,11 @@ "oxygen", "Daily", "P1D", + "TECR", + "TOMB", + "TOOK", + "WALK", + "WLOU", "ARIK", "BARC", "BIGC", @@ -107,12 +113,7 @@ "PRPO", "REDB", "SUGG", - "SYCA", - "TECR", - "TOMB", - "TOOK", - "WALK", - "WLOU" + "SYCA" ], "table:columns": [ { @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_randfor\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_tbats.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_tbats.json index 27a4ba237a..4e2c98f39d 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_tbats.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_tbats.json @@ -9,6 +9,16 @@ "geometry": { "type": "MultiPoint", "coordinates": [ + [-102.4471, 39.7582], + [-82.0084, 29.676], + [-119.2575, 37.0597], + [-110.5871, 44.9501], + [-96.6242, 34.4442], + [-87.7982, 32.5415], + [-147.504, 65.1532], + [-105.5442, 40.035], + [-89.4737, 46.2097], + [-66.9868, 18.1135], [-84.4374, 31.1854], [-66.7987, 18.1741], [-72.3295, 42.4719], @@ -32,23 +42,14 @@ [-88.1589, 31.8534], [-149.6106, 68.6307], [-84.2793, 35.9574], - [-105.9154, 39.8914], - [-102.4471, 39.7582], - [-82.0084, 29.676], - [-119.2575, 37.0597], - [-110.5871, 44.9501], - [-96.6242, 34.4442], - [-87.7982, 32.5415], - [-147.504, 65.1532], - [-105.5442, 40.035], - [-89.4737, 
46.2097], - [-66.9868, 18.1135] + [-105.9154, 39.8914] ] }, "properties": { "title": "tg_tbats", - "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_tbats model. Information for the model is provided as follows: The tg_tbats model is a TBATS (Trigonometric seasonality, Box-Cox transformation, ARMA\nerrors, Trend and Seasonal components) model fit using the function tbats() from the forecast package in\nR (Hyndman et al. 2023; Hyndman et al., 2008). This is an empirical time series model with no\ncovariates..\n The model predicts this variable at the following sites: FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_tbats model. Information for the model is provided as follows: The tg_tbats model is a TBATS (Trigonometric seasonality, Box-Cox transformation, ARMA\nerrors, Trend and Seasonal components) model fit using the function tbats() from the forecast package in\nR (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series model with no\ncovariates..\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-07-19", + "updated": "2024-08-23", "start_datetime": "2023-01-01T00:00:00Z", "end_datetime": "2024-08-18T00:00:00Z", "providers": [ @@ -79,6 +80,16 @@ "oxygen", "Daily", "P1D", + "ARIK", + "BARC", + "BIGC", + "BLDE", + "BLUE", + "BLWA", + "CARI", + "COMO", + "CRAM", + "CUPE", "FLNT", "GUIL", "HOPB", @@ -102,17 +113,7 @@ "TOMB", "TOOK", "WALK", - "WLOU", - "ARIK", - "BARC", - "BIGC", - "BLDE", - "BLUE", - "BLWA", - "CARI", - "COMO", - "CRAM", - "CUPE" + "WLOU" ], "table:columns": [ { @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_tbats\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_temp_lm.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_temp_lm.json index 9d5cc89fa1..b84db3934b 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_temp_lm.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_temp_lm.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_temp_lm", "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_temp_lm model. Information for the model is provided as follows: The tg_temp_lm model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation..\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_temp_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_temp_lm_all_sites.json b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_temp_lm_all_sites.json index 3159165fcd..4cb6e130fd 100644 --- a/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_temp_lm_all_sites.json +++ b/catalog/summaries/Aquatics/Daily_Dissolved_oxygen/models/tg_temp_lm_all_sites.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_temp_lm_all_sites", "description": "All summaries for the Daily_Dissolved_oxygen variable for the tg_temp_lm_all_sites model. Information for the model is provided as follows: The tg_temp_lm_all_sites model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation.This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: TOMB, TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Dissolved_oxygen", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=oxygen/model_id=tg_temp_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/collection.json b/catalog/summaries/Aquatics/Daily_Water_temperature/collection.json index 70a1e4dfa6..2f38a3fff4 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/collection.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/collection.json @@ -61,82 +61,82 @@ { "rel": "item", "type": "application/json", - "href": "./models/tg_humidity_lm.json" + "href": "./models/tg_precip_lm.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_humidity_lm_all_sites.json" + "href": "./models/tg_precip_lm_all_sites.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_lasso.json" + "href": "./models/tg_randfor.json" }, { "rel": "item", "type": "application/json", - "href": "./models/cb_prophet.json" + "href": "./models/tg_tbats.json" }, { "rel": "item", "type": "application/json", - "href": "./models/climatology.json" + "href": "./models/tg_humidity_lm.json" }, { "rel": 
"item", "type": "application/json", - "href": "./models/tg_precip_lm.json" + "href": "./models/tg_humidity_lm_all_sites.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_precip_lm_all_sites.json" + "href": "./models/tg_lasso.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_randfor.json" + "href": "./models/tg_temp_lm.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_tbats.json" + "href": "./models/tg_temp_lm_all_sites.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_temp_lm.json" + "href": "./models/fARIMA_clim_ensemble.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_temp_lm_all_sites.json" + "href": "./models/climatology.json" }, { "rel": "item", "type": "application/json", - "href": "./models/GLEON_JRabaey_temp_physics.json" + "href": "./models/cb_prophet.json" }, { "rel": "item", "type": "application/json", - "href": "./models/GLEON_lm_lag_1day.json" + "href": "./models/GLEON_JRabaey_temp_physics.json" }, { "rel": "item", "type": "application/json", - "href": "./models/air2waterSat_2.json" + "href": "./models/GLEON_lm_lag_1day.json" }, { "rel": "item", "type": "application/json", - "href": "./models/baseline_ensemble.json" + "href": "./models/air2waterSat_2.json" }, { "rel": "item", "type": "application/json", - "href": "./models/fARIMA_clim_ensemble.json" + "href": "./models/baseline_ensemble.json" }, { "rel": "item", @@ -151,42 +151,42 @@ { "rel": "item", "type": "application/json", - "href": "./models/GAM_air_wind.json" + "href": "./models/mlp1_wtempforecast_LF.json" }, { "rel": "item", "type": "application/json", - "href": "./models/TSLM_seasonal_JM.json" + "href": "./models/zimmerman_proj1.json" }, { "rel": "item", "type": "application/json", - "href": "./models/bee_bake_RFModel_2024.json" + "href": "./models/GAM_air_wind.json" }, { "rel": "item", "type": "application/json", - "href": "./models/hotdeck.json" + "href": 
"./models/TSLM_seasonal_JM.json" }, { "rel": "item", "type": "application/json", - "href": "./models/lm_AT_WTL_WS.json" + "href": "./models/bee_bake_RFModel_2024.json" }, { "rel": "item", "type": "application/json", - "href": "./models/mkricheldorf_w_lag.json" + "href": "./models/hotdeck.json" }, { "rel": "item", "type": "application/json", - "href": "./models/mlp1_wtempforecast_LF.json" + "href": "./models/lm_AT_WTL_WS.json" }, { "rel": "item", "type": "application/json", - "href": "./models/zimmerman_proj1.json" + "href": "./models/mkricheldorf_w_lag.json" }, { "rel": "item", diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/GAM_air_wind.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/GAM_air_wind.json index f8af577ed8..aa3be2e578 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/GAM_air_wind.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/GAM_air_wind.json @@ -21,7 +21,8 @@ "properties": { "title": "GAM_air_wind", "description": "All summaries for the Daily_Water_temperature variable for the GAM_air_wind model. Information for the model is provided as follows: I used a GAM (mgcv) with a linear relationship to air temperature and smoothing for eastward and northward winds..\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2024-03-01T00:00:00Z", "end_datetime": "2024-09-25T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=GAM_air_wind?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=GAM_air_wind?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=GAM_air_wind?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=GAM_air_wind\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/GLEON_JRabaey_temp_physics.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/GLEON_JRabaey_temp_physics.json index 1ef6aced5e..d888717d42 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/GLEON_JRabaey_temp_physics.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/GLEON_JRabaey_temp_physics.json @@ -21,7 +21,8 @@ "properties": { "title": "GLEON_JRabaey_temp_physics", "description": "All summaries for the Daily_Water_temperature variable for the GLEON_JRabaey_temp_physics model.
Information for the model is provided as follows: The JR-physics model is a simple process model based on the assumption that surface water\ntemperature should trend towards equilibration with air temperature with a lag factor..\n The model predicts this variable at the following sites: WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-09", + "updated": "2024-02-09", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-12T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=GLEON_JRabaey_temp_physics?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=GLEON_JRabaey_temp_physics?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=GLEON_JRabaey_temp_physics?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=GLEON_JRabaey_temp_physics\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/GLEON_lm_lag_1day.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/GLEON_lm_lag_1day.json index f988838571..b5d074e462 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/GLEON_lm_lag_1day.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/GLEON_lm_lag_1day.json @@ -21,7 +21,8 @@ "properties": { "title": "GLEON_lm_lag_1day", "description": "All summaries for the Daily_Water_temperature variable for the GLEON_lm_lag_1day model. 
Information for the model is provided as follows: NA.\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2023-12-29", + "updated": "2024-01-01", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-02-02T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=GLEON_lm_lag_1day?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=GLEON_lm_lag_1day?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=GLEON_lm_lag_1day?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=GLEON_lm_lag_1day\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/GLEON_physics.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/GLEON_physics.json index 686eaf95c3..1acac0bc7f 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/GLEON_physics.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/GLEON_physics.json @@ -20,7 +20,8 @@ "properties": { "title": "GLEON_physics", "description": "All summaries for the Daily_Water_temperature variable for the GLEON_physics model. Information for the model is provided as follows: A simple, process-based model was developed to replicate the water temperature dynamics of a\nsurface water layer sensu Chapra (2008). 
The model focus was only on quantifying the impacts of\natmosphere-water heat flux exchanges on the idealized near-surface water temperature dynamics.\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2023-11-19", + "updated": "2023-11-20", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2023-12-22T00:00:00Z", "providers": [ @@ -203,7 +204,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=GLEON_physics?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=GLEON_physics?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=GLEON_physics?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=GLEON_physics\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/TSLM_seasonal_JM.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/TSLM_seasonal_JM.json index 88d2e958d5..b6bb749a55 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/TSLM_seasonal_JM.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/TSLM_seasonal_JM.json @@ -21,7 +21,8 @@ "properties": { "title": "TSLM_seasonal_JM", "description": "All summaries for the Daily_Water_temperature variable for the TSLM_seasonal_JM model. 
Information for the model is provided as follows: My model uses the fable package TSLM, and uses built in exogenous regressors to represent the trend and seasonality of the data as well as air temperature to predict water temperature..\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-04-30", + "updated": "2024-04-30", "start_datetime": "2024-02-29T00:00:00Z", "end_datetime": "2024-06-02T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=TSLM_seasonal_JM?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=TSLM_seasonal_JM?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=TSLM_seasonal_JM?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=TSLM_seasonal_JM\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/acp_fableLM.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/acp_fableLM.json index 10a00a9d26..c559d7009e 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/acp_fableLM.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/acp_fableLM.json @@ -21,7 +21,8 @@ "properties": { "title": "acp_fableLM", "description": "All summaries for the Daily_Water_temperature variable for the acp_fableLM model. 
Information for the model is provided as follows: Time series linear model with FABLE.\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-03-11", + "updated": "2024-03-12", "start_datetime": "2024-03-11T00:00:00Z", "end_datetime": "2024-04-13T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=acp_fableLM?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=acp_fableLM?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=acp_fableLM?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=acp_fableLM\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/air2waterSat_2.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/air2waterSat_2.json index a968096082..2743630a8d 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/air2waterSat_2.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/air2waterSat_2.json @@ -48,7 +48,8 @@ "properties": { "title": "air2waterSat_2", "description": "All summaries for the Daily_Water_temperature variable for the air2waterSat_2 model. 
Information for the model is provided as follows: The air2water model is a linear model fit using the function lm() in R and uses air temperature as\na covariate.\n The model predicts this variable at the following sites: TOOK, WALK, WLOU, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-01", + "updated": "2024-02-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=air2waterSat_2?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=air2waterSat_2?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=air2waterSat_2?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=air2waterSat_2\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/baseline_ensemble.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/baseline_ensemble.json index 6d91fc1431..d87e98ca65 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/baseline_ensemble.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/baseline_ensemble.json @@ -48,7 +48,8 @@ "properties": { "title": "baseline_ensemble", "description": "All summaries for the Daily_Water_temperature variable for the baseline_ensemble model. 
Information for the model is provided as follows: The Baseline MME is a multi-model ensemble (MME) comprised of the two baseline models\n(day-of-year, persistence) submitted by Challenge organisers.\n The model predicts this variable at the following sites: BLWA, COMO, CUPE, FLNT, GUIL, HOPB, SUGG, SYCA, TECR, TOMB, WALK, WLOU, KING, LECO, LEWI, MART, MAYF, MCDI, MCRA, POSE, PRIN, REDB, ARIK, BARC, BIGC, BLDE, BLUE, CRAM, LIRO, PRLA, PRPO, CARI, OKSR, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-07-03", + "updated": null, "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=baseline_ensemble?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=baseline_ensemble?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=baseline_ensemble?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=baseline_ensemble\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/bee_bake_RFModel_2024.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/bee_bake_RFModel_2024.json index 51b85800c4..6e7b1caf50 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/bee_bake_RFModel_2024.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/bee_bake_RFModel_2024.json @@ -9,19 +9,20 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-89.7048, 45.9983], + [-82.0084, 29.676], [-99.2531, 47.1298], [-89.4737, 46.2097], [-99.1139, 47.1591], - [-82.0084, 29.676], + [-89.7048, 45.9983], [-82.0177, 29.6878], [-149.6106, 68.6307] ] }, "properties": { "title": "bee_bake_RFModel_2024", - "description": "All summaries for the Daily_Water_temperature variable for the bee_bake_RFModel_2024 model. 
Information for the model is provided as follows: Random Forest.\n The model predicts this variable at the following sites: LIRO, PRPO, CRAM, PRLA, BARC, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "description": "All summaries for the Daily_Water_temperature variable for the bee_bake_RFModel_2024 model. Information for the model is provided as follows: Random Forest.\n The model predicts this variable at the following sites: BARC, PRPO, CRAM, PRLA, LIRO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2024-02-29T00:00:00Z", "end_datetime": "2024-09-24T00:00:00Z", "providers": [ @@ -52,11 +53,11 @@ "temperature", "Daily", "P1D", - "LIRO", + "BARC", "PRPO", "CRAM", "PRLA", - "BARC", + "LIRO", "SUGG", "TOOK" ], @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=bee_bake_RFModel_2024?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=bee_bake_RFModel_2024?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=bee_bake_RFModel_2024?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=bee_bake_RFModel_2024\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/cb_prophet.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/cb_prophet.json index a711dfbf4c..38e26b07e3 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/cb_prophet.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/cb_prophet.json @@ -46,7 +46,8 @@ "properties": { "title": "cb_prophet", "description": "All summaries for the Daily_Water_temperature variable for the cb_prophet model. Information for the model is provided as follows: The Prophet model is an empirical model, specifically a non-linear regression model that includes\nseasonality effects (Taylor & Letham, 2018). 
The model relies on Bayesian estimation with an additive\nwhite noise error term.\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, POSE, PRIN, PRLA, PRPO, REDB, SUGG, TECR, TOMB, WALK, WLOU, SYCA.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-06", + "updated": "2024-02-07", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-10T00:00:00Z", "providers": [ @@ -255,7 +256,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=cb_prophet\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/climatology.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/climatology.json index e1ea8b03be..9df09903bd 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/climatology.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/climatology.json @@ -9,13 +9,13 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-102.4471, 39.7582], - [-82.0084, 29.676], [-119.2575, 37.0597], [-110.5871, 44.9501], [-96.6242, 34.4442], [-87.7982, 32.5415], + [-147.504, 65.1532], [-105.5442, 40.035], + [-89.4737, 46.2097], [-66.9868, 18.1135], [-84.4374, 31.1854], [-66.7987, 18.1741], @@ -23,32 +23,33 @@ [-96.6038, 39.1051], [-83.5038, 35.6904], [-77.9832, 39.0956], + [-89.7048, 45.9983], [-121.9338, 45.7908], [-87.4077, 32.9604], [-96.443, 38.9459], [-122.1655, 44.2596], [-78.1473, 38.8943], [-97.7823, 33.3785], + [-99.1139, 47.1591], + [-99.2531, 47.1298], [-111.7979, 40.7839], [-82.0177, 
29.6878], [-111.5081, 33.751], [-119.0274, 36.9559], + [-88.1589, 31.8534], [-84.2793, 35.9574], [-105.9154, 39.8914], - [-88.1589, 31.8534], - [-89.7048, 45.9983], - [-99.2531, 47.1298], - [-89.4737, 46.2097], - [-99.1139, 47.1591], - [-147.504, 65.1532], + [-102.4471, 39.7582], + [-82.0084, 29.676], [-149.143, 68.6698], [-149.6106, 68.6307] ] }, "properties": { "title": "climatology", - "description": "All summaries for the Daily_Water_temperature variable for the climatology model. Information for the model is provided as follows: Historical DOY mean and sd. Assumes normal distribution.\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, COMO, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, MART, MAYF, MCDI, MCRA, POSE, PRIN, REDB, SUGG, SYCA, TECR, WALK, WLOU, TOMB, LIRO, PRPO, CRAM, PRLA, CARI, OKSR, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "description": "All summaries for the Daily_Water_temperature variable for the climatology model. Information for the model is provided as follows: Historical DOY mean and sd. 
Assumes normal distribution.\n The model predicts this variable at the following sites: BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, WALK, WLOU, ARIK, BARC, OKSR, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2023-01-02T00:00:00Z", "end_datetime": "2024-09-26T00:00:00Z", "providers": [ @@ -79,13 +80,13 @@ "temperature", "Daily", "P1D", - "ARIK", - "BARC", "BIGC", "BLDE", "BLUE", "BLWA", + "CARI", "COMO", + "CRAM", "CUPE", "FLNT", "GUIL", @@ -93,24 +94,24 @@ "KING", "LECO", "LEWI", + "LIRO", "MART", "MAYF", "MCDI", "MCRA", "POSE", "PRIN", + "PRLA", + "PRPO", "REDB", "SUGG", "SYCA", "TECR", + "TOMB", "WALK", "WLOU", - "TOMB", - "LIRO", - "PRPO", - "CRAM", - "PRLA", - "CARI", + "ARIK", + "BARC", "OKSR", "TOOK" ], @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=climatology?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=climatology\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/fARIMA_clim_ensemble.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/fARIMA_clim_ensemble.json index 6a23a81323..9a42c3157c 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/fARIMA_clim_ensemble.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/fARIMA_clim_ensemble.json @@ -34,8 +34,8 @@ [-88.1589, 31.8534], [-119.2575, 37.0597], [-110.5871, 44.9501], - [-84.4374, 31.1854], [-89.4737, 46.2097], + [-84.4374, 31.1854], [-111.5081, 33.751], [-89.7048, 45.9983], [-99.1139, 47.1591], @@ -47,8 +47,9 @@ }, "properties": { "title": "fARIMA_clim_ensemble", - "description": "All summaries for the Daily_Water_temperature variable for the fARIMA_clim_ensemble model. 
Information for the model is provided as follows: The fAMIRA-DOY MME is a multi-model ensemble (MME) composed of two empirical\nmodels: an ARIMA model (fARIMA) and day-of-year model.\n The model predicts this variable at the following sites: LECO, LEWI, MART, MAYF, MCDI, MCRA, COMO, CUPE, GUIL, HOPB, KING, ARIK, BARC, BLUE, BLWA, WALK, WLOU, POSE, PRIN, REDB, SUGG, TECR, TOMB, BIGC, BLDE, FLNT, CRAM, SYCA, LIRO, PRLA, PRPO, CARI, OKSR, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "description": "All summaries for the Daily_Water_temperature variable for the fARIMA_clim_ensemble model. Information for the model is provided as follows: The fAMIRA-DOY MME is a multi-model ensemble (MME) composed of two empirical\nmodels: an ARIMA model (fARIMA) and day-of-year model.\n The model predicts this variable at the following sites: LECO, LEWI, MART, MAYF, MCDI, MCRA, COMO, CUPE, GUIL, HOPB, KING, ARIK, BARC, BLUE, BLWA, WALK, WLOU, POSE, PRIN, REDB, SUGG, TECR, TOMB, BIGC, BLDE, CRAM, FLNT, SYCA, LIRO, PRLA, PRPO, CARI, OKSR, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-07-03", + "updated": null, "start_datetime": "2023-11-10T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -104,8 +105,8 @@ "TOMB", "BIGC", "BLDE", - "FLNT", "CRAM", + "FLNT", "SYCA", "LIRO", "PRLA", @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=fARIMA_clim_ensemble?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=fARIMA_clim_ensemble?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=fARIMA_clim_ensemble?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=fARIMA_clim_ensemble\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/fTSLM_lag.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/fTSLM_lag.json index c3f6fd7e92..bf896140b2 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/fTSLM_lag.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/fTSLM_lag.json @@ -48,7 +48,8 @@ "properties": { "title": "fTSLM_lag", "description": "All summaries for the Daily_Water_temperature variable for the fTSLM_lag model. Information for the model is provided as follows: This is a simple time series linear model in which water temperature is a function of air\ntemperature of that day and the previous day’s air temperature.\n The model predicts this variable at the following sites: TOMB, TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-11", + "updated": "2024-08-23", "start_datetime": "2023-01-08T00:00:00Z", "end_datetime": "2024-09-14T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": 
"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=fTSLM_lag?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=fTSLM_lag?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=fTSLM_lag?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=fTSLM_lag\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareGLM.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareGLM.json index a7b1bd0228..c97f37c4b0 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareGLM.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareGLM.json @@ -21,7 +21,8 @@ "properties": { "title": "flareGLM", "description": "All summaries for the Daily_Water_temperature variable for the flareGLM model. Information for the model is provided as follows: The FLARE-GLM is a forecasting framework that integrates the General Lake Model\nhydrodynamic process model (GLM; Hipsey et al., 2019) and data assimilation algorithm to generate\nensemble forecasts of lake water temperature..\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2023-01-02T00:00:00Z", "end_datetime": "2024-09-25T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareGLM?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareGLM?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareGLM?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareGLM\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareGLM_noDA.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareGLM_noDA.json index f4dd99007b..2d97164d4f 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareGLM_noDA.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareGLM_noDA.json @@ -21,7 +21,8 @@ "properties": { "title": "flareGLM_noDA", "description": "All summaries for the Daily_Water_temperature variable for the flareGLM_noDA model. Information for the model is provided as follows: The FLARE-GLM is a forecasting framework that integrates the General Lake Model\nhydrodynamic process model (GLM; Hipsey et al., 2019). This version does not incorportate data assimilation.\n The model predicts this variable at the following sites: TOOK, BARC, CRAM, LIRO, PRLA, PRPO, SUGG.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2023-03-02T00:00:00Z", "end_datetime": "2024-09-25T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareGLM_noDA?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareGLM_noDA?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareGLM_noDA?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareGLM_noDA\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareGOTM_noDA.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareGOTM_noDA.json index 625032bc6c..8b3fe05d66 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareGOTM_noDA.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareGOTM_noDA.json @@ -21,7 +21,8 @@ "properties": { "title": "flareGOTM_noDA", "description": "All summaries for the Daily_Water_temperature variable for the flareGOTM_noDA model. Information for the model is provided as follows: FLARE-GOTM uses the General Ocean Turbulence Model (GOTM) hydrodynamic model. GOTM is a 1-D\nhydrodynamic turbulence model (Umlauf et al., 2005) that estimates water column temperatures.\n The model predicts this variable at the following sites: BARC, CRAM, SUGG, LIRO, PRLA, PRPO, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-19", + "updated": "2024-02-20", "start_datetime": "2023-03-08T00:00:00Z", "end_datetime": "2024-03-20T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareGOTM_noDA?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareGOTM_noDA?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareGOTM_noDA?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareGOTM_noDA\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareSimstrat_noDA.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareSimstrat_noDA.json index 7c4e899d43..b4673e623b 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareSimstrat_noDA.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/flareSimstrat_noDA.json @@ -20,7 +20,8 @@ "properties": { "title": "flareSimstrat_noDA", "description": "All summaries for the Daily_Water_temperature variable for the flareSimstrat_noDA model. Information for the model is provided as follows: FLARE-Simstrat uses the same principles and overarching framework as FLARE-GLM with the\nhydrodynamic model replaced with Simstrat. 
Simstrat is a 1-D hydrodynamic turbulence model\n(Goudsmit et al., 2002) that estimates water column temperatures..\n The model predicts this variable at the following sites: BARC, SUGG, TOOK, CRAM, PRLA, PRPO.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-18", + "updated": "2024-02-19", "start_datetime": "2023-03-08T00:00:00Z", "end_datetime": "2024-03-19T00:00:00Z", "providers": [ @@ -203,7 +204,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareSimstrat_noDA?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareSimstrat_noDA?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareSimstrat_noDA?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=flareSimstrat_noDA\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/flare_ler.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/flare_ler.json index 010fec8b01..856cf017b3 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/flare_ler.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/flare_ler.json @@ -20,7 +20,8 @@ "properties": { "title": "flare_ler", "description": "All summaries for the Daily_Water_temperature variable for the flare_ler model. Information for the model is provided as follows: The LER MME is a multi-model ensemble (MME) derived from the three process models from\nFLARE (FLARE-GLM, FLARE-GOTM, and FLARE-Simstrat). 
To generate the MME, an ensemble\nforecast was generated by sampling from the submitted models’ ensemble members.\n The model predicts this variable at the following sites: SUGG, CRAM, LIRO, PRLA, PRPO, BARC.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-18", + "updated": "2024-08-22", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-19T00:00:00Z", "providers": [ @@ -203,7 +204,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flare_ler?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flare_ler?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flare_ler?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=flare_ler\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/flare_ler_baselines.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/flare_ler_baselines.json index 355a90636a..81ac0e4cc4 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/flare_ler_baselines.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/flare_ler_baselines.json @@ -16,7 +16,8 @@ "properties": { "title": "flare_ler_baselines", "description": "All summaries for the Daily_Water_temperature variable for the flare_ler_baselines model. Information for the model is provided as follows: The LER-baselines model is a multi-model ensemble (MME) comprised of the three process\nmodels from FLARE (FLARE-GLM, FLARE-GOTM, and FLARE-Simstrat) and the two baseline\nmodels (day-of-year, persistence), submitted by Challenge organisers. 
To generate the MME, an\nensemble forecast was generated by sampling from the submitted model’s ensemble members (either\nfrom an ensemble forecast in the case of the FLARE models and persistence, or from the distribution for\nthe day-of-year forecasts).\n The model predicts this variable at the following sites: SUGG, BARC.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-18", + "updated": null, "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-19T00:00:00Z", "providers": [ @@ -195,7 +196,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flare_ler_baselines?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flare_ler_baselines?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=flare_ler_baselines?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=flare_ler_baselines\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/hotdeck.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/hotdeck.json index 51aef5a1b8..ba7ae47cc7 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/hotdeck.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/hotdeck.json @@ -26,10 +26,10 @@ [-72.3295, 42.4719], [-111.7979, 40.7839], [-119.0274, 36.9559], + [-89.4737, 46.2097], [-110.5871, 44.9501], [-105.5442, 40.035], [-105.9154, 39.8914], - [-89.4737, 46.2097], [-147.504, 65.1532], [-119.2575, 37.0597], [-96.6242, 34.4442], @@ -43,8 +43,9 @@ }, "properties": { "title": "hotdeck", - "description": "All summaries for the Daily_Water_temperature variable for the hotdeck model. Information for the model is provided as follows: Uses a hot deck approach: - Take the latest observation/forecast. - Past observations from around the same window of the season are collected. 
- Values close to the latest observation/forecast are collected. - One of these is randomly sampled. - Its \"tomorrow\" observation is used as the forecast. - Repeat until forecast at step h..\n The model predicts this variable at the following sites: BARC, SUGG, TOMB, BLWA, FLNT, MCRA, KING, SYCA, POSE, PRIN, MAYF, LEWI, LECO, ARIK, HOPB, REDB, TECR, BLDE, COMO, WLOU, CRAM, CARI, BIGC, BLUE, CUPE, GUIL, WALK, LIRO, PRLA, PRPO.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "description": "All summaries for the Daily_Water_temperature variable for the hotdeck model. Information for the model is provided as follows: Uses a hot deck approach: - Take the latest observation/forecast. - Past observations from around the same window of the season are collected. - Values close to the latest observation/forecast are collected. - One of these is randomly sampled. - Its \"tomorrow\" observation is used as the forecast. - Repeat until forecast at step h..\n The model predicts this variable at the following sites: BARC, SUGG, TOMB, BLWA, FLNT, MCRA, KING, SYCA, POSE, PRIN, MAYF, LEWI, LECO, ARIK, HOPB, REDB, TECR, CRAM, BLDE, COMO, WLOU, CARI, BIGC, BLUE, CUPE, GUIL, WALK, LIRO, PRLA, PRPO.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2024-02-28T00:00:00Z", "end_datetime": "2024-09-21T00:00:00Z", "providers": [ @@ -92,10 +93,10 @@ "HOPB", "REDB", "TECR", + "CRAM", "BLDE", "COMO", "WLOU", - "CRAM", "CARI", "BIGC", "BLUE", @@ -251,7 +252,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=hotdeck?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=hotdeck?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=hotdeck?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=hotdeck\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/lm_AT_WTL_WS.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/lm_AT_WTL_WS.json index 93fa62cf34..6730e3cfa6 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/lm_AT_WTL_WS.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/lm_AT_WTL_WS.json @@ -21,7 +21,8 @@ "properties": { "title": "lm_AT_WTL_WS", "description": "All summaries for the Daily_Water_temperature variable for the lm_AT_WTL_WS model. Information for the model is provided as follows: This forecast of water temperature at NEON Lake sites uses a linear model, incorporating air temperature, wind speed, and the previous day's forecasted water temperature as variables..\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2024-03-01T00:00:00Z", "end_datetime": "2024-09-21T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=lm_AT_WTL_WS?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=lm_AT_WTL_WS?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=lm_AT_WTL_WS?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=lm_AT_WTL_WS\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/mkricheldorf_w_lag.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/mkricheldorf_w_lag.json index ab59ee2536..0090592b98 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/mkricheldorf_w_lag.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/mkricheldorf_w_lag.json @@ -21,7 +21,8 @@ "properties": { "title": "mkricheldorf_w_lag", "description": "All summaries for the Daily_Water_temperature variable for the mkricheldorf_w_lag model. Information for the model is provided as follows: I used an autoregressive linear model using the lm() function.\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2024-03-06T00:00:00Z", "end_datetime": "2024-09-25T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=mkricheldorf_w_lag?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=mkricheldorf_w_lag?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=mkricheldorf_w_lag?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=mkricheldorf_w_lag\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/mlp1_wtempforecast_LF.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/mlp1_wtempforecast_LF.json index b41356aa93..20333cd5ac 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/mlp1_wtempforecast_LF.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/mlp1_wtempforecast_LF.json @@ -9,19 +9,20 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-82.0084, 29.676], - [-89.4737, 46.2097], [-89.7048, 45.9983], [-99.1139, 47.1591], [-99.2531, 47.1298], [-82.0177, 29.6878], - [-149.6106, 68.6307] + [-149.6106, 68.6307], + [-82.0084, 29.676], + [-89.4737, 46.2097] ] }, "properties": { "title": "mlp1_wtempforecast_LF", - "description": "All summaries for the Daily_Water_temperature variable for the mlp1_wtempforecast_LF model. Information for the model is provided as follows: Modelling for water temperature using a single layer neural network (mlp() in tidymodels). Used relative humidity, precipitation flux and air temperature as drivers. 
Hypertuned parameters for models to be run with 100 epochs and penalty value of 0.01..\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "description": "All summaries for the Daily_Water_temperature variable for the mlp1_wtempforecast_LF model. Information for the model is provided as follows: Modelling for water temperature using a single layer neural network (mlp() in tidymodels). Used relative humidity, precipitation flux and air temperature as drivers. Hypertuned parameters for models to be run with 100 epochs and penalty value of 0.01..\n The model predicts this variable at the following sites: LIRO, PRLA, PRPO, SUGG, TOOK, BARC, CRAM.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2024-03-01T00:00:00Z", "end_datetime": "2024-09-24T00:00:00Z", "providers": [ @@ -52,13 +53,13 @@ "temperature", "Daily", "P1D", - "BARC", - "CRAM", "LIRO", "PRLA", "PRPO", "SUGG", - "TOOK" + "TOOK", + "BARC", + "CRAM" ], "table:columns": [ { @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=mlp1_wtempforecast_LF?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=mlp1_wtempforecast_LF?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=mlp1_wtempforecast_LF?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=mlp1_wtempforecast_LF\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/persistenceRW.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/persistenceRW.json index 6a7ad0d6fd..08c5bab9dd 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/persistenceRW.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/persistenceRW.json @@ -48,7 +48,8 @@ "properties": { "title": "persistenceRW", "description": "All summaries for the Daily_Water_temperature variable for the persistenceRW model. 
Information for the model is provided as follows: Random walk from the fable package with ensembles used to represent uncertainty.\n The model predicts this variable at the following sites: KING, LECO, LEWI, LIRO, MART, MAYF, ARIK, BARC, BIGC, BLDE, BLUE, MCDI, MCRA, OKSR, POSE, PRIN, WLOU, CUPE, FLNT, GUIL, HOPB, PRLA, PRPO, REDB, SUGG, SYCA, BLWA, CARI, COMO, CRAM, TECR, TOMB, TOOK, WALK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2023-11-15T00:00:00Z", "end_datetime": "2024-09-25T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=persistenceRW\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/precip_mod.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/precip_mod.json index 281c970c6e..414f33f43b 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/precip_mod.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/precip_mod.json @@ -21,7 +21,8 @@ "properties": { "title": "precip_mod", "description": "All summaries for the Daily_Water_temperature variable for the precip_mod model. 
Information for the model is provided as follows: NA.\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2023-12-20", + "updated": "2023-12-22", "start_datetime": "2023-12-21T00:00:00Z", "end_datetime": "2024-01-24T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=precip_mod?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=precip_mod?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=precip_mod?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=precip_mod\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_arima.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_arima.json index 29e68c0a7a..75819abac5 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_arima.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_arima.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_arima", "description": "All summaries for the Daily_Water_temperature variable for the tg_arima model. Information for the model is provided as follows: The tg_arima model is an AutoRegressive Integrated Moving Average (ARIMA) model fit using\nthe function auto.arima() from the forecast package in R (Hyndman et al. 
2023; Hyndman et al., 2008).\nThis is an empirical time series model with no covariates.\n The model predicts this variable at the following sites: MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-07-19", + "updated": "2024-08-23", "start_datetime": "2023-01-01T00:00:00Z", "end_datetime": "2024-08-18T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_arima\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_ets.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_ets.json index aa4544c425..63d06ffa8d 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_ets.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_ets.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_ets", "description": "All summaries for the Daily_Water_temperature variable for the tg_ets model. Information for the model is provided as follows: The tg_ets model is an Error, Trend, Seasonal (ETS) model fit using the function ets() from the\nforecast package in R (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series\nmodel with no covariates..\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-07-19", + "updated": "2024-08-23", "start_datetime": "2023-01-01T00:00:00Z", "end_datetime": "2024-08-18T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_ets\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_humidity_lm.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_humidity_lm.json index b330b8a340..a48728c953 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_humidity_lm.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_humidity_lm.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_humidity_lm", "description": "All summaries for the Daily_Water_temperature variable for the tg_humidity_lm model. Information for the model is provided as follows: The tg_humidity_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only one covariate: relative humidity.\n The model predicts this variable at the following sites: TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_humidity_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_humidity_lm_all_sites.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_humidity_lm_all_sites.json index cd17da1749..2f4e74182d 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_humidity_lm_all_sites.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_humidity_lm_all_sites.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_humidity_lm_all_sites", "description": "All summaries for the Daily_Water_temperature variable for the tg_humidity_lm_all_sites model. Information for the model is provided as follows: The tg_humidity_lm_all_sites model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity. 
This model was used to forecast water temperature and dissolved oxygen concentration at the\nseven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_humidity_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_lasso.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_lasso.json index 8a2b00f34b..a83138ce3f 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_lasso.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_lasso.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_lasso", "description": "All summaries for the Daily_Water_temperature variable for the tg_lasso model. Information for the model is provided as follows: Lasso is a machine learning model implemented in the same workflow as tg_randfor, but with\ndifferent hyperparameter tuning. The model drivers are unlagged air temperature, air pressure, relative\nhumidity, surface downwelling longwave and shortwave radiation, precipitation, and northward and\neastward wind. Lasso regressions were fitted with the function glmnet() in\nthe package glmnet (Tay et al. 
2023), where the regularization hyperparameter (lambda) is tuned and\nselected with 10-fold cross validation..\n The model predicts this variable at the following sites: TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-04T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_lasso\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_precip_lm.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_precip_lm.json index 6c987837b6..b574e230f4 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_precip_lm.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_precip_lm.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_precip_lm", "description": "All summaries for the Daily_Water_temperature variable for the tg_precip_lm model. Information for the model is provided as follows: The tg_precip_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only total precipitation used as a model covariate..\n The model predicts this variable at the following sites: TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_precip_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_precip_lm_all_sites.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_precip_lm_all_sites.json index 8ac1b30101..3160f27f74 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_precip_lm_all_sites.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_precip_lm_all_sites.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_precip_lm_all_sites", "description": "All summaries for the Daily_Water_temperature variable for the tg_precip_lm_all_sites model. Information for the model is provided as follows: The tg_precip_lm_all_sites model is a linear model fit using the function lm() in R. This is a very\nsimple model with only one covariate: total precipitation. y. 
This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together..\n The model predicts this variable at the following sites: MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-16", + "updated": "2024-08-23", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-09-19T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_precip_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_randfor.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_randfor.json index 087cca73c9..a0a8069cd1 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_randfor.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_randfor.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_randfor", "description": "All summaries for the Daily_Water_temperature variable for the tg_randfor model. Information for the model is provided as follows: Random Forest is a machine learning model that is fitted with the ranger() function in the ranger\nR package (Wright & Ziegler 2017) within the tidymodels framework (Kuhn & Wickham 2020). 
The\nmodel drivers are unlagged air temperature, air pressure, relative humidity, surface downwelling\nlongwave and shortwave radiation, precipitation, and northward and eastward wind.\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-16", + "updated": "2024-08-23", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-09-18T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_randfor\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_tbats.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_tbats.json index f8a8a8bebb..975f51c7ec 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_tbats.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_tbats.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_tbats", "description": "All summaries for the Daily_Water_temperature variable for the tg_tbats model. Information for the model is provided as follows: The tg_tbats model is a TBATS (Trigonometric seasonality, Box-Cox transformation, ARMA\nerrors, Trend and Seasonal components) model fit using the function tbats() from the forecast package in\nR (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series model with no\ncovariates..\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-07-19", + "updated": "2024-08-23", "start_datetime": "2023-01-01T00:00:00Z", "end_datetime": "2024-08-18T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_tbats\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_temp_lm.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_temp_lm.json index 7f570d03f0..d7bfd8c6df 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_temp_lm.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_temp_lm.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_temp_lm", "description": "All summaries for the Daily_Water_temperature variable for the tg_temp_lm model. Information for the model is provided as follows: The tg_temp_lm model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation..\n The model predicts this variable at the following sites: ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI, MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_temp_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_temp_lm_all_sites.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_temp_lm_all_sites.json index a11b82fbdc..3cbe3b120b 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_temp_lm_all_sites.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/tg_temp_lm_all_sites.json @@ -48,7 +48,8 @@ "properties": { "title": "tg_temp_lm_all_sites", "description": "All summaries for the Daily_Water_temperature variable for the tg_temp_lm_all_sites model. Information for the model is provided as follows: The tg_temp_lm_all_sites model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation.This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: MCRA, OKSR, POSE, PRIN, PRLA, PRPO, REDB, SUGG, SYCA, TECR, TOMB, TOOK, WALK, WLOU, ARIK, BARC, BIGC, BLDE, BLUE, BLWA, CARI, COMO, CRAM, CUPE, FLNT, GUIL, HOPB, KING, LECO, LEWI, LIRO, MART, MAYF, MCDI.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-02", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -259,7 +260,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=tg_temp_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Aquatics/Daily_Water_temperature/models/zimmerman_proj1.json b/catalog/summaries/Aquatics/Daily_Water_temperature/models/zimmerman_proj1.json index 58493f2cd0..8562aeb15b 100644 --- a/catalog/summaries/Aquatics/Daily_Water_temperature/models/zimmerman_proj1.json +++ b/catalog/summaries/Aquatics/Daily_Water_temperature/models/zimmerman_proj1.json @@ -21,7 +21,8 @@ "properties": { "title": "zimmerman_proj1", "description": "All summaries for the Daily_Water_temperature variable for the zimmerman_proj1 model. Information for the model is provided as follows: I used an ARIMA model with one autoregressive term. 
I also included air pressure and air temperature.\n The model predicts this variable at the following sites: BARC, CRAM, LIRO, PRLA, PRPO, SUGG, TOOK.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-08-22", + "updated": "2024-08-23", "start_datetime": "2024-02-28T00:00:00Z", "end_datetime": "2024-09-25T00:00:00Z", "providers": [ @@ -205,7 +206,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Water_temperature", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=zimmerman_proj1?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=zimmerman_proj1?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=temperature/model_id=zimmerman_proj1?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=temperature/model_id=zimmerman_proj1\"\ncon.read_parquet(path + \"/**\")\n```\n" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_arima.json b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_arima.json index d9b79af33f..d36c8ffb4b 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_arima.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_arima.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_arima", "description": "All summaries for the Weekly_beetle_community_abundance variable for the tg_arima model. Information for the model is provided as follows: The tg_arima model is an AutoRegressive Integrated Moving Average (ARIMA) model fit using\nthe function auto.arima() from the forecast package in R (Hyndman et al. 
2023; Hyndman et al., 2008).\nThis is an empirical time series model with no covariates.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-07-14", + "updated": "2024-08-23", "start_datetime": "2023-02-13T00:00:00Z", "end_datetime": "2025-07-07T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_abundance", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_arima\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_ets.json b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_ets.json index b36d0c3812..f143885fe8 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_ets.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_ets.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_ets", "description": "All summaries for the Weekly_beetle_community_abundance variable for the tg_ets model. Information for the model is provided as follows: The tg_ets model is an Error, Trend, Seasonal (ETS) model fit using the function ets() from the\nforecast package in R (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series\nmodel with no covariates..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-07-14", + "updated": "2024-08-23", "start_datetime": "2023-02-06T00:00:00Z", "end_datetime": "2025-07-07T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_abundance", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_ets\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_humidity_lm.json b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_humidity_lm.json index 2992cf136c..8cf1ec4b87 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_humidity_lm.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_humidity_lm.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_humidity_lm", "description": "All summaries for the Weekly_beetle_community_abundance variable for the tg_humidity_lm model. Information for the model is provided as follows: The tg_humidity_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only one covariate: relative humidity.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_abundance", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_humidity_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_humidity_lm_all_sites.json b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_humidity_lm_all_sites.json index 5e786dc0cf..e461575426 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_humidity_lm_all_sites.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_humidity_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_humidity_lm_all_sites", "description": "All summaries for the Weekly_beetle_community_abundance variable for the tg_humidity_lm_all_sites model. Information for the model is provided as follows: The tg_humidity_lm_all_sites model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity. 
This model was used to forecast water temperature and dissolved oxygen concentration at the\nseven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_abundance", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_humidity_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_lasso.json b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_lasso.json index 9bf476e64e..b002d981a3 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_lasso.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_lasso.json @@ -59,7 +59,8 @@ "properties": { "title": "tg_lasso", "description": "All summaries for the Weekly_beetle_community_abundance variable for the tg_lasso model. Information for the model is provided as follows: Lasso is a machine learning model implemented in the same workflow as tg_randfor, but with\ndifferent hyperparameter tuning. The model drivers are unlagged air temperature, air pressure, relative\nhumidity, surface downwelling longwave and shortwave radiation, precipitation, and northward and\neastward wind. 
Lasso regressions were fitted with the function glmnet() in\nthe package glmnet (Tay et al. 2023), where the regularization hyperparameter (lambda) is tuned and\nselected with 10-fold cross validation..\n The model predicts this variable at the following sites: ABBY, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -281,7 +282,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_abundance", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_lasso\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_precip_lm.json b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_precip_lm.json index bb787214d9..e7d6517421 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_precip_lm.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_precip_lm.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_precip_lm", "description": "All summaries for the Weekly_beetle_community_abundance variable for the tg_precip_lm model. Information for the model is provided as follows: The tg_precip_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only total precipitation used as a model covariate..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_abundance", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_precip_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_precip_lm_all_sites.json b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_precip_lm_all_sites.json index 905c2d984c..46ceee0566 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_precip_lm_all_sites.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_precip_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_precip_lm_all_sites", "description": "All summaries for the Weekly_beetle_community_abundance variable for the tg_precip_lm_all_sites model. Information for the model is provided as follows: The tg_precip_lm_all_sites model is a linear model fit using the function lm() in R. This is a very\nsimple model with only one covariate: total precipitation. y. 
This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_abundance", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_precip_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_randfor.json b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_randfor.json index 0c6036d96b..59cc1987a0 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_randfor.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_randfor.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_randfor", "description": "All summaries for the Weekly_beetle_community_abundance variable for the tg_randfor model. Information for the model is provided as follows: Random Forest is a machine learning model that is fitted with the ranger() function in the ranger\nR package (Wright & Ziegler 2017) within the tidymodels framework (Kuhn & Wickham 2020). 
The\nmodel drivers are unlagged air temperature, air pressure, relative humidity, surface downwelling\nlongwave and shortwave radiation, precipitation, and northward and eastward wind.\n The model predicts this variable at the following sites: SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-19T00:00:00Z", "end_datetime": "2024-03-01T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_abundance", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_randfor\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_tbats.json b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_tbats.json index 6b5c6ea341..a1f02420f3 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_tbats.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_tbats.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_tbats", "description": "All summaries for the Weekly_beetle_community_abundance variable for the tg_tbats model. Information for the model is provided as follows: The tg_tbats model is a TBATS (Trigonometric seasonality, Box-Cox transformation, ARMA\nerrors, Trend and Seasonal components) model fit using the function tbats() from the forecast package in\nR (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series model with no\ncovariates..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-07-14", + "updated": "2024-08-23", "start_datetime": "2023-01-02T00:00:00Z", "end_datetime": "2025-07-07T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_abundance", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_tbats\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_temp_lm.json b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_temp_lm.json index 87ade3ef13..b5d270e57f 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_temp_lm.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_temp_lm.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_temp_lm", "description": "All summaries for the Weekly_beetle_community_abundance variable for the tg_temp_lm model. Information for the model is provided as follows: The tg_temp_lm model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_abundance", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_temp_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_temp_lm_all_sites.json b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_temp_lm_all_sites.json index 7d72f72b79..d6febd42e6 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_temp_lm_all_sites.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_abundance/models/tg_temp_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_temp_lm_all_sites", "description": "All summaries for the Weekly_beetle_community_abundance variable for the tg_temp_lm_all_sites model. Information for the model is provided as follows: The tg_temp_lm_all_sites model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation.This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_abundance", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=abundance/model_id=tg_temp_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_arima.json b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_arima.json index 8390ac615a..d1c4d494b6 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_arima.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_arima.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_arima", "description": "All summaries for the Weekly_beetle_community_richness variable for the tg_arima model. Information for the model is provided as follows: The tg_arima model is an AutoRegressive Integrated Moving Average (ARIMA) model fit using\nthe function auto.arima() from the forecast package in R (Hyndman et al. 
2023; Hyndman et al., 2008).\nThis is an empirical time series model with no covariates.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-07-14", + "updated": "2024-08-23", "start_datetime": "2023-02-13T00:00:00Z", "end_datetime": "2025-07-07T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_richness", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_arima\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_ets.json b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_ets.json index 92fd1e71fc..773aa9cfd3 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_ets.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_ets.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_ets", "description": "All summaries for the Weekly_beetle_community_richness variable for the tg_ets model. Information for the model is provided as follows: The tg_ets model is an Error, Trend, Seasonal (ETS) model fit using the function ets() from the\nforecast package in R (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series\nmodel with no covariates..\n The model predicts this variable at the following sites: OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-07-14", + "updated": "2024-08-23", "start_datetime": "2023-02-06T00:00:00Z", "end_datetime": "2025-07-07T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_richness", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_ets\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_humidity_lm.json b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_humidity_lm.json index 6dc4963c63..dfbf9f33ba 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_humidity_lm.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_humidity_lm.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_humidity_lm", "description": "All summaries for the Weekly_beetle_community_richness variable for the tg_humidity_lm model. Information for the model is provided as follows: The tg_humidity_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only one covariate: relative humidity.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_richness", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_humidity_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_humidity_lm_all_sites.json b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_humidity_lm_all_sites.json index 2144b6266e..7df9c8cf43 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_humidity_lm_all_sites.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_humidity_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_humidity_lm_all_sites", "description": "All summaries for the Weekly_beetle_community_richness variable for the tg_humidity_lm_all_sites model. Information for the model is provided as follows: The tg_humidity_lm_all_sites model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity. 
This model was used to forecast water temperature and dissolved oxygen concentration at the\nseven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_richness", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_humidity_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_lasso.json b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_lasso.json index 6a2316b157..bc1ba554a0 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_lasso.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_lasso.json @@ -59,7 +59,8 @@ "properties": { "title": "tg_lasso", "description": "All summaries for the Weekly_beetle_community_richness variable for the tg_lasso model. Information for the model is provided as follows: Lasso is a machine learning model implemented in the same workflow as tg_randfor, but with\ndifferent hyperparameter tuning. The model drivers are unlagged air temperature, air pressure, relative\nhumidity, surface downwelling longwave and shortwave radiation, precipitation, and northward and\neastward wind. 
Lasso regressions were fitted with the function glmnet() in\nthe package glmnet (Tay et al. 2023), where the regularization hyperparameter (lambda) is tuned and\nselected with 10-fold cross validation..\n The model predicts this variable at the following sites: ABBY, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -281,7 +282,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_richness", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_lasso\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_precip_lm.json b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_precip_lm.json index 52f25e01b0..30a04635ba 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_precip_lm.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_precip_lm.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_precip_lm", "description": "All summaries for the Weekly_beetle_community_richness variable for the tg_precip_lm model. Information for the model is provided as follows: The tg_precip_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only total precipitation used as a model covariate..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_richness", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_precip_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_precip_lm_all_sites.json b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_precip_lm_all_sites.json index 755f11f9db..ebb50b5727 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_precip_lm_all_sites.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_precip_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_precip_lm_all_sites", "description": "All summaries for the Weekly_beetle_community_richness variable for the tg_precip_lm_all_sites model. Information for the model is provided as follows: The tg_precip_lm_all_sites model is a linear model fit using the function lm() in R. This is a very\nsimple model with only one covariate: total precipitation. 
This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_richness", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_precip_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_randfor.json b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_randfor.json index efde860c26..4aaefd9d29 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_randfor.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_randfor.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_randfor", "description": "All summaries for the Weekly_beetle_community_richness variable for the tg_randfor model. Information for the model is provided as follows: Random Forest is a machine learning model that is fitted with the ranger() function in the ranger\nR package (Wright & Ziegler 2017) within the tidymodels framework (Kuhn & Wickham 2020). 
The\nmodel drivers are unlagged air temperature, air pressure, relative humidity, surface downwelling\nlongwave and shortwave radiation, precipitation, and northward and eastward wind.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-19T00:00:00Z", "end_datetime": "2024-03-01T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_richness", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_randfor\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_tbats.json b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_tbats.json index abf695ff58..5a9d7a9338 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_tbats.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_tbats.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_tbats", "description": "All summaries for the Weekly_beetle_community_richness variable for the tg_tbats model. Information for the model is provided as follows: The tg_tbats model is a TBATS (Trigonometric seasonality, Box-Cox transformation, ARMA\nerrors, Trend and Seasonal components) model fit using the function tbats() from the forecast package in\nR (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series model with no\ncovariates..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-07-14", + "updated": "2024-08-23", "start_datetime": "2023-01-02T00:00:00Z", "end_datetime": "2025-07-07T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_richness", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_tbats\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_temp_lm.json b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_temp_lm.json index 99f1c881cf..90af7945cc 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_temp_lm.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_temp_lm.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_temp_lm", "description": "All summaries for the Weekly_beetle_community_richness variable for the tg_temp_lm model. Information for the model is provided as follows: The tg_temp_lm model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: air temperature.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_richness", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_temp_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_temp_lm_all_sites.json b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_temp_lm_all_sites.json index c88035866b..dfbe54311f 100644 --- a/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_temp_lm_all_sites.json +++ b/catalog/summaries/Beetles/Weekly_beetle_community_richness/models/tg_temp_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_temp_lm_all_sites", "description": "All summaries for the Weekly_beetle_community_richness variable for the tg_temp_lm_all_sites model. Information for the model is provided as follows: The tg_temp_lm_all_sites model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: air temperature. This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-02", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly beetle_community_richness", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=richness/model_id=tg_temp_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/collection.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/collection.json index e0d9e2a1e8..a9b739da86 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/collection.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/collection.json @@ -43,11 +43,6 @@ "type": "application/json", "href": "./models/tg_temp_lm_all_sites.json" }, - { - "rel": "item", - "type": "application/json", - "href": "./models/tg_ets.json" - }, { "rel": "item", "type": "application/json", @@ -78,6 +73,11 @@ "type": "application/json", "href": "./models/tg_randfor.json" }, + { + "rel": "item", + "type": "application/json", + "href": "./models/tg_ets.json" + }, { "rel": "item", "type": "application/json", diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/ChlorophyllCrusaders.json 
b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/ChlorophyllCrusaders.json index 097eaeb50b..78735c7726 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/ChlorophyllCrusaders.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/ChlorophyllCrusaders.json @@ -16,7 +16,8 @@ "properties": { "title": "ChlorophyllCrusaders", "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the ChlorophyllCrusaders model. Information for the model is provided as follows: Our project utilizes a historical GCC data to fit a Dynamic Linear Model (DLM). After this DLM is trained, we utilize forecasted temperature data to predict future GCC data..\n The model predicts this variable at the following sites: HARV, HEAL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-06-19", + "updated": "2024-05-21", "start_datetime": "2024-04-26T00:00:00Z", "end_datetime": "2024-06-20T00:00:00Z", "providers": [ @@ -195,7 +196,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=ChlorophyllCrusaders?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=ChlorophyllCrusaders?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=ChlorophyllCrusaders?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=ChlorophyllCrusaders\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/PEG.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/PEG.json index 0b6088a020..1ede434c1c 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/PEG.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/PEG.json @@ -61,7 +61,8 @@ "properties": { "title": "PEG", "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the PEG model.
Information for the model is provided as follows: This model was a Simple Seasonal + Exponential Smoothing Model, with the GCC targets as inputs.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2023-12-21", + "updated": "2023-12-22", "start_datetime": "2023-12-22T00:00:00Z", "end_datetime": "2024-01-25T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=PEG?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=PEG?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=PEG?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=PEG\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/cb_prophet.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/cb_prophet.json index c3c017ffc2..d53ab2b048 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/cb_prophet.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/cb_prophet.json @@ -9,6 +9,21 @@ "geometry": { "type": "MultiPoint", "coordinates": [ + [-76.56, 38.8901], + [-119.7323, 37.1088], + [-119.2622, 37.0334], + [-110.8355, 31.9107], + [-89.5864, 45.5089], + [-103.0293, 40.4619], + [-87.3933, 32.9505], + [-119.006, 37.0058], + [-149.3705, 68.6611], + [-89.5857, 45.4937], + [-95.1921, 39.0404], + [-89.5373, 46.2339], + [-99.2413, 47.1282], + [-121.9519, 45.8205], + [-110.5391, 44.9535], [-122.3303, 45.7624], [-156.6194, 71.2824], [-71.2874, 44.0639], @@ -40,28 +55,14 @@ [-81.9934, 29.6893], [-155.3173, 19.5531], [-105.546, 40.2759], - [-78.1395, 38.8929], - [-76.56, 38.8901], - [-119.7323, 37.1088], - 
[-119.2622, 37.0334], - [-110.8355, 31.9107], - [-89.5864, 45.5089], - [-103.0293, 40.4619], - [-87.3933, 32.9505], - [-119.006, 37.0058], - [-149.3705, 68.6611], - [-89.5857, 45.4937], - [-95.1921, 39.0404], - [-89.5373, 46.2339], - [-99.2413, 47.1282], - [-121.9519, 45.8205], - [-110.5391, 44.9535] + [-78.1395, 38.8929] ] }, "properties": { "title": "cb_prophet", - "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the cb_prophet model. Information for the model is provided as follows: The Prophet model is an empirical model, specifically a non-linear regression model that includes\nseasonality effects (Taylor & Letham, 2018). The model relies on Bayesian estimation with an additive\nwhite noise error term.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the cb_prophet model. Information for the model is provided as follows: The Prophet model is an empirical model, specifically a non-linear regression model that includes\nseasonality effects (Taylor & Letham, 2018). 
The model relies on Bayesian estimation with an additive\nwhite noise error term.\n The model predicts this variable at the following sites: SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-02-06", + "updated": "2024-02-07", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-09T00:00:00Z", "providers": [ @@ -92,6 +93,21 @@ "gcc_90", "Daily", "P1D", + "SERC", + "SJER", + "SOAP", + "SRER", + "STEI", + "STER", + "TALL", + "TEAK", + "TOOL", + "TREE", + "UKFS", + "UNDE", + "WOOD", + "WREF", + "YELL", "ABBY", "BARR", "BART", @@ -123,22 +139,7 @@ "OSBS", "PUUM", "RMNP", - "SCBI", - "SERC", - "SJER", - "SOAP", - "SRER", - "STEI", - "STER", - "TALL", - "TEAK", - "TOOL", - "TREE", - "UKFS", - "UNDE", - "WOOD", - "WREF", - "YELL" + "SCBI" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=cb_prophet\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/climatology.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/climatology.json index 2a670b4461..f96f39aad9 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/climatology.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/climatology.json @@ -54,14 +54,15 @@ [-147.5026, 65.154], [-145.7514, 63.8811], [-149.2133, 63.8758], - [-149.3705, 68.6611], - [-156.6194, 71.2824] + [-156.6194, 71.2824], + [-149.3705, 68.6611] ] }, "properties": { "title": "climatology", - "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the climatology model. Information for the model is provided as follows: Historical DOY mean and sd. 
Assumes normal distribution.\n The model predicts this variable at the following sites: ABBY, BART, BLAN, CLBJ, CPER, DCFS, DELA, DSNY, GRSM, GUAN, HARV, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TREE, UKFS, UNDE, WOOD, WREF, YELL, BONA, DEJU, HEAL, TOOL, BARR.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the climatology model. Information for the model is provided as follows: Historical DOY mean and sd. Assumes normal distribution.\n The model predicts this variable at the following sites: ABBY, BART, BLAN, CLBJ, CPER, DCFS, DELA, DSNY, GRSM, GUAN, HARV, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TREE, UKFS, UNDE, WOOD, WREF, YELL, BONA, DEJU, HEAL, BARR, TOOL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-11-15T00:00:00Z", "end_datetime": "2024-08-07T00:00:00Z", "providers": [ @@ -137,8 +138,8 @@ "BONA", "DEJU", "HEAL", - "TOOL", - "BARR" + "BARR", + "TOOL" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=climatology?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=climatology\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/persistenceRW.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/persistenceRW.json index f0e91e58f3..c839d18b50 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/persistenceRW.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/persistenceRW.json @@ -20,38 +20,38 @@ [-104.7456, 40.8155], [-99.1066, 47.1617], [-145.7514, 63.8811], - [-81.9934, 29.6893], - [-155.3173, 19.5531], - [-105.546, 40.2759], - [-78.1395, 38.8929], - [-76.56, 38.8901], [-119.7323, 37.1088], - [-67.0769, 18.0213], - [-88.1612, 31.8539], - [-80.5248, 37.3783], - [-109.3883, 38.2483], - [-105.5824, 40.0543], - [-89.5373, 46.2339], - [-99.2413, 47.1282], - [-121.9519, 45.8205], - [-110.5391, 44.9535], - [-100.9154, 46.7697], [-119.2622, 37.0334], [-110.8355, 31.9107], [-89.5864, 45.5089], [-103.0293, 40.4619], [-87.3933, 32.9505], + [-100.9154, 46.7697], + [-99.0588, 35.4106], + [-112.4524, 40.1776], + [-84.2826, 35.9641], + [-81.9934, 29.6893], [-119.006, 37.0058], [-149.3705, 68.6611], [-89.5857, 45.4937], [-95.1921, 39.0404], + [-89.5373, 46.2339], + [-67.0769, 18.0213], + [-88.1612, 31.8539], + [-80.5248, 37.3783], + [-109.3883, 38.2483], + [-105.5824, 40.0543], + [-155.3173, 19.5531], + [-105.546, 40.2759], + [-78.1395, 38.8929], + [-76.56, 38.8901], + [-99.2413, 47.1282], + [-121.9519, 45.8205], + [-110.5391, 44.9535], [-122.3303, 45.7624], [-156.6194, 
71.2824], [-71.2874, 44.0639], [-78.0418, 39.0337], - [-99.0588, 35.4106], - [-112.4524, 40.1776], - [-84.2826, 35.9641], [-84.4686, 31.1948], [-106.8425, 32.5907], [-96.6129, 39.1104], @@ -60,8 +60,9 @@ }, "properties": { "title": "persistenceRW", - "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the persistenceRW model. Information for the model is provided as follows: Random walk from the fable package with ensembles used to represent uncertainty.\n The model predicts this variable at the following sites: DELA, DSNY, GRSM, GUAN, HARV, HEAL, BONA, CLBJ, CPER, DCFS, DEJU, OSBS, PUUM, RMNP, SCBI, SERC, SJER, LAJA, LENO, MLBS, MOAB, NIWO, UNDE, WOOD, WREF, YELL, NOGP, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, ABBY, BARR, BART, BLAN, OAES, ONAQ, ORNL, JERC, JORN, KONA, KONZ.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the persistenceRW model. 
Information for the model is provided as follows: Random walk from the fable package with ensembles used to represent uncertainty.\n The model predicts this variable at the following sites: DELA, DSNY, GRSM, GUAN, HARV, HEAL, BONA, CLBJ, CPER, DCFS, DEJU, SJER, SOAP, SRER, STEI, STER, TALL, NOGP, OAES, ONAQ, ORNL, OSBS, TEAK, TOOL, TREE, UKFS, UNDE, LAJA, LENO, MLBS, MOAB, NIWO, PUUM, RMNP, SCBI, SERC, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, JERC, JORN, KONA, KONZ.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-11-15T00:00:00Z", "end_datetime": "2024-08-06T00:00:00Z", "providers": [ @@ -103,38 +104,38 @@ "CPER", "DCFS", "DEJU", - "OSBS", - "PUUM", - "RMNP", - "SCBI", - "SERC", "SJER", - "LAJA", - "LENO", - "MLBS", - "MOAB", - "NIWO", - "UNDE", - "WOOD", - "WREF", - "YELL", - "NOGP", "SOAP", "SRER", "STEI", "STER", "TALL", + "NOGP", + "OAES", + "ONAQ", + "ORNL", + "OSBS", "TEAK", "TOOL", "TREE", "UKFS", + "UNDE", + "LAJA", + "LENO", + "MLBS", + "MOAB", + "NIWO", + "PUUM", + "RMNP", + "SCBI", + "SERC", + "WOOD", + "WREF", + "YELL", "ABBY", "BARR", "BART", "BLAN", - "OAES", - "ONAQ", - "ORNL", "JERC", "JORN", "KONA", @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=persistenceRW\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_arima.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_arima.json index 2ff467a7e6..702dd10a34 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_arima.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_arima.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_arima", "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_arima model. Information for the model is provided as follows: The tg_arima model is an AutoRegressive Integrated Moving Average (ARIMA) model fit using\nthe function auto.arima() from the forecast package in R (Hyndman et al. 
2023; Hyndman et al., 2008).\nThis is an empirical time series model with no covariates.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-01-07T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_arima\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_ets.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_ets.json index 57ff401954..a4951f55f1 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_ets.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_ets.json @@ -9,24 +9,6 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-155.3173, 19.5531], - [-105.546, 40.2759], - [-78.1395, 38.8929], - [-76.56, 38.8901], - [-119.7323, 37.1088], - [-119.2622, 37.0334], - [-110.8355, 31.9107], - [-89.5864, 45.5089], - [-103.0293, 40.4619], - [-87.3933, 32.9505], - [-119.006, 37.0058], - [-149.3705, 68.6611], - [-89.5857, 45.4937], - [-95.1921, 39.0404], - [-89.5373, 46.2339], - [-99.2413, 47.1282], - [-121.9519, 45.8205], - [-110.5391, 44.9535], [-122.3303, 45.7624], [-156.6194, 71.2824], [-71.2874, 44.0639], @@ -55,13 +37,32 @@ [-99.0588, 35.4106], [-112.4524, 40.1776], [-84.2826, 35.9641], - 
[-81.9934, 29.6893] + [-81.9934, 29.6893], + [-155.3173, 19.5531], + [-105.546, 40.2759], + [-78.1395, 38.8929], + [-76.56, 38.8901], + [-119.7323, 37.1088], + [-119.2622, 37.0334], + [-110.8355, 31.9107], + [-89.5864, 45.5089], + [-103.0293, 40.4619], + [-87.3933, 32.9505], + [-119.006, 37.0058], + [-149.3705, 68.6611], + [-89.5857, 45.4937], + [-95.1921, 39.0404], + [-89.5373, 46.2339], + [-99.2413, 47.1282], + [-121.9519, 45.8205], + [-110.5391, 44.9535] ] }, "properties": { "title": "tg_ets", - "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_ets model. Information for the model is provided as follows: The tg_ets model is an Error, Trend, Seasonal (ETS) model fit using the function ets() from the\nforecast package in R (Hyndman et al. 2023; Hyndman et al., 2008). This is an empirical time series\nmodel with no covariates..\n The model predicts this variable at the following sites: PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_ets model. Information for the model is provided as follows: The tg_ets model is an Error, Trend, Seasonal (ETS) model fit using the function ets() from the\nforecast package in R (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series\nmodel with no covariates..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-01-07T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -92,24 +93,6 @@ "gcc_90", "Daily", "P1D", - "PUUM", - "RMNP", - "SCBI", - "SERC", - "SJER", - "SOAP", - "SRER", - "STEI", - "STER", - "TALL", - "TEAK", - "TOOL", - "TREE", - "UKFS", - "UNDE", - "WOOD", - "WREF", - "YELL", "ABBY", "BARR", "BART", @@ -138,7 +121,25 @@ "OAES", "ONAQ", "ORNL", - "OSBS" + "OSBS", + "PUUM", + "RMNP", + "SCBI", + "SERC", + "SJER", + "SOAP", + "SRER", + "STEI", + "STER", + "TALL", + "TEAK", + "TOOL", + "TREE", + "UKFS", + "UNDE", + "WOOD", + "WREF", + "YELL" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_ets\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_humidity_lm.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_humidity_lm.json index ce88f71cc5..b6a326a544 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_humidity_lm.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_humidity_lm.json @@ -9,6 +9,23 @@ "geometry": { "type": "MultiPoint", "coordinates": [ + [-149.2133, 63.8758], + [-84.4686, 31.1948], + [-106.8425, 32.5907], + [-96.6129, 39.1104], + [-96.5631, 39.1008], + [-67.0769, 18.0213], + [-88.1612, 31.8539], + [-80.5248, 37.3783], + [-109.3883, 38.2483], + [-105.5824, 40.0543], + [-100.9154, 46.7697], + [-99.0588, 35.4106], + [-112.4524, 40.1776], + [-84.2826, 35.9641], + [-81.9934, 29.6893], + 
[-155.3173, 19.5531], + [-105.546, 40.2759], [-78.1395, 38.8929], [-76.56, 38.8901], [-119.7323, 37.1088], @@ -38,30 +55,14 @@ [-81.4362, 28.1251], [-83.5019, 35.689], [-66.8687, 17.9696], - [-72.1727, 42.5369], - [-149.2133, 63.8758], - [-84.4686, 31.1948], - [-106.8425, 32.5907], - [-96.6129, 39.1104], - [-96.5631, 39.1008], - [-67.0769, 18.0213], - [-88.1612, 31.8539], - [-80.5248, 37.3783], - [-109.3883, 38.2483], - [-105.5824, 40.0543], - [-100.9154, 46.7697], - [-99.0588, 35.4106], - [-112.4524, 40.1776], - [-84.2826, 35.9641], - [-81.9934, 29.6893], - [-155.3173, 19.5531], - [-105.546, 40.2759] + [-72.1727, 42.5369] ] }, "properties": { "title": "tg_humidity_lm", - "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_humidity_lm model. Information for the model is provided as follows: The tg_humidity_lm model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity.\n The model predicts this variable at the following sites: SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_humidity_lm model. Information for the model is provided as follows: The tg_humidity_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only one covariate: relative humidity.\n The model predicts this variable at the following sites: HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-02-03", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -92,6 +93,23 @@ "gcc_90", "Daily", "P1D", + "HEAL", + "JERC", + "JORN", + "KONA", + "KONZ", + "LAJA", + "LENO", + "MLBS", + "MOAB", + "NIWO", + "NOGP", + "OAES", + "ONAQ", + "ORNL", + "OSBS", + "PUUM", + "RMNP", "SCBI", "SERC", "SJER", @@ -121,24 +139,7 @@ "DSNY", "GRSM", "GUAN", - "HARV", - "HEAL", - "JERC", - "JORN", - "KONA", - "KONZ", - "LAJA", - "LENO", - "MLBS", - "MOAB", - "NIWO", - "NOGP", - "OAES", - "ONAQ", - "ORNL", - "OSBS", - "PUUM", - "RMNP" + "HARV" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_humidity_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_humidity_lm_all_sites.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_humidity_lm_all_sites.json index b8061c85a6..b448423a6c 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_humidity_lm_all_sites.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_humidity_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_humidity_lm_all_sites", "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_humidity_lm_all_sites model. Information for the model is provided as follows: The tg_humidity_lm_all_sites model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only one covariate: relative humidity. This model was used to forecast water temperature and dissolved oxygen concentration at the\nseven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_humidity_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_lasso.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_lasso.json index e31e4905f2..4f736e74e7 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_lasso.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_lasso.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_lasso", "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_lasso model. Information for the model is provided as follows: Lasso is a machine learning model implemented in the same workflow as tg_randfor, but with\ndifferent hyperparameter tuning. The model drivers are unlagged air temperature, air pressure, relative\nhumidity, surface downwelling longwave and shortwave radiation, precipitation, and northward and\neastward wind. 
Lasso regressions were fitted with the function glmnet() in\nthe package glmnet (Tay et al. 2023), where the regularization hyperparameter (lambda) is tuned and\nselected with 10-fold cross validation..\n The model predicts this variable at the following sites: SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-04T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_lasso\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_precip_lm.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_precip_lm.json index 3ef7df50d5..40c88066a1 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_precip_lm.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_precip_lm.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_precip_lm", "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_precip_lm model. Information for the model is provided as follows: The tg_precip_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only total precipitation used as a model covariate..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_precip_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_precip_lm_all_sites.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_precip_lm_all_sites.json index fc453e6f6b..74e05569c5 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_precip_lm_all_sites.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_precip_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_precip_lm_all_sites", "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_precip_lm_all_sites model. Information for the model is provided as follows: The tg_precip_lm_all_sites model is a linear model fit using the function lm() in R. This is a very\nsimple model with only one covariate: total precipitation. y. 
This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_precip_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_randfor.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_randfor.json index 334b9c2658..101211efc3 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_randfor.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_randfor.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_randfor", "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_randfor model. Information for the model is provided as follows: Random Forest is a machine learning model that is fitted with the ranger() function in the ranger\nR package (Wright & Ziegler 2017) within the tidymodels framework (Kuhn & Wickham 2020). 
The\nmodel drivers are unlagged air temperature, air pressure, relative humidity, surface downwelling\nlongwave and shortwave radiation, precipitation, and northward and eastward wind.\n The model predicts this variable at the following sites: SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-04T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_randfor\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_tbats.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_tbats.json index e64512d6a3..49eb2507d0 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_tbats.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_tbats.json @@ -9,6 +9,19 @@ "geometry": { "type": "MultiPoint", "coordinates": [ + [-76.56, 38.8901], + [-119.7323, 37.1088], + [-119.2622, 37.0334], + [-110.8355, 31.9107], + [-89.5864, 45.5089], + [-103.0293, 40.4619], + [-87.3933, 32.9505], + [-119.006, 37.0058], + [-149.3705, 68.6611], + [-89.5857, 45.4937], + [-95.1921, 39.0404], + [-89.5373, 46.2339], + [-99.2413, 47.1282], [-121.9519, 45.8205], [-110.5391, 44.9535], [-122.3303, 45.7624], @@ -42,26 +55,14 @@ [-81.9934, 29.6893], [-155.3173, 19.5531], [-105.546, 40.2759], - [-78.1395, 38.8929], - [-76.56, 38.8901], - [-119.7323, 37.1088], - [-119.2622, 37.0334], - [-110.8355, 
31.9107], - [-89.5864, 45.5089], - [-103.0293, 40.4619], - [-87.3933, 32.9505], - [-119.006, 37.0058], - [-149.3705, 68.6611], - [-89.5857, 45.4937], - [-95.1921, 39.0404], - [-89.5373, 46.2339], - [-99.2413, 47.1282] + [-78.1395, 38.8929] ] }, "properties": { "title": "tg_tbats", - "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_tbats model. Information for the model is provided as follows: The tg_tbats model is a TBATS (Trigonometric seasonality, Box-Cox transformation, ARMA\nerrors, Trend and Seasonal components) model fit using the function tbats() from the forecast package in\nR (Hyndman et al. 2023; Hyndman et al., 2008). This is an empirical time series model with no\ncovariates..\n The model predicts this variable at the following sites: WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_tbats model. Information for the model is provided as follows: The tg_tbats model is a TBATS (Trigonometric seasonality, Box-Cox transformation, ARMA\nerrors, Trend and Seasonal components) model fit using the function tbats() from the forecast package in\nR (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series model with no\ncovariates..\n The model predicts this variable at the following sites: SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-07-03", + "updated": "2024-08-23", "start_datetime": "2023-01-01T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -92,6 +93,19 @@ "gcc_90", "Daily", "P1D", + "SERC", + "SJER", + "SOAP", + "SRER", + "STEI", + "STER", + "TALL", + "TEAK", + "TOOL", + "TREE", + "UKFS", + "UNDE", + "WOOD", "WREF", "YELL", "ABBY", @@ -125,20 +139,7 @@ "OSBS", "PUUM", "RMNP", - "SCBI", - "SERC", - "SJER", - "SOAP", - "SRER", - "STEI", - "STER", - "TALL", - "TEAK", - "TOOL", - "TREE", - "UKFS", - "UNDE", - "WOOD" + "SCBI" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_tbats\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_temp_lm.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_temp_lm.json index 376501f8f5..a45a8253f5 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_temp_lm.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_temp_lm.json @@ -9,22 +9,6 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-78.1395, 38.8929], - [-76.56, 38.8901], - [-119.7323, 37.1088], - [-119.2622, 37.0334], - [-110.8355, 31.9107], - [-89.5864, 45.5089], - [-103.0293, 40.4619], - [-87.3933, 32.9505], - [-119.006, 37.0058], - [-149.3705, 68.6611], - [-89.5857, 45.4937], - [-95.1921, 39.0404], - [-89.5373, 46.2339], - [-99.2413, 47.1282], - [-121.9519, 45.8205], - [-110.5391, 
44.9535], [-122.3303, 45.7624], [-156.6194, 71.2824], [-71.2874, 44.0639], @@ -55,13 +39,30 @@ [-84.2826, 35.9641], [-81.9934, 29.6893], [-155.3173, 19.5531], - [-105.546, 40.2759] + [-105.546, 40.2759], + [-78.1395, 38.8929], + [-76.56, 38.8901], + [-119.7323, 37.1088], + [-119.2622, 37.0334], + [-110.8355, 31.9107], + [-89.5864, 45.5089], + [-103.0293, 40.4619], + [-87.3933, 32.9505], + [-119.006, 37.0058], + [-149.3705, 68.6611], + [-89.5857, 45.4937], + [-95.1921, 39.0404], + [-89.5373, 46.2339], + [-99.2413, 47.1282], + [-121.9519, 45.8205], + [-110.5391, 44.9535] ] }, "properties": { "title": "tg_temp_lm", - "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_temp_lm model. Information for the model is provided as follows: The tg_temp_lm model is a linear model fit using the function lm() in R. This is a very\nsimple model with only one covariate: total precipitation..\n The model predicts this variable at the following sites: SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_temp_lm model. Information for the model is provided as follows: The tg_temp_lm model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-02-03", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -92,22 +93,6 @@ "gcc_90", "Daily", "P1D", - "SCBI", - "SERC", - "SJER", - "SOAP", - "SRER", - "STEI", - "STER", - "TALL", - "TEAK", - "TOOL", - "TREE", - "UKFS", - "UNDE", - "WOOD", - "WREF", - "YELL", "ABBY", "BARR", "BART", @@ -138,7 +123,23 @@ "ORNL", "OSBS", "PUUM", - "RMNP" + "RMNP", + "SCBI", + "SERC", + "SJER", + "SOAP", + "SRER", + "STEI", + "STER", + "TALL", + "TEAK", + "TOOL", + "TREE", + "UKFS", + "UNDE", + "WOOD", + "WREF", + "YELL" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_temp_lm\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_temp_lm_all_sites.json b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_temp_lm_all_sites.json index 3f85896d1a..1cb8d75d47 100644 --- a/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_temp_lm_all_sites.json +++ b/catalog/summaries/Phenology/Daily_Green_chromatic_coordinate/models/tg_temp_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_temp_lm_all_sites", "description": "All summaries for the Daily_Green_chromatic_coordinate variable for the tg_temp_lm_all_sites model. Information for the model is provided as follows: The tg_temp_lm_all_sites model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation.This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Green_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=gcc_90/model_id=tg_temp_lm_all_sites\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/collection.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/collection.json index bf823cc614..29ec7518be 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/collection.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/collection.json @@ -11,57 +11,57 @@ { "rel": "item", "type": "application/json", - "href": "./models/tg_tbats.json" + "href": "./models/baseline_ensemble.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_temp_lm.json" + "href": "./models/tg_arima.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_arima.json" + "href": "./models/tg_ets.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_ets.json" + "href": "./models/tg_lasso.json" }, { "rel": "item", "type": "application/json", - "href": "./models/baseline_ensemble.json" + "href": "./models/tg_precip_lm.json" }, { "rel": 
"item", "type": "application/json", - "href": "./models/tg_humidity_lm.json" + "href": "./models/tg_precip_lm_all_sites.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_humidity_lm_all_sites.json" + "href": "./models/tg_randfor.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_lasso.json" + "href": "./models/tg_tbats.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_precip_lm.json" + "href": "./models/tg_humidity_lm.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_precip_lm_all_sites.json" + "href": "./models/tg_humidity_lm_all_sites.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_randfor.json" + "href": "./models/tg_temp_lm.json" }, { "rel": "item", @@ -71,17 +71,17 @@ { "rel": "item", "type": "application/json", - "href": "./models/tg_temp_lm_all_sites.json" + "href": "./models/cb_prophet.json" }, { "rel": "item", "type": "application/json", - "href": "./models/cb_prophet.json" + "href": "./models/climatology.json" }, { "rel": "item", "type": "application/json", - "href": "./models/climatology.json" + "href": "./models/tg_temp_lm_all_sites.json" }, { "rel": "item", diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/PEG.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/PEG.json index 75702d9a41..405efa31fb 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/PEG.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/PEG.json @@ -61,7 +61,8 @@ "properties": { "title": "PEG", "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the PEG model. 
Information for the model is provided as follows: This model was a Simple Seasonal + Exponential Smoothing Model, with the GCC targets as inputs.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2023-12-21", + "updated": "2023-12-22", "start_datetime": "2023-12-22T00:00:00Z", "end_datetime": "2024-01-25T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=PEG?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=PEG?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=PEG?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=PEG\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/baseline_ensemble.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/baseline_ensemble.json index 4577c680e2..d51f883d8f 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/baseline_ensemble.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/baseline_ensemble.json @@ -61,7 +61,8 @@ "properties": { "title": "baseline_ensemble", "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the baseline_ensemble model. 
Information for the model is provided as follows: The Baseline MME is a multi-model ensemble (MME) comprised of the two baseline models\n(day-of-year, persistence) submitted by Challenge organisers.\n The model predicts this variable at the following sites: ABBY, BART, BLAN, CLBJ, CPER, DCFS, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, HARV, JERC, JORN, KONA, KONZ, LAJA, LENO, SERC, SJER, SOAP, SRER, STEI, UNDE, WOOD, WREF, YELL, STER, TALL, TEAK, TREE, UKFS, DELA, DSNY, GRSM, GUAN, MLBS, MOAB, NIWO, NOGP, BONA, DEJU, HEAL, TOOL, BARR.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-07-03", + "updated": null, "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=baseline_ensemble?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=baseline_ensemble?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=baseline_ensemble?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=baseline_ensemble\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/cb_prophet.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/cb_prophet.json index 3d8c2e00d6..d29f798dde 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/cb_prophet.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/cb_prophet.json @@ -9,23 +9,6 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-122.3303, 45.7624], - [-156.6194, 71.2824], - [-71.2874, 44.0639], - [-78.0418, 39.0337], - [-147.5026, 65.154], - [-97.57, 33.4012], - [-104.7456, 40.8155], - [-99.1066, 47.1617], - [-145.7514, 63.8811], - [-87.8039, 32.5417], - [-81.4362, 28.1251], - [-83.5019, 35.689], - [-66.8687, 17.9696], - [-72.1727, 42.5369], - [-149.2133, 63.8758], - [-84.4686, 31.1948], - [-106.8425, 32.5907], [-96.6129, 39.1104], [-96.5631, 39.1008], [-67.0769, 18.0213], @@ -55,13 +38,31 @@ [-89.5373, 46.2339], [-99.2413, 47.1282], [-121.9519, 45.8205], - 
[-110.5391, 44.9535] + [-110.5391, 44.9535], + [-122.3303, 45.7624], + [-156.6194, 71.2824], + [-71.2874, 44.0639], + [-78.0418, 39.0337], + [-147.5026, 65.154], + [-97.57, 33.4012], + [-104.7456, 40.8155], + [-99.1066, 47.1617], + [-145.7514, 63.8811], + [-87.8039, 32.5417], + [-81.4362, 28.1251], + [-83.5019, 35.689], + [-66.8687, 17.9696], + [-72.1727, 42.5369], + [-149.2133, 63.8758], + [-84.4686, 31.1948], + [-106.8425, 32.5907] ] }, "properties": { "title": "cb_prophet", - "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the cb_prophet model. Information for the model is provided as follows: The Prophet model is an empirical model, specifically a non-linear regression model that includes\nseasonality effects (Taylor & Letham, 2018). The model relies on Bayesian estimation with an additive\nwhite noise error term.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the cb_prophet model. Information for the model is provided as follows: The Prophet model is an empirical model, specifically a non-linear regression model that includes\nseasonality effects (Taylor & Letham, 2018). 
The model relies on Bayesian estimation with an additive\nwhite noise error term.\n The model predicts this variable at the following sites: KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-02-06", + "updated": "2024-02-07", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-09T00:00:00Z", "providers": [ @@ -92,23 +93,6 @@ "rcc_90", "Daily", "P1D", - "ABBY", - "BARR", - "BART", - "BLAN", - "BONA", - "CLBJ", - "CPER", - "DCFS", - "DEJU", - "DELA", - "DSNY", - "GRSM", - "GUAN", - "HARV", - "HEAL", - "JERC", - "JORN", "KONA", "KONZ", "LAJA", @@ -138,7 +122,24 @@ "UNDE", "WOOD", "WREF", - "YELL" + "YELL", + "ABBY", + "BARR", + "BART", + "BLAN", + "BONA", + "CLBJ", + "CPER", + "DCFS", + "DEJU", + "DELA", + "DSNY", + "GRSM", + "GUAN", + "HARV", + "HEAL", + "JERC", + "JORN" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=cb_prophet\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/climatology.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/climatology.json index 11334872a0..d2d2cb888d 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/climatology.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/climatology.json @@ -54,14 +54,15 @@ [-145.7514, 63.8811], [-149.2133, 63.8758], [-147.5026, 65.154], - [-149.3705, 68.6611], - [-156.6194, 71.2824] + [-156.6194, 71.2824], + [-149.3705, 68.6611] ] }, "properties": { "title": "climatology", - "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the climatology model. Information for the model is provided as follows: Historical DOY mean and sd. 
Assumes normal distribution.\n The model predicts this variable at the following sites: ABBY, BART, BLAN, CLBJ, CPER, DCFS, DELA, DSNY, GRSM, GUAN, HARV, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TREE, UKFS, UNDE, WOOD, WREF, YELL, DEJU, HEAL, BONA, TOOL, BARR.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the climatology model. Information for the model is provided as follows: Historical DOY mean and sd. Assumes normal distribution.\n The model predicts this variable at the following sites: ABBY, BART, BLAN, CLBJ, CPER, DCFS, DELA, DSNY, GRSM, GUAN, HARV, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TREE, UKFS, UNDE, WOOD, WREF, YELL, DEJU, HEAL, BONA, BARR, TOOL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-11-15T00:00:00Z", "end_datetime": "2024-08-07T00:00:00Z", "providers": [ @@ -137,8 +138,8 @@ "DEJU", "HEAL", "BONA", - "TOOL", - "BARR" + "BARR", + "TOOL" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=climatology?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=climatology\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/persistenceRW.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/persistenceRW.json index 066fa6ec5a..dea7c50f50 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/persistenceRW.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/persistenceRW.json @@ -44,24 +44,25 @@ [-99.0588, 35.4106], [-84.4686, 31.1948], [-106.8425, 32.5907], - [-83.5019, 35.689], - [-66.8687, 17.9696], - [-72.1727, 42.5369], - [-149.2133, 63.8758], + [-80.5248, 37.3783], + [-109.3883, 38.2483], + [-105.5824, 40.0543], + [-100.9154, 46.7697], [-149.3705, 68.6611], [-89.5857, 45.4937], [-95.1921, 39.0404], [-89.5373, 46.2339], - [-80.5248, 37.3783], - [-109.3883, 38.2483], - [-105.5824, 40.0543], - [-100.9154, 46.7697] + [-83.5019, 35.689], + [-66.8687, 17.9696], + [-72.1727, 42.5369], + [-149.2133, 63.8758] ] }, "properties": { "title": "persistenceRW", - "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the persistenceRW model. 
Information for the model is provided as follows: Random walk from the fable package with ensembles used to represent uncertainty.\n The model predicts this variable at the following sites: KONA, KONZ, LAJA, LENO, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, ONAQ, ORNL, OSBS, PUUM, SRER, STEI, STER, TALL, TEAK, RMNP, SCBI, SERC, SJER, SOAP, OAES, JERC, JORN, GRSM, GUAN, HARV, HEAL, TOOL, TREE, UKFS, UNDE, MLBS, MOAB, NIWO, NOGP.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the persistenceRW model. Information for the model is provided as follows: Random walk from the fable package with ensembles used to represent uncertainty.\n The model predicts this variable at the following sites: KONA, KONZ, LAJA, LENO, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, ONAQ, ORNL, OSBS, PUUM, SRER, STEI, STER, TALL, TEAK, RMNP, SCBI, SERC, SJER, SOAP, OAES, JERC, JORN, MLBS, MOAB, NIWO, NOGP, TOOL, TREE, UKFS, UNDE, GRSM, GUAN, HARV, HEAL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-11-15T00:00:00Z", "end_datetime": "2024-08-06T00:00:00Z", "providers": [ @@ -127,18 +128,18 @@ "OAES", "JERC", "JORN", - "GRSM", - "GUAN", - "HARV", - "HEAL", + "MLBS", + "MOAB", + "NIWO", + "NOGP", "TOOL", "TREE", "UKFS", "UNDE", - "MLBS", - "MOAB", - "NIWO", - "NOGP" + "GRSM", + "GUAN", + "HARV", + "HEAL" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` 
for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=persistenceRW\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_arima.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_arima.json index 148b3e167e..9f5775c7ad 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_arima.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_arima.json @@ -9,26 +9,6 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-87.8039, 32.5417], - [-81.4362, 28.1251], - [-83.5019, 35.689], - [-66.8687, 17.9696], - [-72.1727, 42.5369], - [-149.2133, 63.8758], - [-84.4686, 31.1948], - [-106.8425, 32.5907], - [-96.6129, 39.1104], - [-96.5631, 39.1008], - [-67.0769, 18.0213], - [-88.1612, 31.8539], - [-80.5248, 37.3783], - [-109.3883, 38.2483], - [-105.5824, 40.0543], - [-100.9154, 46.7697], - [-99.0588, 35.4106], - [-112.4524, 40.1776], - [-84.2826, 35.9641], - [-81.9934, 29.6893], [-155.3173, 19.5531], [-105.546, 40.2759], [-78.1395, 38.8929], @@ -55,13 +35,34 @@ [-97.57, 33.4012], [-104.7456, 40.8155], [-99.1066, 47.1617], - [-145.7514, 63.8811] + [-145.7514, 63.8811], + [-87.8039, 32.5417], + [-81.4362, 28.1251], + [-83.5019, 35.689], + [-66.8687, 17.9696], + [-72.1727, 42.5369], + [-149.2133, 63.8758], + [-84.4686, 31.1948], + [-106.8425, 32.5907], + [-96.6129, 39.1104], + [-96.5631, 39.1008], + [-67.0769, 18.0213], + [-88.1612, 31.8539], + [-80.5248, 37.3783], + [-109.3883, 38.2483], + [-105.5824, 40.0543], + [-100.9154, 46.7697], + [-99.0588, 35.4106], + 
[-112.4524, 40.1776], + [-84.2826, 35.9641], + [-81.9934, 29.6893] ] }, "properties": { "title": "tg_arima", - "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_arima model. Information for the model is provided as follows: The tg_arima model is an AutoRegressive Integrated Moving Average (ARIMA) model fit using\nthe function auto.arima() from the forecast package in R (Hyndman et al. 2023; Hyndman et al., 2008).\nThis is an empirical time series model with no covariates.\n The model predicts this variable at the following sites: DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_arima model. Information for the model is provided as follows: The tg_arima model is an AutoRegressive Integrated Moving Average (ARIMA) model fit using\nthe function auto.arima() from the forecast package in R (Hyndman et al. 
2023; Hyndman et al., 2008).\nThis is an empirical time series model with no covariates.\n The model predicts this variable at the following sites: PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-01-07T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -92,26 +93,6 @@ "rcc_90", "Daily", "P1D", - "DELA", - "DSNY", - "GRSM", - "GUAN", - "HARV", - "HEAL", - "JERC", - "JORN", - "KONA", - "KONZ", - "LAJA", - "LENO", - "MLBS", - "MOAB", - "NIWO", - "NOGP", - "OAES", - "ONAQ", - "ORNL", - "OSBS", "PUUM", "RMNP", "SCBI", @@ -138,7 +119,27 @@ "CLBJ", "CPER", "DCFS", - "DEJU" + "DEJU", + "DELA", + "DSNY", + "GRSM", + "GUAN", + "HARV", + "HEAL", + "JERC", + "JORN", + "KONA", + "KONZ", + "LAJA", + "LENO", + "MLBS", + "MOAB", + "NIWO", + "NOGP", + "OAES", + "ONAQ", + "ORNL", + "OSBS" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_arima\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_ets.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_ets.json index 591a1085e6..630fb62435 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_ets.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_ets.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_ets", "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_ets model. Information for the model is provided as follows: The tg_ets model is an Error, Trend, Seasonal (ETS) model fit using the function ets() from the\nforecast package in R (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series\nmodel with no covariates..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-01-07T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_ets\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_humidity_lm.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_humidity_lm.json index 34d9c1ef62..1b6df10978 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_humidity_lm.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_humidity_lm.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_humidity_lm", "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_humidity_lm model. Information for the model is provided as follows: The tg_humidity_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only one covariate: relative humidity.\n The model predicts this variable at the following sites: JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_humidity_lm\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_humidity_lm_all_sites.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_humidity_lm_all_sites.json index 2776420ab2..99f0999a4a 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_humidity_lm_all_sites.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_humidity_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_humidity_lm_all_sites", "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_humidity_lm_all_sites model. Information for the model is provided as follows: The tg_humidity_lm_all_sites model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity. 
This model was used to forecast water temperature and dissolved oxygen concentration at the\nseven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_humidity_lm_all_sites\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_lasso.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_lasso.json index 905ba6c81d..28087e2beb 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_lasso.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_lasso.json @@ -9,6 +9,13 @@ "geometry": { "type": "MultiPoint", "coordinates": [ + [-87.8039, 32.5417], + [-81.4362, 28.1251], + [-83.5019, 35.689], + [-66.8687, 17.9696], + [-72.1727, 42.5369], + [-149.2133, 63.8758], + [-84.4686, 31.1948], [-106.8425, 32.5907], [-96.6129, 39.1104], [-96.5631, 39.1008], @@ -48,20 +55,14 @@ [-97.57, 33.4012], [-104.7456, 40.8155], [-99.1066, 47.1617], - [-145.7514, 63.8811], - [-87.8039, 32.5417], - [-81.4362, 28.1251], - [-83.5019, 35.689], - [-66.8687, 17.9696], - [-72.1727, 42.5369], - [-149.2133, 63.8758], - [-84.4686, 31.1948] + [-145.7514, 63.8811] ] }, "properties": { 
"title": "tg_lasso", - "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_lasso model. Information for the model is provided as follows: Lasso is a machine learning model implemented in the same workflow as tg_randfor, but with\ndifferent hyperparameter tuning. The model drivers are unlagged air temperature, air pressure, relative\nhumidity, surface downwelling longwave and shortwave radiation, precipitation, and northward and\neastward wind. Lasso regressions were fitted with the function glmnet() in\nthe package glmnet (Tay et al. 2023), where the regularization hyperparameter (lambda) is tuned and\nselected with 10-fold cross validation..\n The model predicts this variable at the following sites: JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_lasso model. Information for the model is provided as follows: Lasso is a machine learning model implemented in the same workflow as tg_randfor, but with\ndifferent hyperparameter tuning. The model drivers are unlagged air temperature, air pressure, relative\nhumidity, surface downwelling longwave and shortwave radiation, precipitation, and northward and\neastward wind. Lasso regressions were fitted with the function glmnet() in\nthe package glmnet (Tay et al. 
2023), where the regularization hyperparameter (lambda) is tuned and\nselected with 10-fold cross validation..\n The model predicts this variable at the following sites: DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-04T00:00:00Z", "providers": [ @@ -92,6 +93,13 @@ "rcc_90", "Daily", "P1D", + "DELA", + "DSNY", + "GRSM", + "GUAN", + "HARV", + "HEAL", + "JERC", "JORN", "KONA", "KONZ", @@ -131,14 +139,7 @@ "CLBJ", "CPER", "DCFS", - "DEJU", - "DELA", - "DSNY", - "GRSM", - "GUAN", - "HARV", - "HEAL", - "JERC" + "DEJU" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_lasso\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_precip_lm.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_precip_lm.json index 5f4d404b90..889b4e7a42 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_precip_lm.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_precip_lm.json @@ -9,21 +9,6 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-122.3303, 45.7624], - [-156.6194, 71.2824], - [-71.2874, 44.0639], - [-78.0418, 39.0337], - [-147.5026, 65.154], - [-97.57, 33.4012], - [-104.7456, 40.8155], - [-99.1066, 47.1617], - [-145.7514, 63.8811], - [-87.8039, 32.5417], - [-81.4362, 28.1251], - [-83.5019, 35.689], - [-66.8687, 17.9696], - [-72.1727, 42.5369], - [-149.2133, 63.8758], [-84.4686, 
31.1948], [-106.8425, 32.5907], [-96.6129, 39.1104], @@ -55,13 +40,29 @@ [-89.5373, 46.2339], [-99.2413, 47.1282], [-121.9519, 45.8205], - [-110.5391, 44.9535] + [-110.5391, 44.9535], + [-122.3303, 45.7624], + [-156.6194, 71.2824], + [-71.2874, 44.0639], + [-78.0418, 39.0337], + [-147.5026, 65.154], + [-97.57, 33.4012], + [-104.7456, 40.8155], + [-99.1066, 47.1617], + [-145.7514, 63.8811], + [-87.8039, 32.5417], + [-81.4362, 28.1251], + [-83.5019, 35.689], + [-66.8687, 17.9696], + [-72.1727, 42.5369], + [-149.2133, 63.8758] ] }, "properties": { "title": "tg_precip_lm", - "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_precip_lm model. Information for the model is provided as follows: The tg_precip_lm model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only total precipitation used as a model covariate..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_precip_lm model. Information for the model is provided as follows: The tg_precip_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only total precipitation used as a model covariate..\n The model predicts this variable at the following sites: JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-02-03", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -92,21 +93,6 @@ "rcc_90", "Daily", "P1D", - "ABBY", - "BARR", - "BART", - "BLAN", - "BONA", - "CLBJ", - "CPER", - "DCFS", - "DEJU", - "DELA", - "DSNY", - "GRSM", - "GUAN", - "HARV", - "HEAL", "JERC", "JORN", "KONA", @@ -138,7 +124,22 @@ "UNDE", "WOOD", "WREF", - "YELL" + "YELL", + "ABBY", + "BARR", + "BART", + "BLAN", + "BONA", + "CLBJ", + "CPER", + "DCFS", + "DEJU", + "DELA", + "DSNY", + "GRSM", + "GUAN", + "HARV", + "HEAL" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_precip_lm\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_precip_lm_all_sites.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_precip_lm_all_sites.json index 5eccb2ac83..efb080feb8 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_precip_lm_all_sites.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_precip_lm_all_sites.json @@ -9,6 +9,18 @@ "geometry": { "type": "MultiPoint", "coordinates": [ + [-122.3303, 45.7624], + [-156.6194, 71.2824], + [-71.2874, 44.0639], + [-78.0418, 39.0337], + [-147.5026, 65.154], + [-97.57, 33.4012], + [-104.7456, 40.8155], + [-99.1066, 47.1617], + [-145.7514, 63.8811], + [-87.8039, 32.5417], + [-81.4362, 28.1251], + [-83.5019, 35.689], [-66.8687, 17.9696], [-72.1727, 
42.5369], [-149.2133, 63.8758], @@ -43,25 +55,14 @@ [-89.5373, 46.2339], [-99.2413, 47.1282], [-121.9519, 45.8205], - [-110.5391, 44.9535], - [-122.3303, 45.7624], - [-156.6194, 71.2824], - [-71.2874, 44.0639], - [-78.0418, 39.0337], - [-147.5026, 65.154], - [-97.57, 33.4012], - [-104.7456, 40.8155], - [-99.1066, 47.1617], - [-145.7514, 63.8811], - [-87.8039, 32.5417], - [-81.4362, 28.1251], - [-83.5019, 35.689] + [-110.5391, 44.9535] ] }, "properties": { "title": "tg_precip_lm_all_sites", - "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_precip_lm_all_sites model. Information for the model is provided as follows: The tg_precip_lm_all_sites model is a linear model fit using the function lm() in R. This is a very\nsimple model with only one covariate: total precipitation. y. This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together..\n The model predicts this variable at the following sites: GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_precip_lm_all_sites model. Information for the model is provided as follows: The tg_precip_lm_all_sites model is a linear model fit using the function lm() in R. This is a very\nsimple model with only one covariate: total precipitation. y. 
This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -92,6 +93,18 @@ "rcc_90", "Daily", "P1D", + "ABBY", + "BARR", + "BART", + "BLAN", + "BONA", + "CLBJ", + "CPER", + "DCFS", + "DEJU", + "DELA", + "DSNY", + "GRSM", "GUAN", "HARV", "HEAL", @@ -126,19 +139,7 @@ "UNDE", "WOOD", "WREF", - "YELL", - "ABBY", - "BARR", - "BART", - "BLAN", - "BONA", - "CLBJ", - "CPER", - "DCFS", - "DEJU", - "DELA", - "DSNY", - "GRSM" + "YELL" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_precip_lm_all_sites\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_randfor.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_randfor.json index 94e93c4747..94a1ac62e0 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_randfor.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_randfor.json @@ -9,22 +9,6 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-122.3303, 45.7624], - [-156.6194, 71.2824], - [-71.2874, 44.0639], - [-78.0418, 39.0337], - [-147.5026, 65.154], - [-97.57, 33.4012], - [-104.7456, 40.8155], - [-99.1066, 47.1617], - [-145.7514, 63.8811], - [-87.8039, 32.5417], - [-81.4362, 28.1251], - [-83.5019, 35.689], - [-66.8687, 17.9696], - [-72.1727, 42.5369], - [-149.2133, 
63.8758], - [-84.4686, 31.1948], [-106.8425, 32.5907], [-96.6129, 39.1104], [-96.5631, 39.1008], @@ -55,13 +39,30 @@ [-89.5373, 46.2339], [-99.2413, 47.1282], [-121.9519, 45.8205], - [-110.5391, 44.9535] + [-110.5391, 44.9535], + [-122.3303, 45.7624], + [-156.6194, 71.2824], + [-71.2874, 44.0639], + [-78.0418, 39.0337], + [-147.5026, 65.154], + [-97.57, 33.4012], + [-104.7456, 40.8155], + [-99.1066, 47.1617], + [-145.7514, 63.8811], + [-87.8039, 32.5417], + [-81.4362, 28.1251], + [-83.5019, 35.689], + [-66.8687, 17.9696], + [-72.1727, 42.5369], + [-149.2133, 63.8758], + [-84.4686, 31.1948] ] }, "properties": { "title": "tg_randfor", - "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_randfor model. Information for the model is provided as follows: Random Forest is a machine learning model that is fitted with the ranger() function in the ranger\nR package (Wright & Ziegler 2017) within the tidymodels framework (Kuhn & Wickham 2020). The\nmodel drivers are unlagged air temperature, air pressure, relative humidity, surface downwelling\nlongwave and shortwave radiation, precipitation, and northward and eastward wind.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_randfor model. Information for the model is provided as follows: Random Forest is a machine learning model that is fitted with the ranger() function in the ranger\nR package (Wright & Ziegler 2017) within the tidymodels framework (Kuhn & Wickham 2020). 
The\nmodel drivers are unlagged air temperature, air pressure, relative humidity, surface downwelling\nlongwave and shortwave radiation, precipitation, and northward and eastward wind.\n The model predicts this variable at the following sites: JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-04T00:00:00Z", "providers": [ @@ -92,22 +93,6 @@ "rcc_90", "Daily", "P1D", - "ABBY", - "BARR", - "BART", - "BLAN", - "BONA", - "CLBJ", - "CPER", - "DCFS", - "DEJU", - "DELA", - "DSNY", - "GRSM", - "GUAN", - "HARV", - "HEAL", - "JERC", "JORN", "KONA", "KONZ", @@ -138,7 +123,23 @@ "UNDE", "WOOD", "WREF", - "YELL" + "YELL", + "ABBY", + "BARR", + "BART", + "BLAN", + "BONA", + "CLBJ", + "CPER", + "DCFS", + "DEJU", + "DELA", + "DSNY", + "GRSM", + "GUAN", + "HARV", + "HEAL", + "JERC" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. 
This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_randfor\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_tbats.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_tbats.json index 7ce0bf4212..3d434d75d1 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_tbats.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_tbats.json @@ -9,10 +9,6 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-89.5373, 46.2339], - [-99.2413, 47.1282], - [-121.9519, 45.8205], - [-110.5391, 44.9535], [-122.3303, 45.7624], [-156.6194, 71.2824], [-71.2874, 44.0639], @@ -55,13 +51,18 @@ [-119.006, 37.0058], [-149.3705, 68.6611], [-89.5857, 45.4937], - [-95.1921, 39.0404] + [-95.1921, 39.0404], + [-89.5373, 46.2339], + [-99.2413, 47.1282], + [-121.9519, 45.8205], + [-110.5391, 44.9535] ] }, "properties": { "title": "tg_tbats", - "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_tbats model. Information for the model is provided as follows: The tg_tbats model is a TBATS (Trigonometric seasonality, Box-Cox transformation, ARMA\nerrors, Trend and Seasonal components) model fit using the function tbats() from the forecast package in\nR (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series model with no\ncovariates..\n The model predicts this variable at the following sites: UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_tbats model. Information for the model is provided as follows: The tg_tbats model is a TBATS (Trigonometric seasonality, Box-Cox transformation, ARMA\nerrors, Trend and Seasonal components) model fit using the function tbats() from the forecast package in\nR (Hyndman et al. 2023; Hyndman et al., 2008). This is an empirical time series model with no\ncovariates..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-07-03", + "updated": "2024-08-23", "start_datetime": "2023-01-01T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -92,10 +93,6 @@ "rcc_90", "Daily", "P1D", - "UNDE", - "WOOD", - "WREF", - "YELL", "ABBY", "BARR", "BART", @@ -138,7 +135,11 @@ "TEAK", "TOOL", "TREE", - "UKFS" + "UKFS", + "UNDE", + "WOOD", + "WREF", + "YELL" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": 
"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_tbats\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_temp_lm.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_temp_lm.json index 052b8e0240..290ceff37d 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_temp_lm.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_temp_lm.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_temp_lm", "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_temp_lm model. Information for the model is provided as follows: The tg_temp_lm model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation..\n The model predicts this variable at the following sites: JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_temp_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_temp_lm_all_sites.json b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_temp_lm_all_sites.json index 3bc7b32e2b..d3963f132e 100644 --- a/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_temp_lm_all_sites.json +++ b/catalog/summaries/Phenology/Daily_Red_chromatic_coordinate/models/tg_temp_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_temp_lm_all_sites", "description": "All summaries for the Daily_Red_chromatic_coordinate variable for the tg_temp_lm_all_sites model. Information for the model is provided as follows: The tg_temp_lm_all_sites model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation.This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Red_chromatic_coordinate", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=rcc_90/model_id=tg_temp_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/30min_Net_ecosystem_exchange/models/climatology.json b/catalog/summaries/Terrestrial/30min_Net_ecosystem_exchange/models/climatology.json index 2aae91f81e..e0f4656b31 100644 --- a/catalog/summaries/Terrestrial/30min_Net_ecosystem_exchange/models/climatology.json +++ b/catalog/summaries/Terrestrial/30min_Net_ecosystem_exchange/models/climatology.json @@ -61,7 +61,8 @@ "properties": { "title": "climatology", "description": "All summaries for the 30min_Net_ecosystem_exchange variable for the climatology model. Information for the model is provided as follows: Historical DOY mean and sd. 
Assumes normal distribution.\n The model predicts this variable at the following sites: UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2023-12-30", + "updated": "2023-12-31", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-01-09T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for 30min Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=PT30M/variable=nee/model_id=climatology?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=PT30M/variable=nee/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=PT30M/variable=nee/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=PT30M/variable=nee/model_id=climatology\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/30min_latent_heat_flux/models/climatology.json b/catalog/summaries/Terrestrial/30min_latent_heat_flux/models/climatology.json index 2f2678d2fc..73afad33ed 100644 --- a/catalog/summaries/Terrestrial/30min_latent_heat_flux/models/climatology.json +++ b/catalog/summaries/Terrestrial/30min_latent_heat_flux/models/climatology.json @@ -9,13 +9,10 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-76.56, 38.8901], [-119.7323, 37.1088], [-119.2622, 37.0334], [-110.8355, 31.9107], [-89.5864, 45.5089], - [-103.0293, 40.4619], - [-87.3933, 32.9505], [-119.006, 37.0058], [-149.3705, 68.6611], [-89.5857, 45.4937], @@ -27,6 +24,8 @@ [-122.3303, 45.7624], [-156.6194, 71.2824], [-71.2874, 44.0639], + [-103.0293, 40.4619], + [-87.3933, 32.9505], [-78.0418, 39.0337], [-147.5026, 65.154], [-97.57, 33.4012], @@ -44,8 +43,6 @@ [-96.6129, 39.1104], [-96.5631, 39.1008], [-67.0769, 18.0213], - [-78.1395, 38.8929], - [-105.546, 40.2759], [-88.1612, 31.8539], [-80.5248, 37.3783], 
[-109.3883, 38.2483], @@ -55,13 +52,17 @@ [-112.4524, 40.1776], [-84.2826, 35.9641], [-81.9934, 29.6893], - [-155.3173, 19.5531] + [-155.3173, 19.5531], + [-105.546, 40.2759], + [-78.1395, 38.8929], + [-76.56, 38.8901] ] }, "properties": { "title": "climatology", - "description": "All summaries for the 30min_latent_heat_flux variable for the climatology model. Information for the model is provided as follows: Historical DOY mean and sd. Assumes normal distribution.\n The model predicts this variable at the following sites: SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, SCBI, RMNP, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the 30min_latent_heat_flux variable for the climatology model. Information for the model is provided as follows: Historical DOY mean and sd. 
Assumes normal distribution.\n The model predicts this variable at the following sites: SJER, SOAP, SRER, STEI, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, STER, TALL, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2023-12-30", + "updated": "2023-12-31", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-01-09T00:00:00Z", "providers": [ @@ -92,13 +93,10 @@ "le", "30min", "PT30M", - "SERC", "SJER", "SOAP", "SRER", "STEI", - "STER", - "TALL", "TEAK", "TOOL", "TREE", @@ -110,6 +108,8 @@ "ABBY", "BARR", "BART", + "STER", + "TALL", "BLAN", "BONA", "CLBJ", @@ -127,8 +127,6 @@ "KONA", "KONZ", "LAJA", - "SCBI", - "RMNP", "LENO", "MLBS", "MOAB", @@ -138,7 +136,10 @@ "ONAQ", "ORNL", "OSBS", - "PUUM" + "PUUM", + "RMNP", + "SCBI", + "SERC" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for 30min latent_heat_flux", "href": "s3://anonymous@/project_id=neon4cast/duration=PT30M/variable=le/model_id=climatology?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=PT30M/variable=le/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=PT30M/variable=le/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=PT30M/variable=le/model_id=climatology\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/USUNEEDAILY.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/USUNEEDAILY.json index 06987d61c6..75abb34c24 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/USUNEEDAILY.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/USUNEEDAILY.json @@ -15,7 +15,8 @@ "properties": { "title": "USUNEEDAILY", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the USUNEEDAILY model. Information for the model is provided as follows: \"Home brew ARIMA.\" We didn't use a formal time series framework because of all the missing values in both our response variable and the weather covariates. 
So we used a GAM to fit a seasonal component based on day of year, and we included NEE the previous day as as an AR 1 term. We did some model selection, using cross validation, to identify temperature and relative humidity as weather covariates..\n The model predicts this variable at the following sites: PUUM.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2023-12-12", + "updated": "2023-12-14", "start_datetime": "2023-12-12T00:00:00Z", "end_datetime": "2024-01-16T00:00:00Z", "providers": [ @@ -193,7 +194,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=USUNEEDAILY?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=USUNEEDAILY?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=USUNEEDAILY?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=USUNEEDAILY\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/bookcast_forest.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/bookcast_forest.json index 52ed53b624..8ed1d86f54 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/bookcast_forest.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/bookcast_forest.json @@ -16,7 +16,8 @@ "properties": { "title": "bookcast_forest", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the bookcast_forest model. Information for the model is provided as follows: A simple daily timestep process-based model of a terrestrial carbon cycle. It includes leaves, wood, and soil pools. It uses a light-use efficiency GPP model to convert PAR to carbon. 
The model is derived from https://github.com/mdietze/FluxCourseForecast..\n The model predicts this variable at the following sites: TALL, OSBS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-06-13", + "updated": "2024-07-03", "start_datetime": "2024-01-10T00:00:00Z", "end_datetime": "2024-07-12T00:00:00Z", "providers": [ @@ -195,7 +196,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=bookcast_forest?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=bookcast_forest?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=bookcast_forest?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=bookcast_forest\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/cb_prophet.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/cb_prophet.json index b2b7ddd841..7fd6a227ba 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/cb_prophet.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/cb_prophet.json @@ -60,7 +60,8 @@ "properties": { "title": "cb_prophet", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the cb_prophet model. Information for the model is provided as follows: The Prophet model is an empirical model, specifically a non-linear regression model that includes\nseasonality effects (Taylor & Letham, 2018). 
The model relies on Bayesian estimation with an additive\nwhite noise error term.\n The model predicts this variable at the following sites: PUUM, GUAN, OSBS, SCBI, MOAB, BART, CPER, HARV, UNDE, STER, KONA, TREE, ABBY, LENO, UKFS, DEJU, KONZ, RMNP, BARR, JORN, SOAP, STEI, TALL, DCFS, TOOL, WOOD, OAES, HEAL, SERC, BLAN, GRSM, ORNL, SRER, NOGP, JERC, DELA, MLBS, NIWO, WREF, LAJA, TEAK, CLBJ, SJER, ONAQ, DSNY, BONA.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-02-06", + "updated": "2024-02-07", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-04T00:00:00Z", "providers": [ @@ -283,7 +284,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=cb_prophet\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/climatology.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/climatology.json index a4f9d3886a..a7dda68fb6 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/climatology.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/climatology.json @@ -61,7 +61,8 @@ "properties": { "title": "climatology", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the climatology model. Information for the model is provided as follows: Historical DOY mean and sd.
Assumes normal distribution.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-11-15T00:00:00Z", "end_datetime": "2024-08-08T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=climatology?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=climatology\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/persistenceRW.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/persistenceRW.json index 9e11174663..5e8a41bae8 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/persistenceRW.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/persistenceRW.json @@ -61,7 +61,8 @@ "properties": { "title": "persistenceRW", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the persistenceRW model.
Information for the model is provided as follows: Random walk from the fable package with ensembles used to represent uncertainty.\n The model predicts this variable at the following sites: NOGP, OAES, ONAQ, ORNL, OSBS, UNDE, WOOD, WREF, YELL, BONA, CLBJ, CPER, DCFS, DEJU, DELA, HEAL, JERC, JORN, KONA, KONZ, LAJA, SJER, SOAP, SRER, STEI, STER, TALL, DSNY, GRSM, GUAN, HARV, TEAK, TOOL, TREE, UKFS, ABBY, BARR, BART, BLAN, LENO, MLBS, MOAB, NIWO, PUUM, RMNP, SCBI, SERC.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-11-15T00:00:00Z", "end_datetime": "2024-08-06T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=persistenceRW?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=persistenceRW\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_arima.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_arima.json index 59a4e73b39..869fe40b31 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_arima.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_arima.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_arima", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the tg_arima model. Information for the model is provided as follows: The tg_arima model is an AutoRegressive Integrated Moving Average (ARIMA) model fit using\nthe function auto.arima() from the forecast package in R (Hyndman et al.
2023; Hyndman et al., 2008).\nThis is an empirical time series model with no covariates.\n The model predicts this variable at the following sites: KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-01-07T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_arima\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_ets.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_ets.json index ed0a5257e3..9a4be47559 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_ets.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_ets.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_ets", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the tg_ets model. Information for the model is provided as follows: The tg_ets model is an Error, Trend, Seasonal (ETS) model fit using the function ets() from the\nforecast package in R (Hyndman et al. 2023; Hyndman et al., 2008).
This is an empirical time series\nmodel with no covariates..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-23T00:00:00Z", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-01-07T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_ets\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_humidity_lm.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_humidity_lm.json index 5ea20d9458..844b0d5415 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_humidity_lm.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_humidity_lm.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_humidity_lm", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the tg_humidity_lm model. Information for the model is provided as follows: The tg_humidity_lm model is a linear model fit using the function lm() in R.
This is a very simple\nmodel with only one covariate: relative humidity.\n The model predicts this variable at the following sites: SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-04", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_humidity_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_humidity_lm_all_sites.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_humidity_lm_all_sites.json index 54e51c88a4..1229cc6d87 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_humidity_lm_all_sites.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_humidity_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_humidity_lm_all_sites", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the tg_humidity_lm_all_sites model. Information for the model is provided as follows: The tg_humidity_lm_all_sites model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity.
This model was used to forecast water temperature and dissolved oxygen concentration at the\nseven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_humidity_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_precip_lm.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_precip_lm.json index 562b70c288..8c22de314e 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_precip_lm.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_precip_lm.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_precip_lm", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the tg_precip_lm model. Information for the model is provided as follows: The tg_precip_lm model is a linear model fit using the function lm() in R.
This is a very simple\nmodel with only total precipitation used as a model covariate..\n The model predicts this variable at the following sites: SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-04", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_precip_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_precip_lm_all_sites.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_precip_lm_all_sites.json index 1b1f74f0ed..e5e4ce0db6 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_precip_lm_all_sites.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_precip_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_precip_lm_all_sites", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the tg_precip_lm_all_sites model. Information for the model is provided as follows: The tg_precip_lm_all_sites model is a linear model fit using the function lm() in R. This is a very\nsimple model with only one covariate: total precipitation.
This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_precip_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_randfor.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_randfor.json index 500db6c5ee..a1f2e673d3 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_randfor.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_randfor.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_randfor", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the tg_randfor model. Information for the model is provided as follows: Random Forest is a machine learning model that is fitted with the ranger() function in the ranger\nR package (Wright & Ziegler 2017) within the tidymodels framework (Kuhn & Wickham 2020).
The\nmodel drivers are unlagged air temperature, air pressure, relative humidity, surface downwelling\nlongwave and shortwave radiation, precipitation, and northward and eastward wind.\n The model predicts this variable at the following sites: SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-04T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_randfor\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_tbats.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_tbats.json index 8ca41b6866..c5ab96a6e5 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_tbats.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_tbats.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_tbats", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the tg_tbats model. Information for the model is provided as follows: The tg_tbats model is a TBATS (Trigonometric seasonality, Box-Cox transformation, ARMA\nerrors, Trend and Seasonal components) model fit using the function tbats() from the forecast package in\nR (Hyndman et al. 2023; Hyndman et al., 2008).
This is an empirical time series model with no\ncovariates..\n The model predicts this variable at the following sites: CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-01-01T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_tbats\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_temp_lm.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_temp_lm.json index 6950b1e289..a8d1bd11d3 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_temp_lm.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_temp_lm.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_temp_lm", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the tg_temp_lm model. Information for the model is provided as follows: The tg_temp_lm model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation..\n The model predicts this variable at the following sites: SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-04", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_temp_lm\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_temp_lm_all_sites.json b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_temp_lm_all_sites.json index 41ea3b833b..cd1a57bf6e 100644 --- a/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_temp_lm_all_sites.json +++ b/catalog/summaries/Terrestrial/Daily_Net_ecosystem_exchange/models/tg_temp_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_temp_lm_all_sites", "description": "All summaries for the Daily_Net_ecosystem_exchange variable for the tg_temp_lm_all_sites model. Information for the model is provided as follows: The tg_temp_lm_all_sites model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation.This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily Net_ecosystem_exchange", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=nee/model_id=tg_temp_lm_all_sites\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/collection.json b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/collection.json index 31e6d4caf8..65b681a206 100644 --- a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/collection.json +++ b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/collection.json @@ -13,6 +13,11 @@ "type": "application/json", "href": "./models/tg_arima.json" }, + { + "rel": "item", + "type": "application/json", + "href": "./models/tg_ets.json" + }, { "rel": "item", "type": "application/json", @@ -41,32 +46,27 @@ { "rel": "item", "type": "application/json", - "href": "./models/cb_prophet.json" - }, - { - "rel": "item", - "type": "application/json", - "href": "./models/climatology.json" + "href": "./models/tg_humidity_lm.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_ets.json" + "href": "./models/tg_humidity_lm_all_sites.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_humidity_lm.json" + "href": "./models/tg_precip_lm.json" }, { "rel": "item", "type": "application/json", - "href": 
"./models/tg_humidity_lm.json" + "href": "./models/tg_precip_lm.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_humidity_lm_all_sites.json" + "href": "./models/cb_prophet.json" }, { "rel": "item", "type": "application/json", - "href": "./models/tg_precip_lm.json" + "href": "./models/climatology.json" }, { "rel": "parent", diff --git a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/cb_prophet.json b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/cb_prophet.json index 131b2f2b83..4e9deaa969 100644 --- a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/cb_prophet.json +++ b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/cb_prophet.json @@ -60,7 +60,8 @@ "properties": { "title": "cb_prophet", "description": "All summaries for the Daily_latent_heat_flux variable for the cb_prophet model. Information for the model is provided as follows: The Prophet model is an empirical model, specifically a non-linear regression model that includes\nseasonality effects (Taylor & Letham, 2018). 
The model relies on Bayesian estimation with an additive\nwhite noise error term.\n The model predicts this variable at the following sites: DSNY, SCBI, MOAB, PUUM, GUAN, BART, CPER, HARV, UNDE, STER, KONA, TREE, ABBY, LENO, UKFS, DEJU, KONZ, RMNP, BARR, JORN, SOAP, STEI, TALL, DCFS, TOOL, WOOD, OAES, HEAL, SERC, BLAN, GRSM, ORNL, SRER, NOGP, JERC, DELA, MLBS, NIWO, WREF, LAJA, TEAK, CLBJ, SJER, OSBS, BONA, ONAQ.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-02-06", + "updated": "2024-02-07", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-04T00:00:00Z", "providers": [ @@ -283,7 +284,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily latent_heat_flux", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=cb_prophet?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=le/model_id=cb_prophet\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/climatology.json b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/climatology.json index 20fca3a3ad..c09b292ad9 100644 --- a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/climatology.json +++ b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/climatology.json @@ -61,7 +61,8 @@ "properties": { "title": "climatology", "description": "All summaries for the Daily_latent_heat_flux variable for the climatology model. Information for the model is provided as follows: Historical DOY mean and sd. 
Assumes normal distribution.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-11-15T00:00:00Z", "end_datetime": "2024-08-08T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily latent_heat_flux", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=climatology?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=climatology?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=le/model_id=climatology\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_arima.json b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_arima.json index 40229093fb..5d9e463e9c 100644 --- a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_arima.json +++ b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_arima.json @@ -9,21 +9,6 @@ "geometry": { "type": "MultiPoint", "coordinates": [ - [-67.0769, 18.0213], - [-88.1612, 31.8539], - [-80.5248, 37.3783], - [-109.3883, 38.2483], - [-105.5824, 40.0543], - [-100.9154, 46.7697], - [-99.0588, 35.4106], - [-112.4524, 40.1776], - [-84.2826, 35.9641], - [-81.9934, 29.6893], - [-155.3173, 19.5531], - [-105.546, 40.2759], - [-78.1395, 38.8929], - [-76.56, 38.8901], - [-119.7323, 37.1088], [-119.2622, 37.0334], [-110.8355, 31.9107], [-89.5864, 45.5089], @@ -55,13 +40,29 @@ [-84.4686, 31.1948], [-106.8425, 32.5907], [-96.6129, 39.1104], - [-96.5631, 39.1008] + [-96.5631, 39.1008], + [-67.0769, 18.0213], + [-88.1612, 31.8539], + 
[-80.5248, 37.3783], + [-109.3883, 38.2483], + [-105.5824, 40.0543], + [-100.9154, 46.7697], + [-99.0588, 35.4106], + [-112.4524, 40.1776], + [-84.2826, 35.9641], + [-81.9934, 29.6893], + [-155.3173, 19.5531], + [-105.546, 40.2759], + [-78.1395, 38.8929], + [-76.56, 38.8901], + [-119.7323, 37.1088] ] }, "properties": { "title": "tg_arima", - "description": "All summaries for the Daily_latent_heat_flux variable for the tg_arima model. Information for the model is provided as follows: The tg_arima model is an AutoRegressive Integrated Moving Average (ARIMA) model fit using\nthe function auto.arima() from the forecast package in R (Hyndman et al. 2023; Hyndman et al., 2008).\nThis is an empirical time series model with no covariates.\n The model predicts this variable at the following sites: LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_latent_heat_flux variable for the tg_arima model. Information for the model is provided as follows: The tg_arima model is an AutoRegressive Integrated Moving Average (ARIMA) model fit using\nthe function auto.arima() from the forecast package in R (Hyndman et al. 
2023; Hyndman et al., 2008).\nThis is an empirical time series model with no covariates.\n The model predicts this variable at the following sites: SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-01-07T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -92,21 +93,6 @@ "le", "Daily", "P1D", - "LAJA", - "LENO", - "MLBS", - "MOAB", - "NIWO", - "NOGP", - "OAES", - "ONAQ", - "ORNL", - "OSBS", - "PUUM", - "RMNP", - "SCBI", - "SERC", - "SJER", "SOAP", "SRER", "STEI", @@ -138,7 +124,22 @@ "JERC", "JORN", "KONA", - "KONZ" + "KONZ", + "LAJA", + "LENO", + "MLBS", + "MOAB", + "NIWO", + "NOGP", + "OAES", + "ONAQ", + "ORNL", + "OSBS", + "PUUM", + "RMNP", + "SCBI", + "SERC", + "SJER" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily latent_heat_flux", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=le/model_id=tg_arima\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_ets.json b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_ets.json index cf74837920..090f6602df 100644 --- a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_ets.json +++ b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_ets.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_ets", "description": "All summaries for the Daily_latent_heat_flux variable for the tg_ets model. Information for the model is provided as follows: The tg_ets model is an Error, Trend, Seasonal (ETS) model fit using the function ets() from the\nforecast package in R (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series\nmodel with no covariates..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-01-07T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily latent_heat_flux", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=le/model_id=tg_ets\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_humidity_lm.json b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_humidity_lm.json index 0e4a2c60cc..a9b5cc44d1 100644 --- a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_humidity_lm.json +++ b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_humidity_lm.json @@ -9,6 +9,9 @@ "geometry": { "type": "MultiPoint", "coordinates": [ + [-72.1727, 42.5369], + [-149.2133, 63.8758], + [-84.4686, 31.1948], [-122.3303, 45.7624], [-156.6194, 71.2824], [-71.2874, 44.0639], [-78.0418, 39.0337], [-147.5026, 65.154], [-97.57, 33.4012], [-104.7456, 40.8155], [-99.1066, 47.1617], [-145.7514, 63.8811], [-87.8039, 32.5417], [-81.4362, 28.1251], [-83.5019, 35.689], [-66.8687, 17.9696], - [-72.1727, 42.5369], - [-149.2133, 63.8758], - [-84.4686, 31.1948], [-106.8425, 32.5907], [-96.6129, 39.1104], [-96.5631, 39.1008], @@ -60,8 +60,9 @@ }, "properties": { "title": "tg_humidity_lm", - "description": "All summaries for the Daily_latent_heat_flux variable for the tg_humidity_lm model. 
Information for the model is provided as follows: The tg_humidity_lm model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_latent_heat_flux variable for the tg_humidity_lm model. Information for the model is provided as follows: The tg_humidity_lm model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity.\n The model predicts this variable at the following sites: HARV, HEAL, JERC, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-02-03", + "updated": "2024-07-04", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -92,6 +93,9 @@ "le", "Daily", "P1D", + "HARV", + "HEAL", + "JERC", "ABBY", "BARR", "BART", @@ -105,9 +109,6 @@ "DSNY", "GRSM", "GUAN", - "HARV", - "HEAL", - "JERC", "JORN", "KONA", "KONZ", @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily latent_heat_flux", "href": 
"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=le/model_id=tg_humidity_lm\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_humidity_lm_all_sites.json b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_humidity_lm_all_sites.json index 88bbd6dc57..fb423fbe2e 100644 --- a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_humidity_lm_all_sites.json +++ b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_humidity_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_humidity_lm_all_sites", "description": "All summaries for the Daily_latent_heat_flux variable for the tg_humidity_lm_all_sites model. Information for the model is provided as follows: The tg_humidity_lm_all_sites model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity. 
This model was used to forecast water temperature and dissolved oxygen concentration at the\nseven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily latent_heat_flux", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=le/model_id=tg_humidity_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_precip_lm.json b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_precip_lm.json index de8371c2e5..f2554fdcf7 100644 --- a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_precip_lm.json +++ b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_precip_lm.json @@ -9,6 +9,21 @@ "geometry": { "type": "MultiPoint", "coordinates": [ + [-122.3303, 45.7624], + [-156.6194, 71.2824], + [-71.2874, 44.0639], + [-78.0418, 39.0337], + [-147.5026, 65.154], + [-97.57, 33.4012], + [-104.7456, 40.8155], + [-99.1066, 47.1617], + [-145.7514, 63.8811], + [-87.8039, 32.5417], + [-81.4362, 28.1251], + [-83.5019, 35.689], + [-66.8687, 17.9696], + [-72.1727, 42.5369], + [-149.2133, 63.8758], [-84.4686, 31.1948], [-106.8425, 32.5907], [-96.6129, 39.1104], @@ -40,28 +55,14 @@ [-89.5373, 46.2339], [-99.2413, 47.1282], [-121.9519, 45.8205], - [-110.5391, 44.9535], - [-122.3303, 45.7624], - [-156.6194, 
71.2824], - [-71.2874, 44.0639], - [-78.0418, 39.0337], - [-147.5026, 65.154], - [-97.57, 33.4012], - [-104.7456, 40.8155], - [-99.1066, 47.1617], - [-145.7514, 63.8811], - [-87.8039, 32.5417], - [-81.4362, 28.1251], - [-83.5019, 35.689], - [-66.8687, 17.9696], - [-72.1727, 42.5369], - [-149.2133, 63.8758] + [-110.5391, 44.9535] ] }, "properties": { "title": "tg_precip_lm", - "description": "All summaries for the Daily_latent_heat_flux variable for the tg_precip_lm model. Information for the model is provided as follows: The tg_precip_lm model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only total precipitation used as a model covariate..\n The model predicts this variable at the following sites: JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "description": "All summaries for the Daily_latent_heat_flux variable for the tg_precip_lm model. Information for the model is provided as follows: The tg_precip_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only total precipitation used as a model covariate..\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", + "datetime": "2024-02-03", + "updated": "2024-07-04", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -92,6 +93,21 @@ "le", "Daily", "P1D", + "ABBY", + "BARR", + "BART", + "BLAN", + "BONA", + "CLBJ", + "CPER", + "DCFS", + "DEJU", + "DELA", + "DSNY", + "GRSM", + "GUAN", + "HARV", + "HEAL", "JERC", "JORN", "KONA", @@ -123,22 +139,7 @@ "UNDE", "WOOD", "WREF", - "YELL", - "ABBY", - "BARR", - "BART", - "BLAN", - "BONA", - "CLBJ", - "CPER", - "DCFS", - "DEJU", - "DELA", - "DSNY", - "GRSM", - "GUAN", - "HARV", - "HEAL" + "YELL" ], "table:columns": [ { @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily latent_heat_flux", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. 
Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=le/model_id=tg_precip_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_precip_lm_all_sites.json b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_precip_lm_all_sites.json index 60bd4f4198..baa08c5e48 100644 --- a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_precip_lm_all_sites.json +++ b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_precip_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_precip_lm_all_sites", "description": "All summaries for the Daily_latent_heat_flux variable for the tg_precip_lm_all_sites model. Information for the model is provided as follows: The tg_precip_lm_all_sites model is a linear model fit using the function lm() in R. This is a very\nsimple model with only one covariate: total precipitation. y. 
This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together..\n The model predicts this variable at the following sites: KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily latent_heat_flux", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=le/model_id=tg_precip_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_randfor.json b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_randfor.json index 8c8f31855e..7f557ab436 100644 --- a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_randfor.json +++ b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_randfor.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_randfor", "description": "All summaries for the Daily_latent_heat_flux variable for the tg_randfor model. Information for the model is provided as follows: Random Forest is a machine learning model that is fitted with the ranger() function in the ranger\nR package (Wright & Ziegler 2017) within the tidymodels framework (Kuhn & Wickham 2020). 
The\nmodel drivers are unlagged air temperature, air pressure, relative humidity, surface downwelling\nlongwave and shortwave radiation, precipitation, and northward and eastward wind.\n The model predicts this variable at the following sites: JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-04T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily latent_heat_flux", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=le/model_id=tg_randfor\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_tbats.json b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_tbats.json index a27ee7d095..cb8eba8ea1 100644 --- a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_tbats.json +++ b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_tbats.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_tbats", "description": "All summaries for the Daily_latent_heat_flux variable for the tg_tbats model. Information for the model is provided as follows: The tg_tbats model is a TBATS (Trigonometric seasonality, Box-Cox transformation, ARMA\nerrors, Trend and Seasonal components) model fit using the function tbats() from the forecast package in\nR (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series model with no\ncovariates..\n The model predicts this variable at the following sites: MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-07-03", + "updated": "2024-07-04", "start_datetime": "2023-01-01T00:00:00Z", "end_datetime": "2024-08-02T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily latent_heat_flux", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=le/model_id=tg_tbats\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_temp_lm.json b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_temp_lm.json index 283a131677..a36ddf1b3d 100644 --- a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_temp_lm.json +++ b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_temp_lm.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_temp_lm", "description": "All summaries for the Daily_latent_heat_flux variable for the tg_temp_lm model. Information for the model is provided as follows: The tg_temp_lm model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation..\n The model predicts this variable at the following sites: JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL, ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-02-03", + "updated": "2024-07-04", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-08T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily latent_heat_flux", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=le/model_id=tg_temp_lm\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_temp_lm_all_sites.json b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_temp_lm_all_sites.json index 74da8514ec..63ad1f8a71 100644 --- a/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_temp_lm_all_sites.json +++ b/catalog/summaries/Terrestrial/Daily_latent_heat_flux/models/tg_temp_lm_all_sites.json @@ -61,7 +61,8 @@ "properties": { "title": "tg_temp_lm_all_sites", "description": "All summaries for the Daily_latent_heat_flux variable for the tg_temp_lm_all_sites model. Information for the model is provided as follows: The tg_temp_lm_all_sites model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: total precipitation.This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: ABBY, BARR, BART, BLAN, BONA, CLBJ, CPER, DCFS, DEJU, DELA, DSNY, GRSM, GUAN, HARV, HEAL, JERC, JORN, KONA, KONZ, LAJA, LENO, MLBS, MOAB, NIWO, NOGP, OAES, ONAQ, ORNL, OSBS, PUUM, RMNP, SCBI, SERC, SJER, SOAP, SRER, STEI, STER, TALL, TEAK, TOOL, TREE, UKFS, UNDE, WOOD, WREF, YELL.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-31", + "updated": "2024-07-03", "start_datetime": "2023-11-14T00:00:00Z", "end_datetime": "2024-03-05T00:00:00Z", "providers": [ @@ -285,7 +286,7 @@ "type": "application/x-parquet", "title": "Database Access for Daily latent_heat_flux", "href": "s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1D/variable=le/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1D/variable=le/model_id=tg_temp_lm_all_sites\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_arima.json b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_arima.json index dbda92c42a..0c217245e3 100644 --- a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_arima.json +++ b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_arima.json @@ -23,7 +23,8 @@ "properties": { "title": "tg_arima", "description": "All summaries for the Weekly_Amblyomma_americanum_population variable for the tg_arima model. Information for the model is provided as follows: The tg_arima model is an AutoRegressive Integrated Moving Average (ARIMA) model fit using\nthe function auto.arima() from the forecast package in R (Hyndman et al. 
2023; Hyndman et al., 2008).\nThis is an empirical time series model with no covariates.\n The model predicts this variable at the following sites: BLAN, KONZ, LENO, ORNL, OSBS, SCBI, SERC, TALL, UKFS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-06-30", + "updated": "2024-07-04", "start_datetime": "2023-02-13T00:00:00Z", "end_datetime": "2025-06-23T00:00:00Z", "providers": [ @@ -209,7 +210,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly Amblyomma_americanum_population", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_arima?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```python\n# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_arima\"\ncon.read_parquet(path + \"/**\")\n```\n" } } } diff --git a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_ets.json b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_ets.json index e5b6803236..cc8715ab78 100644 --- a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_ets.json +++ b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_ets.json @@ -23,7 +23,8 @@ "properties": { "title": "tg_ets", "description": "All summaries for the Weekly_Amblyomma_americanum_population variable for the tg_ets model. Information for the model is provided as follows: The tg_ets model is an Error, Trend, Seasonal (ETS) model fit using the function ets() from the\nforecast package in R (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series\nmodel with no covariates..\n The model predicts this variable at the following sites: BLAN, KONZ, LENO, ORNL, OSBS, SCBI, SERC, TALL, UKFS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-06-30", + "updated": "2024-07-04", "start_datetime": "2023-02-06T00:00:00Z", "end_datetime": "2025-06-23T00:00:00Z", "providers": [ @@ -209,7 +210,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly Amblyomma_americanum_population", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_ets?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdbf.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );/n\n '''\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_ets\"\ncon.read_parquet(path + \"/**\")" } } } diff --git a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_humidity_lm.json b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_humidity_lm.json index 67c46e95c1..4a745990a2 100644 --- a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_humidity_lm.json +++ b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_humidity_lm.json @@ -23,7 +23,8 @@ "properties": { "title": "tg_humidity_lm", "description": "All summaries for the Weekly_Amblyomma_americanum_population variable for the tg_humidity_lm model. Information for the model is provided as follows: The tg_humidity_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only one covariate: relative humidity.\n The model predicts this variable at the following sites: BLAN, KONZ, LENO, ORNL, OSBS, SCBI, SERC, TALL, UKFS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-04", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -209,7 +210,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly Amblyomma_americanum_population", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_humidity_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_humidity_lm\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_humidity_lm_all_sites.json b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_humidity_lm_all_sites.json index 71fb330d37..daf3b1ba70 100644 --- a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_humidity_lm_all_sites.json +++ b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_humidity_lm_all_sites.json @@ -23,7 +23,8 @@ "properties": { "title": "tg_humidity_lm_all_sites", "description": "All summaries for the Weekly_Amblyomma_americanum_population variable for the tg_humidity_lm_all_sites model. Information for the model is provided as follows: The tg_humidity_lm_all_sites model is a linear model fit using the function lm() in R. This is a very simple\nmodel with only one covariate: relative humidity. 
This model was used to forecast water temperature and dissolved oxygen concentration at the\nseven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: BLAN, KONZ, LENO, ORNL, OSBS, SCBI, SERC, TALL, UKFS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-04", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -209,7 +210,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly Amblyomma_americanum_population", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_humidity_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_humidity_lm_all_sites\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_lasso.json b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_lasso.json index 2df19c2201..f737a8aef2 100644 --- a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_lasso.json +++ b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_lasso.json @@ -22,7 +22,8 @@ "properties": { "title": "tg_lasso", "description": "All summaries for the Weekly_Amblyomma_americanum_population variable for the tg_lasso model. Information for the model is provided as follows: Lasso is a machine learning model implemented in the same workflow as tg_randfor, but with\ndifferent hyperparameter tuning. The model drivers are unlagged air temperature, air pressure, relative\nhumidity, surface downwelling longwave and shortwave radiation, precipitation, and northward and\neastward wind. 
Lasso regressions were fitted with the function glmnet() in\nthe package glmnet (Tay et al. 2023), where the regularization hyperparameter (lambda) is tuned and\nselected with 10-fold cross validation..\n The model predicts this variable at the following sites: BLAN, KONZ, ORNL, OSBS, SCBI, SERC, TALL, UKFS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-04", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -207,7 +208,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly Amblyomma_americanum_population", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_lasso?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_lasso\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_precip_lm.json b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_precip_lm.json index df694f454c..3f79812c9e 100644 --- a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_precip_lm.json +++ b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_precip_lm.json @@ -23,7 +23,8 @@ "properties": { "title": "tg_precip_lm", "description": "All summaries for the Weekly_Amblyomma_americanum_population variable for the tg_precip_lm model. Information for the model is provided as follows: The tg_precip_lm model is a linear model fit using the function lm() in R. 
This is a very simple\nmodel with only total precipitation used as a model covariate..\n The model predicts this variable at the following sites: BLAN, KONZ, LENO, ORNL, OSBS, SCBI, SERC, TALL, UKFS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-04", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -209,7 +210,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly Amblyomma_americanum_population", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_precip_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_precip_lm\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_precip_lm_all_sites.json b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_precip_lm_all_sites.json index 711186d140..44dd5d73c1 100644 --- a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_precip_lm_all_sites.json +++ b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_precip_lm_all_sites.json @@ -23,7 +23,8 @@ "properties": { "title": "tg_precip_lm_all_sites", "description": "All summaries for the Weekly_Amblyomma_americanum_population variable for the tg_precip_lm_all_sites model. Information for the model is provided as follows: The tg_precip_lm_all_sites model is a linear model fit using the function lm() in R. This is a very\nsimple model with only one covariate: total precipitation. 
This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together..\n The model predicts this variable at the following sites: BLAN, KONZ, LENO, ORNL, OSBS, SCBI, SERC, TALL, UKFS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-04", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -209,7 +210,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly Amblyomma_americanum_population", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_precip_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_precip_lm_all_sites\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_randfor.json b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_randfor.json index f5aca0383b..397c660f88 100644 --- a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_randfor.json +++ b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_randfor.json @@ -22,7 +22,8 @@ "properties": { "title": "tg_randfor", "description": "All summaries for the Weekly_Amblyomma_americanum_population variable for the tg_randfor model. Information for the model is provided as follows: Random Forest is a machine learning model that is fitted with the ranger() function in the ranger\nR package (Wright & Ziegler 2017) within the tidymodels framework (Kuhn & Wickham 2020). 
The\nmodel drivers are unlagged air temperature, air pressure, relative humidity, surface downwelling\nlongwave and shortwave radiation, precipitation, and northward and eastward wind.\n The model predicts this variable at the following sites: BLAN, KONZ, ORNL, OSBS, SCBI, SERC, TALL, UKFS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-04", "start_datetime": "2023-11-19T00:00:00Z", "end_datetime": "2024-03-01T00:00:00Z", "providers": [ @@ -207,7 +208,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly Amblyomma_americanum_population", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_randfor?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_randfor\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_tbats.json b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_tbats.json index 103b06d3e4..d2706cbfd0 100644 --- a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_tbats.json +++ b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_tbats.json @@ -23,7 +23,8 @@ "properties": { "title": "tg_tbats", "description": "All summaries for the Weekly_Amblyomma_americanum_population variable for the tg_tbats model. Information for the model is provided as follows: The tg_tbats model is a TBATS (Trigonometric seasonality, Box-Cox transformation, ARMA\nerrors, Trend and Seasonal components) model fit using the function tbats() from the forecast package in\nR (Hyndman et al. 2023; Hyndman et al., 2008). 
This is an empirical time series model with no\ncovariates..\n The model predicts this variable at the following sites: BLAN, KONZ, LENO, ORNL, OSBS, SCBI, SERC, TALL, UKFS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-06-30", + "updated": "2024-07-04", "start_datetime": "2023-01-02T00:00:00Z", "end_datetime": "2025-06-23T00:00:00Z", "providers": [ @@ -209,7 +210,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly Amblyomma_americanum_population", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_tbats?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_tbats\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_temp_lm.json b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_temp_lm.json index 532485c364..a08afd9e90 100644 --- a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_temp_lm.json +++ b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_temp_lm.json @@ -23,7 +23,8 @@ "properties": { "title": "tg_temp_lm", "description": "All summaries for the Weekly_Amblyomma_americanum_population variable for the tg_temp_lm model. Information for the model is provided as follows: The tg_temp_lm model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: air temperature.\n The model predicts this variable at the following sites: BLAN, KONZ, LENO, ORNL, OSBS, SCBI, SERC, TALL, UKFS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-04", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -209,7 +210,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly Amblyomma_americanum_population", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_temp_lm?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_temp_lm\"\ncon.read_parquet(path + \"/**\")\n```" } } } diff --git a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_temp_lm_all_sites.json b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_temp_lm_all_sites.json index e849ce2c21..d6a68793b8 100644 --- a/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_temp_lm_all_sites.json +++ b/catalog/summaries/Ticks/Weekly_Amblyomma_americanum_population/models/tg_temp_lm_all_sites.json @@ -23,7 +23,8 @@ "properties": { "title": "tg_temp_lm_all_sites", "description": "All summaries for the Weekly_Amblyomma_americanum_population variable for the tg_temp_lm_all_sites model. Information for the model is provided as follows: The tg_temp_lm_all_sites model is a linear model fit using the function lm() in R. 
This is a very\nsimple model with only one covariate: air temperature. This model was used to forecast water temperature and dissolved oxygen\nconcentration at the seven lake sites, with the model fitted for all sites together.\n The model predicts this variable at the following sites: BLAN, KONZ, LENO, ORNL, OSBS, SCBI, SERC, TALL, UKFS.\n Summaries are the forecasts statistics of the raw forecasts (i.e., mean, median, confidence intervals)", - "datetime": "2024-10-24T00:00:00Z", + "datetime": "2024-01-28", + "updated": "2024-07-04", "start_datetime": "2023-11-20T00:00:00Z", "end_datetime": "2024-02-26T00:00:00Z", "providers": [ @@ -209,7 +210,7 @@ "type": "application/x-parquet", "title": "Database Access for Weekly Amblyomma_americanum_population", "href": "s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org", - "description": "Use `arrow` for remote access to the database. This R code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n" + "description": "Use `R` or `Python` code for remote access to the database. 
This code will return results for this variable and model combination.\n\n### R\n\n```{r}\n# Use code below\n\nall_results <- arrow::open_dataset(\"s3://anonymous@/project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_temp_lm_all_sites?endpoint_override=sdsc.osn.xsede.org\")\ndf <- all_results |> dplyr::collect()\n\n```\n \n\nYou can use dplyr operations before calling `dplyr::collect()` to `summarise`, `select` columns, and/or `filter` rows prior to pulling the data into a local `data.frame`. Reducing the data that is pulled locally will speed up the data download speed and reduce your memory usage.\n\n\n### Python\n\n```# Use code below\n\n\nimport ibis\n\n con = ibis.duckdb.connect()\n\n\n con.raw_sql(f'''\n\n CREATE OR REPLACE SECRET secret (\n\n TYPE S3,\n\n ENDPOINT 'sdsc.osn.xsede.org',\n\n URL_STYLE 'path'\n\n\n );\n ''')\n\n\n path = \"s3:///project_id=neon4cast/duration=P1W/variable=amblyomma_americanum/model_id=tg_temp_lm_all_sites\"\ncon.read_parquet(path + \"/**\")\n```" } } }