Use existing framework for FTPPRD/NOMADS retries
Avoids the failure mode where requests to FTPPRD timed out without the
catch ever being reached, so data stopped flowing instead of falling
back to the backup NOMADS service. Now the base_url is switched
automatically whenever 5 minutes pass without a successful update.
zqianem authored and gravinamike committed Jun 28, 2024
1 parent 9e3b6ff commit 80e3932
Showing 3 changed files with 26 additions and 19 deletions.
backend/sources/gfs.js: 28 changes (17 additions, 11 deletions)

@@ -21,19 +21,20 @@ export async function forage(current_state, datasets) {
       : typical_metadata(d, dt, shared_metadata);
   });
 
-  let url = gfs_url({ forecast, offset, system });
+  let base_url = choose_base_url(current_state);
+  let url = gfs_url({ forecast, offset, system, base_url });
   let compression_level = system === 'gdas' && offset < 6 ? 11 : 6;
 
   let simple_datasets = datasets.filter(d => !d.accumulation);
   await convert_simple(url, simple_datasets, dt, compression_level);
 
   if (offset !== 0) {
-    let urls = [url, gfs_url(current_state)];
+    let urls = [url, gfs_url({ ...current_state, base_url })];
     let accum_datasets = datasets.filter(d => d.accumulation);
     await convert_accum(urls, accum_datasets, dt, offset, compression_level);
   }
 
-  return { metadatas, new_state: { forecast, offset, system } };
+  return { metadatas, new_state: { forecast, offset, system, base_url } };
 }
 
 export function increment_state(current_state) {
@@ -51,10 +52,20 @@ export function increment_state(current_state) {
   return { forecast, offset, system };
 }
 
-export const base_url = 'https://ftpprd.ncep.noaa.gov/data/nccf/com/';
-export const backup_url = 'https://nomads.ncep.noaa.gov/pub/data/nccf/com/';
+const ftpprd_url = 'https://ftpprd.ncep.noaa.gov/data/nccf/com/';
+const nomads_url = 'https://nomads.ncep.noaa.gov/pub/data/nccf/com/';
 
-function gfs_url({ forecast, offset, system }) {
+export function choose_base_url({ last_successful_update, base_url }) {
+  if (!(base_url && last_successful_update))
+    return ftpprd_url;
+
+  if (new Date() - new Date(last_successful_update) < 5 * 60_000)
+    return base_url;
+
+  return base_url === ftpprd_url ? nomads_url : ftpprd_url;
+}
+
+function gfs_url({ forecast, offset, system, base_url }) {
   let fdt = Datetime.from(forecast);
 
   return base_url
@@ -98,11 +109,6 @@ async function convert_accum(urls, datasets, dt, offset, compression_level) {
 }
 
 async function download_gfs(url, datasets) {
-  return _download_gfs(url, datasets)
-    .catch(() => _download_gfs(url.replace(base_url, backup_url), datasets));
-}
-
-async function _download_gfs(url, datasets) {
   let idx_url = url + '.idx';
   let idx = await download(idx_url);
   let idx_string = (await readFile(idx)).toString();
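
For illustration only (not part of this commit), here is how the choose_base_url function above behaves for a few example states. The timestamps and the assumption that last_successful_update is something new Date() can parse (e.g. an ISO string) are mine:

// Sketch of choose_base_url behavior, using the constants defined above.

// No prior state (first run, or state saved before this change): default to FTPPRD.
choose_base_url({});
// => 'https://ftpprd.ncep.noaa.gov/data/nccf/com/'

// A successful update less than 5 minutes ago: keep whatever base worked last.
choose_base_url({
  base_url: 'https://ftpprd.ncep.noaa.gov/data/nccf/com/',
  last_successful_update: new Date(Date.now() - 2 * 60_000).toISOString(),
});
// => 'https://ftpprd.ncep.noaa.gov/data/nccf/com/'

// 5 minutes or more without a successful update: flip to the other NCEP service.
choose_base_url({
  base_url: 'https://ftpprd.ncep.noaa.gov/data/nccf/com/',
  last_successful_update: new Date(Date.now() - 10 * 60_000).toISOString(),
});
// => 'https://nomads.ncep.noaa.gov/pub/data/nccf/com/'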
backend/sources/gfswave.js: 9 changes (5 additions, 4 deletions)

@@ -1,7 +1,7 @@
 import {
   shared_metadata,
   increment_state,
-  base_url,
+  choose_base_url,
   convert_simple,
 } from './gfs.js';
 import { Datetime } from '../datetime.js';
@@ -13,15 +13,16 @@ export async function forage(current_state, datasets) {
 
   let metadatas = datasets.map(d => typical_metadata(d, dt, shared_metadata));
 
-  let url = gfswave_url({ forecast, offset, system });
+  let base_url = choose_base_url(current_state);
+  let url = gfswave_url({ forecast, offset, system, base_url });
   let compression_level = system === 'gdas' && offset < 6 ? 11 : 6;
 
   await convert_simple(url, datasets, dt, compression_level);
 
-  return { metadatas, new_state: { forecast, offset, system } };
+  return { metadatas, new_state: { forecast, offset, system, base_url } };
 }
 
-function gfswave_url({ forecast, offset, system }) {
+function gfswave_url({ forecast, offset, system, base_url }) {
   let fdt = Datetime.from(forecast);
 
   return base_url
backend/sources/rtgssthr.js: 8 changes (4 additions, 4 deletions)

@@ -1,4 +1,4 @@
-import { base_url, backup_url } from './gfs.js';
+import { choose_base_url } from './gfs.js';
 import { Datetime } from '../datetime.js';
 import { download } from '../download.js';
 import { grib2 } from '../file-conversions.js';
@@ -25,17 +25,17 @@ export async function forage(current_state, datasets) {
 
   let metadatas = datasets.map(d => typical_metadata(d, dt, metadata));
 
+  let base_url = choose_base_url(current_state);
   let url = base_url
     + 'nsst/prod/'
     + `nsst.${dt.year}${dt.p_month}${dt.p_day}/`
     + 'rtgssthr_grb_0.083_awips.grib2';
 
-  let input = await download(url)
-    .catch(() => download(url.replace(base_url, backup_url)));
+  let input = await download(url);
 
   let output = output_path(datasets[0].output_dir, date);
   await grib2(input, output, { compression_level: 11 });
   await rm(input);
 
-  return { metadatas, new_state: { date } };
+  return { metadatas, new_state: { date, base_url } };
 }
