From f7ae7ed3f981a6b2437e05d97f1a03dcb18fbdff Mon Sep 17 00:00:00 2001 From: "Documenter.jl" Date: Wed, 10 Jul 2024 07:46:40 +0000 Subject: [PATCH] build based on 23bd8c63 --- dev/.documenter-siteinfo.json | 2 +- .../BoundaryFilePreparation/index.html | 10 ++++++++-- dev/Boundaries/Boundarystrategies/index.html | 6 ------ dev/Build/Build_with_cmake/index.html | 2 +- dev/Build/Build_with_makeup/index.html | 2 +- .../ClimateGeneration/index.html | 2 +- .../DownloadInputData/index.html | 2 +- .../ClimateSimulation/index.html | 2 +- dev/DataAssimilation/CHKEVO/index.html | 2 +- dev/DataAssimilation/DFS/index.html | 2 +- dev/DataAssimilation/DaAlgorithms/index.html | 2 +- .../DigitalFilterInitialization/index.html | 2 +- dev/DataAssimilation/LSMIXandJk/index.html | 2 +- dev/DataAssimilation/MTEN/index.html | 2 +- dev/DataAssimilation/NWECHKEVO/index.html | 2 +- .../ObservationOperators/index.html | 2 +- dev/DataAssimilation/Screening/index.html | 2 +- dev/DataAssimilation/SingleObs/index.html | 2 +- .../StructureFunctions/index.html | 2 +- .../Surface/CANARI/index.html | 2 +- .../Surface/CANARI_EKF_SURFEX/index.html | 2 +- .../Surface/CANARI_OI_MAIN/index.html | 2 +- .../Surface/SurfaceAnalysis/index.html | 2 +- dev/EPS/BDSTRATEGY/index.html | 2 +- dev/EPS/Howto/index.html | 2 +- dev/EPS/SLAF/Get_pertdia.pl.pm/index.html | 2 +- dev/EPS/SLAF/index.html | 2 +- dev/EPS/SPP/index.html | 2 +- dev/EPS/SPPImplementation/index.html | 2 +- dev/EPS/SPPT/index.html | 2 +- dev/EPS/Setup/index.html | 2 +- dev/EPS/System/index.html | 2 +- .../ConfigureYourExperiment/index.html | 2 +- .../How_to_use_hires_topography/index.html | 2 +- .../ModelDomain/index.html | 2 +- .../Namelists/index.html | 2 +- .../PlatformConfiguration/index.html | 2 +- .../UpdateNamelists/index.html | 2 +- .../UseofObservation/index.html | 2 +- .../VerticalGrid/index.html | 2 +- .../namelist_sfx_forecast/index.html | 2 +- dev/ForecastModel/Forecast/index.html | 2 +- dev/ForecastModel/ForecastSettings/index.html | 2 +- dev/ForecastModel/HR/index.html | 2 +- .../NearRealTimeAerosols/index.html | 2 +- dev/ForecastModel/OCDN2/index.html | 2 +- dev/ForecastModel/Outputlist/index.html | 2 +- .../SingleColumnModel/Forcing/index.html | 2 +- .../SingleColumnModel/MUSC/index.html | 2 +- .../SingleColumnModel/MUSC_EMS/index.html | 2 +- .../SingleColumnModel/MUSC_vars/index.html | 2 +- dev/ForecastModel/WindFarms/index.html | 2 +- dev/Observations/Aeolus/index.html | 2 +- dev/Observations/Amv/index.html | 2 +- dev/Observations/Ascat/index.html | 2 +- dev/Observations/Atovs/index.html | 2 +- dev/Observations/Bator/index.html | 2 +- dev/Observations/Cope/index.html | 2 +- dev/Observations/GNSS/index.html | 2 +- dev/Observations/Iasi/index.html | 2 +- dev/Observations/Modes/index.html | 2 +- dev/Observations/ObservationData/index.html | 2 +- .../ObservationPreprocessing/index.html | 2 +- dev/Observations/Oulan/index.html | 2 +- dev/Observations/RadarData/index.html | 2 +- dev/Observations/SYNOP/index.html | 2 +- dev/Observations/Scatt/index.html | 2 +- dev/Observations/Seviri/index.html | 2 +- dev/Overview/Binaries/index.html | 2 +- dev/Overview/Content/index.html | 2 +- dev/Overview/FileFormats/index.html | 2 +- dev/Overview/Source/index.html | 2 +- dev/Overview/da_graph/index.html | 2 +- dev/PostProcessing/Diagnostics/index.html | 2 +- dev/PostProcessing/FileConversions/index.html | 2 +- dev/PostProcessing/Fullpos/index.html | 2 +- dev/PostProcessing/Interpolation/index.html | 2 +- dev/PostProcessing/gl/index.html | 2 +- 
dev/PostProcessing/xtool/index.html | 2 +- dev/SuiteManagement/ECFLOW/index.html | 2 +- dev/System/Build_local_docs/index.html | 2 +- dev/System/DrHook/index.html | 2 +- dev/System/ECMWF/ECMWF_teleport/index.html | 2 +- .../ECMWF/RunningHarmonieOnAtos/index.html | 2 +- .../GitDeveloperDocumentation/index.html | 2 +- dev/System/HarmonieTestbed/index.html | 2 +- dev/System/Local/QuickStartLocal/index.html | 2 +- dev/System/MFaccess/index.html | 2 +- dev/System/ReleaseProcess/index.html | 2 +- dev/System/StandaloneOdb/index.html | 2 +- dev/System/TheHarmonieScript/index.html | 2 +- dev/System/UpdateNamelists/index.html | 2 +- .../AllobsVerification/index.html | 2 +- .../CommonVerification/index.html | 2 +- .../Extract4verification/index.html | 2 +- dev/Verification/HARP/index.html | 2 +- dev/Verification/Obsmon/index.html | 2 +- dev/Verification/Verification/index.html | 2 +- dev/Visualization/EPyGrAM/index.html | 2 +- dev/assets/README/index.html | 2 +- dev/index.html | 2 +- dev/objects.inv | Bin 16514 -> 16547 bytes dev/references/index.html | 2 +- dev/search_index.js | 2 +- 104 files changed, 109 insertions(+), 109 deletions(-) delete mode 100644 dev/Boundaries/Boundarystrategies/index.html diff --git a/dev/.documenter-siteinfo.json b/dev/.documenter-siteinfo.json index 6a8e3f6009..2232e345e1 100644 --- a/dev/.documenter-siteinfo.json +++ b/dev/.documenter-siteinfo.json @@ -1 +1 @@ -{"documenter":{"julia_version":"1.10.4","generation_timestamp":"2024-07-05T14:47:23","documenter_version":"1.5.0"}} \ No newline at end of file +{"documenter":{"julia_version":"1.10.4","generation_timestamp":"2024-07-10T07:46:24","documenter_version":"1.5.0"}} \ No newline at end of file diff --git a/dev/Boundaries/BoundaryFilePreparation/index.html b/dev/Boundaries/BoundaryFilePreparation/index.html index c0523e6e02..c86c8968c4 100644 --- a/dev/Boundaries/BoundaryFilePreparation/index.html +++ b/dev/Boundaries/BoundaryFilePreparation/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

Preparation of initial and boundary files

Introduction

HARMONIE can be coupled with external models such as IFS, ARPEGE and HIRLAM. Internally it is possible to nest the different ALADIN/ALARO/AROME configurations, with some restrictions. In the following we describe how the initial and boundary files are generated depending on the different configurations. Boundary file preparation basically includes two parts: forecast file fetching and boundary file generation.

The ECFLOW tasks for initial and boundary preparation

Boundary strategies

There are a number of ways to choose which forecast lengths you use as boundaries. The strategy is determined by BDSTRATEGY in ecf/config_exp.h and there are a number of strategies implemented.

  • available : Search for available files in BDDIR and try to keep forecast consistency. This is meant to be used operationally since it will at least keep your run going, but with old boundaries, if no new boundaries are available.
  • simulate_operational : Mimic the behaviour of the operational runs using ECMWF 6h old boundaries.
  • same_forecast : Use all boundaries from the same forecast, start from analysis
  • analysis_only : Use only analyses as boundaries. Note that BDINT cannot be shorter than the frequency of the analyses.
  • latest : Use the latest possible boundary with the shortest forecast length
  • RCR_operational : Mimic the behaviour of the RCR runs, i.e.
    • 12h old boundaries at 00 and 12 and
    • 06h old boundaries at 06 and 18
  • jb_ensemble : Same as same_forecast but used for JB-statistics generation. With this you should export JB_ENS_MEMBER=some_number
  • eps_ec : ECMWF EPS members (on reduced Gaussian grid). It is only meaningful with ENSMSEL non-empty, i.e., ENSSIZE > 0
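
As an illustration, selecting a strategy is a single setting in ecf/config_exp.h; a minimal sketch (the value shown is just an example):

BDSTRATEGY=simulate_operational   # one of the strategies listed above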

All the strategies are defined in scr/Boundary_strategy.pl. The script generates a file bdstrategy in your working directory that could look like:

 Boundary strategy
+

Preparation of initial and boundary files

Introduction

HARMONIE can be coupled with external models such as IFS, ARPEGE and HIRLAM. Internally it is possible to nest the different ALADIN/ALARO/AROME configurations, with some restrictions. In the following we describe how the initial and boundary files are generated depending on the different configurations. Boundary file preparation basically includes two parts: forecast file fetching and boundary file generation.

The ECFLOW tasks for initial and boundary preparation

Boundary strategies

There are a number of ways to choose which forecast lengths you use as boundaries. The strategy is determined by BDSTRATEGY in ecf/config_exp.h and there are a number of strategies implemented.

  • available : Search for available files in BDDIR and try to keep forecast consistency. This is meant to be used operationally since it will at least keep your run going, but with old boundaries, if no new boundaries are available.
  • simulate_operational : Mimic the behaviour of the operational runs using ECMWF 6h old boundaries.
  • same_forecast : Use all boundaries from the same forecast, start from analysis
  • analysis_only : Use only analyses as boundaries. Note that BDINT cannot be shorter than the frequency of the analyses.
  • latest : Use the latest possible boundary with the shortest forecast length
  • RCR_operational : Mimic the behaviour of the RCR runs, i.e.
    • 12h old boundaries at 00 and 12 and
    • 06h old boundaries at 06 and 18
  • jb_ensemble : Same as same_forecast but used for JB-statistics generation. With this you should export JB_ENS_MEMBER=some_number
  • eps_ec : ECMWF EPS members (on reduced Gaussian grid). It is only meaningful with ENSMSEL non-empty, i.e., ENSSIZE > 0
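
As an illustration, selecting a strategy is a single setting in ecf/config_exp.h; a minimal sketch (the value shown is just an example):

BDSTRATEGY=simulate_operational   # one of the strategies listed above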

All the strategies are defined in scr/Boundary_strategy.pl. The script generates a file bdstrategy in your working directory that could look like:

 Boundary strategy
 
        DTG: 2011090618
         LL: 36
@@ -51,4 +51,10 @@
 # hh_offset is 0 ; DTG is  
 SURFEX_INI| /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/20110901_12/SURFXINI.lfi 
 000|2011090112 /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/20110901_12/ELSCFHARMALBC000 /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/g05a/archive/2011/09/01/12/fc20110901_12+000 scp smhi:/data/arkiv/field/f_archive/hirlam/G05_60lev/201109/G05_201109011200+000H00M 
-003|2011090115 /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/20110901_12/ELSCFHARMALBC001 /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/g05a/archive/2011/09/01/12/fc20110901_12+003 scp smhi:/data/arkiv/field/f_archive/hirlam/G05_60lev/201109/G05_201109011200+003H00M 

In this example an scp from smhi will be executed if the expected file is not in BDDIR. There are a few environment variables that one can play with in ecf/config_exp.h that deal with the initial and boundary files:

  • HOST_MODEL : Tells the origin of your boundary data
    • ifs : ecmwf data
    • hir : hirlam data
    • ald : Output from aladin physics, this also covers arpege data after fullpos processing.
    • ala : Output from alaro physics
    • aro : Output from arome physics
  • BDINT : Interval of boundaries in hours
  • BDLIB : Name of the forcing experiment. Set
    • ECMWF to use MARS data
    • RCRa to use RCRa data from ECFS
    • Other HARMONIE/HIRLAM experiment
  • BDDIR : The path to the boundary files. In the default location BDDIR=$HM_DATA/${BDLIB}/archive/@YYYY@/@MM@/@DD@/@HH@ the files retrieved from e.g. MARS will be stored in a separate directory. One could also configure this so that all the retrieved files are located in your working directory $WRK. Locally this points to the directory where you have all your common boundary HIRLAM or ECMWF files.
  • INT_BDFILE : The full path of the interpolated boundary files. The default setting lets the boundary files be removed by directing them to $WRK.
  • INT_SINI_FILE : The full path of the initial surfex file.

There are a few optional environment variables that could be used that are not visible in config_exp.h

  • EXT_BDDIR : External location of boundary data. If not set, the rules depend on HOST_MODEL
  • EXT_ACCESS : Method for accessing external data. If not set, the rules depend on HOST_MODEL
  • BDCYCLE : Assimilation cycle interval of forcing data, default is 6h.
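
Putting these together, the boundary definition in config_exp.h could look like the following minimal sketch (all values are illustrative):

HOST_MODEL=ifs                                         # boundaries taken from ECMWF/IFS
BDLIB=ECMWF                                            # forcing experiment, here MARS data
BDINT=3                                                # boundary interval in hours
BDCYCLE=6                                              # assimilation cycle of the forcing data
BDDIR=$HM_DATA/${BDLIB}/archive/@YYYY@/@MM@/@DD@/@HH@  # where fetched files are stored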

More about this can be found in the Boundary_strategy.pl script.

The bdstrategy file is parsed by the script ExtractBD.

  • scr/ExtractBD Checks if data are on BDDIR otherwise copy from EXT_BDDIR. The operation performed can be different depending on HOST and HOST_MODEL. IFS data at ECMWF are extracted from MARS, RCR data are copied from ECFS.
    • Input parameters: Forecast hour
    • Executables: none.

In case data should be retrieved from MARS there is also a stage step. When calling MARS with the stage command we ask MARS to make sure the data are on disk. In HARMONIE we ask for all data for one day of forecasts (normally four cycles) at a time.
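
For orientation, a MARS stage request has the same shape as a retrieve request, just with the stage verb; the sketch below is schematic only and not the exact request HARMONIE constructs (all keyword values are illustrative):

stage,
  class   = od,
  stream  = oper,
  expver  = 1,
  type    = fc,
  levtype = ml,
  date    = 20110901,
  time    = 12,
  step    = 0/to/36/by/3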

Near real time aerosols

The use of near real time aerosols requires the presence of aerosol fields in the boundary files.

  • BDAERO : Origin of the aerosol fields
    • none : no aerosols (default configuration)
    • cams : aerosol from CAMS.

A bdstrategy_cams file is generated. After the data is retrieved, the files are merged with the files from the HOST_MODEL to get the final boundary condition files.

Initial and Boundary file generation

To be able to start the model we need the variables defining the model state.

  • T,U,V,PS in spectral space
  • Q in gridpoint or spectral space

Optional:

  • $Q_l$, $Q_i$, $Q_r$, $Q_g$, $Q_s$, $Q_h$
  • TKE

For the surface we need the different state variables for the different tiles. The scheme selected determines the variables.

Boundary files (coupling files) for HARMONIE are prepared in two different ways depending on the nesting procedure defined by HOST_MODEL.

Using gl

If you use data from HIRLAM or ECMWF gl_grib_api will be called to generate boundaries. The generation can be summarized in the following steps:

  • Set up the geometry and what kind of fields to read depending on HOST_MODEL
  • Read the necessary climate data from a climate file
  • Translate and interpolate the surface variables horizontally if the file is to be used as an initial file. All interpolation respects land sea mask properties. The soil water is not interpolated directly but interpolated using the Soil Wetness Index to preserve the properties of the soil between different models. The treatment of the surface fields is only done for the initial file.
  • Horizontal interpolation of upper air fields as well as restaggering of winds.
  • Vertical interpolation using the same method (etaeta) as in HIRLAM
    • Conserve boundary layer structure
    • Conserve integrated quantities
  • Output to an FA file ( partly in spectral space )

gl_grib_api is called by the script scr/gl_bd where we make different choices depending on PHYSICS and HOST_MODEL

When starting a forecast there are options, through NREQIN and NCOUPLING, for whether e.g. cloud properties and TKE should be read from the initial/boundary file. At the moment these fields are read from the initial file but not coupled to. gl reads them if they are available in the input files and sets them to zero otherwise. For a non-hydrostatic run the non-hydrostatic pressure departure and the vertical divergence are demanded as initial fields. The pressure departure is by definition zero if you start from a hydrostatic model, and since the error made by disregarding the vertical divergence is small, it is also set to zero in gl. There is also a choice in the forecast model to run with Q in gridpoint or in spectral space.

It's possible to use an input file without e.g. the uppermost levels. By setting LDEMAND_ALL_LEVELS=.FALSE. the missing levels will be ignored. This is used at some institutes to reduce the amount of data transferred for the operational runs.
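
As a sketch, this could be set in the gl namelist along the following lines (the group name naminterp is an assumption here, not confirmed by this page):

&naminterp
  ldemand_all_levels=.FALSE.,  ! ignore missing uppermost levels in the input file
/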

Using fullpos

If you use data generated by HARMONIE you will use fullpos to generate boundaries and initial conditions. Here we will describe how it's implemented in HARMONIE, but there is also good documentation on the gmapdoc site.

In HARMONIE it is done by the script scr/E927. It contains the following steps:

  • Fetch climate files. Fullpos needs a climate file and the geometry definition for both the input and output domains.

  • Set different moist variables in the namelists depending on whether you run AROME or ALADIN/ALARO.

  • Check if input data has Q in gridpoint or spectral space.

  • Demand NH variables if we run NH.

  • Determine the number of levels in the input file and extract the correct levels from the definition in scr/Vertical_level.pl

  • Run fullpos

E927 is also called from 4DVAR when the resolution is changed between the inner and outer loops.

Generation of initial data for SURFEX

For SURFEX we have to fill the different tiles with correct information from the input data. This is called the PREP step in the SURFEX context. scr/Prep_ini_surfex creates an initial SURFEX file from an FA file if you run with SURFACE=surfex.

Read more about SURFEX

+003|2011090115 /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/20110901_12/ELSCFHARMALBC001 /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/g05a/archive/2011/09/01/12/fc20110901_12+003 scp smhi:/data/arkiv/field/f_archive/hirlam/G05_60lev/201109/G05_201109011200+003H00M

In this example an scp from smhi will be executed if the expected file is not in BDDIR. There are a few environment variables that one can play with in ecf/config_exp.h that deal with the initial and boundary files:

  • HOST_MODEL : Tells the origin of your boundary data
    • ifs : ecmwf data
    • hir : hirlam data
    • ald : Output from aladin physics, this also covers arpege data after fullpos processing.
    • ala : Output from alaro physics
    • aro : Output from arome physics
  • BDINT : Interval of boundaries in hours
  • BDLIB : Name of the forcing experiment. Set
    • ECMWF to use MARS data
    • RCRa to use RCRa data from ECFS
    • Other HARMONIE/HIRLAM experiment
  • BDDIR : The path to the boundary files. In the default location BDDIR=$HM_DATA/${BDLIB}/archive/@YYYY@/@MM@/@DD@/@HH@ the files retrieved from e.g. MARS will be stored in a separate directory. One could also configure this so that all the retrieved files are located in your working directory $WRK. Locally this points to the directory where you have all your common boundary HIRLAM or ECMWF files.
  • INT_BDFILE : The full path of the interpolated boundary files. The default setting lets the boundary files be removed by directing them to $WRK.
  • INT_SINI_FILE : The full path of the initial surfex file.

There are a few optional environment variables that could be used that are not visible in config_exp.h

  • EXT_BDDIR : External location of boundary data. If not set, the rules depend on HOST_MODEL
  • EXT_ACCESS : Method for accessing external data. If not set, the rules depend on HOST_MODEL
  • BDCYCLE : Assimilation cycle interval of forcing data, default is 6h.
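
Putting these together, the boundary definition in config_exp.h could look like the following minimal sketch (all values are illustrative):

HOST_MODEL=ifs                                         # boundaries taken from ECMWF/IFS
BDLIB=ECMWF                                            # forcing experiment, here MARS data
BDINT=3                                                # boundary interval in hours
BDCYCLE=6                                              # assimilation cycle of the forcing data
BDDIR=$HM_DATA/${BDLIB}/archive/@YYYY@/@MM@/@DD@/@HH@  # where fetched files are stored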

More about this can be found in the Boundary_strategy.pl script.

The bdstrategy file is parsed by the script ExtractBD.

  • scr/ExtractBD Checks if data are on BDDIR otherwise copy from EXT_BDDIR. The operation performed can be different depending on HOST and HOST_MODEL. IFS data at ECMWF are extracted from MARS, RCR data are copied from ECFS.
    • Input parameters: Forecast hour
    • Executables: none.

In case data should be retrieved from MARS there is also a stage step. When calling MARS with the stage command we ask MARS to make sure the data are on disk. In HARMONIE we ask for all data for one day of forecasts (normally four cycles) at a time.
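
For orientation, a MARS stage request has the same shape as a retrieve request, just with the stage verb; the sketch below is schematic only and not the exact request HARMONIE constructs (all keyword values are illustrative):

stage,
  class   = od,
  stream  = oper,
  expver  = 1,
  type    = fc,
  levtype = ml,
  date    = 20110901,
  time    = 12,
  step    = 0/to/36/by/3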

Near real time aerosols

The use of near real time aerosols requires the presence of aerosol fields in the boundary files.

  • BDAERO : Origin of the aerosol fields
    • none : no aerosols (default configuration)
    • cams : aerosol from CAMS.

A bdstrategy_cams file is generated. After the data is retrieved, the files are merged with the files from the HOST_MODEL to get the final boundary condition files.

Initial and Boundary file generation

To be able to start the model we need the variables defining the model state.

  • T,U,V,PS in spectral space
  • Q in gridpoint or spectral space

Optional:

  • $Q_l$, $Q_i$, $Q_r$, $Q_g$, $Q_s$, $Q_h$
  • TKE

For the surface we need the different state variables for the different tiles. The scheme selected determines the variables.

Boundary files (coupling files) for HARMONIE are prepared in two different ways depending on the nesting procedure defined by HOST_MODEL.

Using gl

If you use data from HIRLAM or ECMWF gl will be called to generate boundaries. The generation can be summarized in the following steps:

  • Set up the geometry and what kind of fields to read depending on HOST_MODEL
  • Read the necessary climate data from a climate file
  • Translate and interpolate the surface variables horizontally if the file is to be used as an initial file. All interpolation respects land sea mask properties. The soil water is not interpolated directly but interpolated using the Soil Wetness Index to preserve the properties of the soil between different models. The treatment of the surface fields is only done for the initial file.
  • Horizontal interpolation of upper air fields as well as restaggering of winds.
  • Vertical interpolation using the same method (etaeta) as in HIRLAM
    • Conserve boundary layer structure
    • Conserve integrated quantities
  • Output to an FA file ( partly in spectral space )

gl is called by the script scr/gl_bd where we make different choices depending on PHYSICS and HOST_MODEL

When starting a forecast there are options, through NREQIN and NCOUPLING, for whether e.g. cloud properties and TKE should be read from the initial/boundary file. At the moment these fields are read from the initial file but not coupled to. gl reads them if they are available in the input files and sets them to zero otherwise. For a non-hydrostatic run the non-hydrostatic pressure departure and the vertical divergence are demanded as initial fields. The pressure departure is by definition zero if you start from a hydrostatic model, and since the error made by disregarding the vertical divergence is small, it is also set to zero in gl. There is also a choice in the forecast model to run with Q in gridpoint or in spectral space.

It's possible to use an input file without e.g. the uppermost levels. By setting LDEMAND_ALL_LEVELS=.FALSE. the missing levels will be ignored. This is used at some institutes to reduce the amount of data transferred for the operational runs.
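
As a sketch, this could be set in the gl namelist along the following lines (the group name naminterp is an assumption here, not confirmed by this page):

&naminterp
  ldemand_all_levels=.FALSE.,  ! ignore missing uppermost levels in the input file
/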

Using fullpos

If you use data generated by HARMONIE you will use fullpos to generate boundaries and initial conditions. Here we will describe how it's implemented in HARMONIE, but there is also good documentation on the gmapdoc site.

In HARMONIE it is done by the script scr/E927. It contains the following steps:

  • Fetch climate files. Fullpos needs a climate file and the geometry definition for both the input and output domains.

  • Set different moist variables in the namelists depending on whether you run AROME or ALADIN/ALARO.

  • Check if input data has Q in gridpoint or spectral space.

  • Demand NH variables if we run NH.

  • Determine the number of levels in the input file and extract the correct levels from the definition in scr/Vertical_level.pl

  • Run fullpos

E927 is also called from 4DVAR when the resolution is changed between the inner and outer loops.

Generation of initial data for SURFEX

For SURFEX we have to fill the different tiles with correct information from the input data. This is called the PREP step in the SURFEX context. scr/Prep_ini_surfex creates an initial SURFEX file from an FA file if you run with SURFACE=surfex.

Read more about SURFEX

Reading SST/SIC information

It is possible to update sea-surface temperature (SST) and sea-ice concentration (SIC) from the LBC/coupling files. Since June 2018 and Cycle 45r1, ECMWF's IFS has used interactive ocean and sea ice components. It has been shown that use of these components "... can significantly improve SST predictions in Europe, and as a result, predictions of near-surface air temperature". The use of SST and SIC as surface boundary conditions has the potential to improve the quality of LAM NWP forecasts. See the ECMWF Newsletter article https://www.ecmwf.int/en/newsletter/156/news/effects-ocean-coupling-weather-forecasts describing examples of how the coupling improved IFS forecasts in the seas near Europe.

The reading of these data is controlled by the SSTSIC_UPD switch in ecf/config_exp.h. With SSTSIC_UPD=no (default) SST/SIC are read at analysis time and not updated during the forecast. With SSTSIC_UPD=yes SST and SIC are read by the model from files created by the Interpol_sst_mll task in the Boundaries ecFlow family.
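
In config_exp.h terms this is a single switch; a minimal sketch:

SSTSIC_UPD=yes   # update SST/SIC from the boundary files during the forecast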

Data preparation

The ecf/Interpol_sst_mll.ecf task reads the bdstrategy file described above and calls the scr/Interpol_sst_mll script to "Interpolate SST/SIC from various sources to the model geometry for given MLL & INFILE". The script uses gl (with the -sst3 option set) to carry out the interpolation.

Interpol_sst_mll input   Description
-h                       Command-line option. Model forecast hour
-i                       Command-line option. Input file name
SST_SOURCES              Environment variable. External SST source used to set the gl namelist
EXT_SST_SIC_$LLL         Hard-coded. Output filename expected by the code (LLL is the forecast length).

The code

The reading of the SST/SIC input files (EXT_SST_SIC_$LLL) is controlled in the scripts by the SSTSIC_UPD environment variable. With it set to yes, the following NAMMCC namelist entries are set to .TRUE.:

&NAMMCC
  LMCC01_MSE=.TRUE.,
  LMCCECSST=.TRUE.,
/

From src/arpifs/module/yommcc.F90:

! LMCC01_MSE = .T.   ===> THE CLIM.FIELD(S) ARE READ IN LBC FILE AND USED IN SURFEX
!  :
! LMCCECSST =.T. ===> SST FROM ECMWF (SST-ANA COMB with surf temp over seaice)
!           =.F. ===> SST FROM SURFTEMPERATURE
diff --git a/dev/Boundaries/Boundarystrategies/index.html deleted file mode 100644 index 65c08e41b8..0000000000 --- a/dev/Boundaries/Boundarystrategies/index.html +++ /dev/null @@ -1,6 +0,0 @@
diff --git a/dev/Build/Build_with_cmake/index.html b/dev/Build/Build_with_cmake/index.html index d3a3657256..f0a7a48c22 100644 --- a/dev/Build/Build_with_cmake/index.html +++ b/dev/Build/Build_with_cmake/index.html @@ -100,4 +100,4 @@ set(Fortran_DEFAULT_FLOAT_64 "-fdefault-double-8 -fdefault-real-8") set(Fortran_DEFAULT_INT_32 "") -set(Fortran_DEFAULT_INT_64 "-fdefault-integer-8")

When running cmake configure, and depending on the build precision, a subset of these flags is added to the CMAKE_Fortran_FLAGS variable, thus affecting all the Fortran targets. Currently, the DEFAULT_INT variables are not used in the CMake build, but are provided for consistency.

Note

When creating FortranCompilerFlags.<compiler type>.cmake, <compiler type> should follow the naming provided by CMAKE_Fortran_COMPILER_ID, for example, GNU for gfortran and Intel for ifort. See the CMake documentation for a list of all supported compiler vendors.

Note on generating different build systems with CMake

CMake is a build system generator and it can create different native build systems from the same CMakeLists.txt. The full list of supported generators is available in the CMake documentation; in practice, however, when building HARMONIE-AROME on a Linux machine (or on a UNIX-like one in general) there are two options: the Unix Makefiles generator and the Ninja generator.

Note

A specific CMake generator can be selected at configure time by passing the -G <gen> flag to cmake. For example, cmake -G Ninja <...other CMake args...> or cmake -G "Unix Makefiles" <...other CMake args...>.

Practical considerations

When to re-run CMake configure in my experiment?

In principle, it should be enough to run CMake configure only once to generate the build system; after that, any modification of the source code or configuration files should be detected by the build system, triggering the required re-build steps. The only time CMake configure must be explicitly re-run is when you add a new source file to HARMONIE-AROME. The current implementation of the CMake build scans the file system looking for the source files to compile, so just putting a new file under, say, src/surfex/SURFEX/ and re-running the build isn't enough, since this new file would still be unknown to the build system; hence the configure step needs to be rerun first.
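
In practice, picking up a newly added source file thus amounts to re-running configure before building, along these lines ($BUILD_DIR is an illustrative name for your existing build directory):

cd $BUILD_DIR
cmake $HM_LIB/src                  # re-run configure so the new file is discovered
cmake --build . --target install   # then build as usual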

I added some code and CMake build stopped working

Unlike makeup, CMake build for HARMONIE-AROME enforces inter-project boundaries and each project has an explicit list of its dependencies. For example, it is not possible to use modules from arpifs in surfex, but it is possible to use mse modules. If after a code modification CMake starts complaining about missing module files, then it means that this modification violates the project dependencies in the build. To fix this problem, please update your changeset to use only the available modules. If you believe that your modification is sound with respect to inter-project dependencies of HARMONIE-AROME and it's the CMake build which misses a dependency, please open a new GitHub issue explaining the problem.

Can I move/copy my build directory to another directory and re-use it?

No, it's generally a bad idea. CMake loves absolute paths and uses them in many parts of the generated build system, thus simply moving the build directory would break the build.

Something went wrong and CMake doesn't behave anymore, can I refresh the build without nuking the whole build directory?

You can try deleting just the CMakeCache.txt file from the build directory.

CMake picks a wrong compiler

Sometimes CMake selects a system default compiler instead of the compiler provided, for example, by loading a module. There are a few options available to force CMake to use a specific compiler. A straightforward one is to set the compiler via commonly-used environment variables (for example, export FC=ifort for a Fortran compiler). Another way is to set the correct compilers in command-line arguments when configuring the CMake build (for example, adding -DCMAKE_Fortran_COMPILER=ifort to the list of CMake arguments). CMake recognizes CMAKE_<LANG>_COMPILER passed from the command line, where <LANG> can be Fortran, C or CXX.
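
Both approaches as one-liners (the compiler names are just examples):

export FC=ifort && cmake $HM_LIB/src
# or equivalently:
cmake $HM_LIB/src -DCMAKE_Fortran_COMPILER=ifort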

Can I get more verbose output when compiling with CMake?

To get detailed information about individual steps and commands issued when compiling HARMONIE-AROME with CMake add -v to your build command:

cmake --build . --target install -v

Is there a way to visualise dependencies between individual targets of HARMONIE-AROME in CMake build?

Since all the inter-target dependencies are defined in CMake scripts it can be useful to have an option to produce a graphical overview of the dependency graph of HARMONIE-AROME without grepping all the CMakeLists.txt files. This can be achieved by adding the --graphviz=<output file name> to the list of CMake arguments, for example:

cmake $HM_LIB/src --graphviz=harmonie.dot

then the produced dependency graph can be visualized using the dot tool:

dot -Tx11 harmonie.dot

The full dependency graph may be very cluttered and take quite some time to render, so it might be a good idea to plot dependencies of a single target, for example:

dot -Tx11 harmonie.dot.surf-static

See the CMake documentation on graphviz for additional information about fine-tuning of the generated graphs.

I need more information about CMake, where do I find documentation?

CMake documentation portal is a great source of detailed information about the various aspects of the CMake build system.

+set(Fortran_DEFAULT_INT_64 "-fdefault-integer-8")

When running cmake configure, and depending on the build precision, a subset of these flags is added to the CMAKE_Fortran_FLAGS variable, thus affecting all the Fortran targets. Currently, the DEFAULT_INT variables are not used in the CMake build, but are provided for consistency.

Note

When creating FortranCompilerFlags.<compiler type>.cmake, <compiler type> should follow the naming provided by CMAKE_Fortran_COMPILER_ID, for example, GNU for gfortran and Intel for ifort. See the CMake documentation for a list of all supported compiler vendors.

Note on generating different build systems with CMake

CMake is a build system generator and it can create different native build systems from the same CMakeLists.txt. The full list of supported generators is available in the CMake documentation; in practice, however, when building HARMONIE-AROME on a Linux machine (or on a UNIX-like one in general) there are two options: the Unix Makefiles generator and the Ninja generator.

Note

A specific CMake generator can be selected at configure time by passing the -G <gen> flag to cmake. For example, cmake -G Ninja <...other CMake args...> or cmake -G "Unix Makefiles" <...other CMake args...>.

Practical considerations

When to re-run CMake configure in my experiment?

In principle, it should be enough to run CMake configure only once to generate the build system; after that, any modification of the source code or configuration files should be detected by the build system, triggering the required re-build steps. The only time CMake configure must be explicitly re-run is when you add a new source file to HARMONIE-AROME. The current implementation of the CMake build scans the file system looking for the source files to compile, so just putting a new file under, say, src/surfex/SURFEX/ and re-running the build isn't enough, since this new file would still be unknown to the build system; hence the configure step needs to be rerun first.
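
In practice, picking up a newly added source file thus amounts to re-running configure before building, along these lines ($BUILD_DIR is an illustrative name for your existing build directory):

cd $BUILD_DIR
cmake $HM_LIB/src                  # re-run configure so the new file is discovered
cmake --build . --target install   # then build as usual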

I added some code and CMake build stopped working

Unlike makeup, CMake build for HARMONIE-AROME enforces inter-project boundaries and each project has an explicit list of its dependencies. For example, it is not possible to use modules from arpifs in surfex, but it is possible to use mse modules. If after a code modification CMake starts complaining about missing module files, then it means that this modification violates the project dependencies in the build. To fix this problem, please update your changeset to use only the available modules. If you believe that your modification is sound with respect to inter-project dependencies of HARMONIE-AROME and it's the CMake build which misses a dependency, please open a new GitHub issue explaining the problem.

Can I move/copy my build directory to another directory and re-use it?

No, it's generally a bad idea. CMake loves absolute paths and uses them in many parts of the generated build system, thus simply moving the build directory would break the build.

Something went wrong and CMake doesn't behave anymore, can I refresh the build without nuking the whole build directory?

You can try deleting just the CMakeCache.txt file from the build directory.

CMake picks a wrong compiler

Sometimes CMake selects a system default compiler instead of the compiler provided, for example, by loading a module. There are a few options available to force CMake to use a specific compiler. A straightforward one is to set the compiler via commonly-used environment variables (for example, export FC=ifort for a Fortran compiler). Another way is to set the correct compilers in command-line arguments when configuring the CMake build (for example, adding -DCMAKE_Fortran_COMPILER=ifort to the list of CMake arguments). CMake recognizes CMAKE_<LANG>_COMPILER passed from the command line, where <LANG> can be Fortran, C or CXX.
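
Both approaches as one-liners (the compiler names are just examples):

export FC=ifort && cmake $HM_LIB/src
# or equivalently:
cmake $HM_LIB/src -DCMAKE_Fortran_COMPILER=ifort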

Can I get more verbose output when compiling with CMake?

To get detailed information about individual steps and commands issued when compiling HARMONIE-AROME with CMake add -v to your build command:

cmake --build . --target install -v

Is there a way to visualise dependencies between individual targets of HARMONIE-AROME in CMake build?

Since all the inter-target dependencies are defined in CMake scripts it can be useful to have an option to produce a graphical overview of the dependency graph of HARMONIE-AROME without grepping all the CMakeLists.txt files. This can be achieved by adding the --graphviz=<output file name> to the list of CMake arguments, for example:

cmake $HM_LIB/src --graphviz=harmonie.dot

then the produced dependency graph can be visualized using the dot tool:

dot -Tx11 harmonie.dot

The full dependency graph may be very cluttered and take quite some time to render, so it might be a good idea to plot dependencies of a single target, for example:

dot -Tx11 harmonie.dot.surf-static

See the CMake documentation on graphviz for additional information about fine-tuning of the generated graphs.

I need more information about CMake, where do I find documentation?

CMake documentation portal is a great source of detailed information about the various aspects of the CMake build system.

diff --git a/dev/Build/Build_with_makeup/index.html b/dev/Build/Build_with_makeup/index.html index 3415a423c9..1b651f87e6 100644 --- a/dev/Build/Build_with_makeup/index.html +++ b/dev/Build/Build_with_makeup/index.html @@ -79,4 +79,4 @@ # or not to mess up the output, use just one process for compilations -gmake NPES=1 -i

Creating precompiled installation

If you want to provide precompiled libraries, objects, source code to other users so that they do not have to start compilation from scratch, then make a distribution or precompiled installation as follows:

gmake PRECOMPILED=/a/precompiled/rootdir precompiled

After this the stuff you just compiled ends up in the directory /a/precompiled/rootdir with two subdirectories: src/ and util/. All executables are currently removed.

You can repeat this call, and it will just rsync the modified bits.

Update/check your interface blocks outside configure

The configure script has options -c and -g to check or enforce (re-)creation of the interface blocks of the arp and ald projects. To avoid a full and lengthy configure run, you can just do the following:

gmake intfb
+gmake NPES=1 -i

Creating precompiled installation

If you want to provide precompiled libraries, objects, source code to other users so that they do not have to start compilation from scratch, then make a distribution or precompiled installation as follows:

gmake PRECOMPILED=/a/precompiled/rootdir precompiled

After this the stuff you just compiled ends up in the directory /a/precompiled/rootdir with two subdirectories: src/ and util/. All executables are currently removed.

You can repeat this call, and it will just rsync the modified bits.

Update/check your interface blocks outside configure

The configure script has options -c and -g to check or enforce (re-)creation of the interface blocks of the arp and ald projects. To avoid a full and lengthy configure run, you can just do the following:

gmake intfb
diff --git a/dev/ClimateGeneration/ClimateGeneration/index.html index e77244d80a..6205fd7567 100644 --- a/dev/ClimateGeneration/ClimateGeneration/index.html +++ b/dev/ClimateGeneration/ClimateGeneration/index.html @@ -3,4 +3,4 @@ -

Generation of climate and physiography files

Introduction

The generation of climate files includes two parts. The first part is the generation of climate files for the atmospheric model, the so-called e923 configuration. The second part is the generation of the physiography information for SURFEX. In the following we describe how it is implemented in HARMONIE.

Input data for climate generation

The location of your input data for the climate generation is defined by the HM_CLDATA environment variable defined in the config-sh/config.yourhost. At ECMWF the climate data is stored on Atos here: hpc-login:/ec/res4/hpcperm/hlam/data/climate

Information on what data to download is available here. The input data contains physiography data, topography information and climatological values determined from a one year ARPEGE assimilation experiment with a resolution of T79. Climatological aerosol optical depths (tegen) or vertically integrated aerosol mass based on the CAMS reanalysis 2003-2022 (camscms) can be included in the monthly climate files.

In the current version the option to use pre-generated climate files has been introduced to save time for quick experiments. To use pre-generated domains you need to set USE_REF_CLIMDIR=yes in Env_system. The pre-generated domains' location is defined in config_exp.h; at ECMWF they are located in REF_CLIMDIR=ec:/hlam/harmonie_climdir/release-43h2.1.rc1/$DOMAIN/$ECOCLIMAP_VERSION.

Preparation of SURFEX physiography file

SURFEX needs information about the distribution of the different available tiles like nature, sea, water and town. The nature tile also needs information about vegetation types and soil types. The main input sources for this are found at SURFEX physiographic maps.

The data base for SURFEX-file preparation is located under HM_CLDATA/PGD

  • ecoclimats_v2.* : Landtypes
  • gtopo30.* : Topography
  • sand_fao.* : Soil type distribution
  • clay_fao.* : Soil type distribution

The generation of the SURFEX physiography file (PGD.lfi) is done in scr/Prepare_pgd. The script creates the namelist OPTIONS.nam based on the DOMAIN settings in scr/Harmonie_domains.pm. Note that the SURFEX domain is only created over the C+I area. In the namelist we set which scheme should be activated for each tile.

Tile
PHYSICS   Nature   Sea      Water    Town
AROME     ISBA     SEAFLX   WATFLX   TEB
ALARO     ISBA     SEAFLX   WATFLX   Town as rock

The program PGD produces one SURFEX physiography file, PGD.lfi, which is stored in the CLIMDIR directory.

To make sure we have the same topography input for the atmospheric part we call Prepare_pgd twice: once to produce a PGD.lfi for SURFEX and a second time to produce a PGD.fa file that can be used as input for the climate generation described below. Note that for the atmosphere the topography will be spectrally filtered and the resulting topography will be imposed on SURFEX again.

Generation of non SURFEX monthly climate files

These files contain, among others, the surface elevation, land-sea mask, climatological aerosol and several near-surface variables for ALADIN/ALARO systems that may run without SURFEX. Climatological aerosol can be aerosol optical depth at 550 nm (Tegen or CAMS) and, in the future, also vertically integrated aerosol mass mixing ratios based on the CAMS reanalysis.

scr/Climate is a script which prepares climate file(s) for the preferred forecast range. Climate files are produced for the past, present and following month. The outline of Climate is as follows:

  • Check if climate files already exist.
  • Creation of namelists. The definition of domain and truncation values is taken from scr/Harmonie_domains.pm.
  • Part 0: Read the PGD.fa file generated by SURFEX and write it to Neworog
  • Part 1: Filter Neworog to target grid with spectral smoothing to remove 2dx waves.
  • Part 2: generation of surface, soil and vegetation variables, without annual variation.
  • Part 3: creation of monthly climatological values and modification of albedo and emissivity according to the climatology of sea-ice limit.
  • Part 4: definition and modification of the vegetation and surface characteristics
  • Part 5: modification of fields created by step 2 and 4 over land from high resolution datasets (for each month)
  • Part 6: modification of climatological values

The result is climate files for the previous, current and next month. The files are named after their month, m01, m02, ..., m12, and stored in CLIMDIR.

Further reference e923

+

Generation of climate and physiography files

Introduction

The generation of climate files includes two parts. The first part is the generation of climate files for the atmospheric model, the so-called e923 configuration. The second part is the generation of the physiography information for SURFEX. In the following we describe how it is implemented in HARMONIE.

Input data for climate generation

The location of your input data for the climate generation is defined by the HM_CLDATA environment variable defined in the config-sh/config.yourhost. At ECMWF the climate data is stored on Atos here: hpc-login:/ec/res4/hpcperm/hlam/data/climate

Information on what data to download is available here. The input data contains physiography data, topography information and climatological values determined from a one year ARPEGE assimilation experiment with a resolution of T79. Climatological aerosol optical depths (tegen) or vertically integrated aerosol mass based on the CAMS reanalysis 2003-2022 (camscms) can be included in the monthly climate files.

In the current version the option to use pre-generated climate files has been introduced to save time for quick experiments. To use pre-generated domains you need to set USE_REF_CLIMDIR=yes in Env_system. The pre-generated domains' location is defined in config_exp.h; at ECMWF they are located in REF_CLIMDIR=ec:/hlam/harmonie_climdir/release-43h2.1.rc1/$DOMAIN/$ECOCLIMAP_VERSION.

Preparation of SURFEX physiography file

SURFEX needs information about the distribution of the different available tiles like nature, sea, water and town. The nature tile also needs information about vegetation types and soil types. The main input sources for this are found at SURFEX physiographic maps.

The data base for SURFEX-file preparation is located under HM_CLDATA/PGD

  • ecoclimats_v2.* : Landtypes
  • gtopo30.* : Topography
  • sand_fao.* : Soil type distribution
  • clay_fao.* : Soil type distribution

The generation of the SURFEX physiography file (PGD.lfi) is done in scr/Prepare_pgd. The script creates the namelist OPTIONS.nam based on the DOMAIN settings in scr/Harmonie_domains.pm. Note that the SURFEX domain is only created over the C+I area. In the namelist we set which scheme should be activated for each tile.

Tile
PHYSICS   Nature   Sea      Water    Town
AROME     ISBA     SEAFLX   WATFLX   TEB
ALARO     ISBA     SEAFLX   WATFLX   Town as rock

The program PGD produces one SURFEX physiography file, PGD.lfi, which is stored in the CLIMDIR directory.

To make sure we have the same topography input for the atmospheric part we call Prepare_pgd twice: once to produce a PGD.lfi for SURFEX and a second time to produce a PGD.fa file that can be used as input for the climate generation described below. Note that for the atmosphere the topography will be spectrally filtered and the resulting topography will be imposed on SURFEX again.

Generation of non SURFEX monthly climate files

These files contain, among others, the surface elevation, land-sea mask, climatological aerosol and several near-surface variables for ALADIN/ALARO systems that may run without SURFEX. Climatological aerosol can be aerosol optical depth at 550 nm (Tegen or CAMS) and, in the future, also vertically integrated aerosol mass mixing ratios based on the CAMS reanalysis.

scr/Climate is a script which prepares climate file(s) for the preferred forecast range. Climate files are produced for the past, present and following month. The outline of Climate is as follows:

  • Check if climate files already exist.
  • Creation of namelists. The definition of domain and truncation values is taken from scr/Harmonie_domains.pm.
  • Part 0: Read the PGD.fa file generated by SURFEX and write it to Neworog
  • Part 1: Filter Neworog to target grid with spectral smoothing to remove 2dx waves.
  • Part 2: generation of surface, soil and vegetation variables, without annual variation.
  • Part 3: creation of monthly climatological values and modification of albedo and emissivity according to the climatology of sea-ice limit.
  • Part 4: definition and modification of the vegetation and surface characteristics
  • Part 5: modification of fields created by step 2 and 4 over land from high resolution datasets (for each month)
  • Part 6: modification of climatological values

The result is climate files for the previous, current and next month. The files are named after their month, m01, m02, ..., m12, and stored in CLIMDIR.

Further reference e923

diff --git a/dev/ClimateGeneration/DownloadInputData/index.html index 8da662cc54..f477aee294 100644 --- a/dev/ClimateGeneration/DownloadInputData/index.html +++ b/dev/ClimateGeneration/DownloadInputData/index.html @@ -3,4 +3,4 @@ -

Download input data

Before you can start running HARMONIE experiments some input data (external to the code repository) needs to be available on your platform. The input data contains physiography data, topography information and climatological values determined from a one year ARPEGE assimilation experiment with a resolution of T79.

+

Download input data

Before you can start running HARMONIE experiments some input data (external to the code repository) needs to be available on your platform. The input data contains physiography data, topography information and climatological values determined from a one year ARPEGE assimilation experiment with a resolution of T79.

diff --git a/dev/ClimateSimulations/ClimateSimulation/index.html index d8869ab741..47ff298019 100644 --- a/dev/ClimateSimulations/ClimateSimulation/index.html +++ b/dev/ClimateSimulations/ClimateSimulation/index.html @@ -3,4 +3,4 @@ -
+
diff --git a/dev/DataAssimilation/CHKEVO/index.html b/dev/DataAssimilation/CHKEVO/index.html index 102999b911..fa30e4c4c0 100644 --- a/dev/DataAssimilation/CHKEVO/index.html +++ b/dev/DataAssimilation/CHKEVO/index.html @@ -20,4 +20,4 @@ CHKEVO : 1.3677546254375832 0.22965677860570116 CHKEVO : 1.1506125378848564 0.20575065246468008 CHKEVO : 0.98597708942270756 0.19299583141063531 -.....

The RMS of dps/dt alone can be extracted with:

grep "^ CHKEVO : " HM_Date_2013041118.html | tail -n +2 | awk '{print $3}'
+.....

The RMS of dps/dt alone can be extracted with:

grep "^ CHKEVO : " HM_Date_2013041118.html | tail -n +2 | awk '{print $3}'
diff --git a/dev/DataAssimilation/DFS/index.html index 53b7f23679..921c9c0e38 100644 --- a/dev/DataAssimilation/DFS/index.html +++ b/dev/DataAssimilation/DFS/index.html @@ -3,4 +3,4 @@ -
+
diff --git a/dev/DataAssimilation/DaAlgorithms/index.html b/dev/DataAssimilation/DaAlgorithms/index.html index b97bba799a..32c83cef86 100644 --- a/dev/DataAssimilation/DaAlgorithms/index.html +++ b/dev/DataAssimilation/DaAlgorithms/index.html @@ -33,4 +33,4 @@ # Only meaningful if SURFEX_LSELECT=yes SFXSWFTIMES=$SFXSELTIMES # SURFEX select FA file IO server gathering times SWRITUPTIMES="00-06:1" # Surfex model state output times - SFXWFTIMES=$SWRITUPTIMES # SURFEX history FA file IO server gathering times

The surface assimilation is moved to the start of the forecast and hence it is only the upper air assimilation that is involved in the IAU.

Note

There is a difference between the first run with IAUVAR and all the following ones, because the first run couples to a run done without IAU while the others couple to a run that has done IAU, so the files used as start files differ in valid times. To this effect the first run saves a semaphore file in the $SCRATCH/hm_home/exp_name/ directory for the following runs to react to. So if you need to rerun the first run for some reason, that semaphore file (named is_iauvar) needs to be removed manually.
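
For example, before deliberately re-running the first cycle the semaphore can be removed by hand (exp_name stands for your experiment name):

rm $SCRATCH/hm_home/exp_name/is_iauvar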

Flow diagram of IAU (Magnus will help)

+ SFXWFTIMES=$SWRITUPTIMES # SURFEX history FA file IO server gathering times

The surface assimilation is moved to the start of the forecast and hence it is only the upper air assimilation that is involved in the IAU.

Note

There is a difference between the first run with IAUVAR and all the following ones, because the first run couples to a run done without IAU while the others couple to a run that has done IAU, so the files used as start files differ in valid times. To this effect the first run saves a semaphore file in the $SCRATCH/hm_home/exp_name/ directory for the following runs to react to. So if you need to rerun the first run for some reason, that semaphore file (named is_iauvar) needs to be removed manually.
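
For example, before deliberately re-running the first cycle the semaphore can be removed by hand (exp_name stands for your experiment name):

rm $SCRATCH/hm_home/exp_name/is_iauvar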

Flow diagram of IAU (Magnus will help)

diff --git a/dev/DataAssimilation/DigitalFilterInitialization/index.html index de01252dc0..ffc06ca240 100644 --- a/dev/DataAssimilation/DigitalFilterInitialization/index.html +++ b/dev/DataAssimilation/DigitalFilterInitialization/index.html @@ -3,4 +3,4 @@ -

Digital Filter Initialization

Digital Filter Initialization (DFI) is documented by Météo France here. This wiki page is based on the "Version cycle 40t1" document available on the gmapdoc web page. By default HARMONIE does not use DFI.

DFI

The use (or not) of DFI is controlled by the variable DFI in ecf/config_exp.h. By default it is set to none.

  • idfi : incremental DFI
  • fdfi : full DFI
  • none : no initialization (default)
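
Switching initialization on is then a one-line change in ecf/config_exp.h; a sketch (fdfi chosen only as an example):

DFI=fdfi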

scr/Dfi is the script which calls the model in order to carry out DFI.

References

+

Digital Filter Initialization

Digital Filter Initialization (DFI) is documented by Météo France here. This wiki page is based on the "Version cycle 40t1" document available on the gmapdoc web page. By default HARMONIE does not use DFI.

DFI

The use (or not) of DFI is controlled by the variable DFI in ecf/config_exp.h. By default it is set to none.

  • idfi : incremental DFI
  • fdfi : full DFI
  • none : no initialization (default)
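
Switching initialization on is then a one-line change in ecf/config_exp.h; a sketch (fdfi chosen only as an example):

DFI=fdfi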

scr/Dfi is the script which calls the model in order to carry out DFI.

References

diff --git a/dev/DataAssimilation/LSMIXandJk/index.html index b4f91eabf3..a239ebd1db 100644 --- a/dev/DataAssimilation/LSMIXandJk/index.html +++ b/dev/DataAssimilation/LSMIXandJk/index.html @@ -3,4 +3,4 @@ -

Jk as a pre-mixing method

The 3D-Var cost function including the Jk term can be written:

\[J(x) = J_b + J_o + J_k = \frac{1}{2} (x - x_b)^{\rm T} B^{-1}(x - x_b) + \frac{1}{2} (y - Hx)^{\rm T}R^{-1}(y - Hx) + \frac{1}{2} (x - x_{LS})^{\rm T} V^{-1}(x - x_{LS})\]

Setting the gradient to zero, we have at the optimal $x$:

\[\nabla J = B^{-1}(x - x_b) - H^{\rm T}R^{-1}(y - Hx) + V^{-1}(x - x_{LS}) = 0 \]

or

\[\left[B^{-1} + V^{-1} + H^{\rm T}R^{-1}H\right] \left(x - x_b \right) = H^{\rm T}R^{-1}(y - Hx_b) + V^{-1}(x_{LS} - x_b). \]

Equivalent pre-mixed first guess

Assume now that $\widetilde{x_b}$ is some yet unknown, pre-mixed field depending on $x_b$ and $x_{LS}$ that we want to determine. By adding and subtracting identical terms to the gradient equation, we have

\[B^{-1}(x - x_b + \widetilde{x_b} - \widetilde{x_b}) - H^{\rm T}R^{-1}(y - Hx + H\widetilde{x_b} - H\widetilde{x_b}) + V^{-1}(x - x_{LS} + \widetilde{x_b} - \widetilde{x_b}) = 0,\]

which, when reorganized gives

\[\left[B^{-1} + V^{-1} + H^{\rm T}R^{-1}H \right] \left(x - \widetilde{x_b}\right) = H^{\rm T}R^{-1}(y - H\widetilde{x_b}) + B^{-1}(x_b - \widetilde{x_b}) + V^{-1}(x_{LS} - \widetilde{x_b}). \]

If the last two terms on the right hand side add up to zero, i.e.,

\[B^{-1}(x_b - \widetilde{x_b}) + V^{-1}(x_{LS} - \widetilde{x_b}) = 0, \]

which means that

\[\widetilde{x_b} = [B^{-1} + V^{-1}]^{-1} ( B^{-1} x_b + V^{-1} x_{LS} ), \]

then we see that by using this mixed first guess the Jk term can be omitted, provided we use a modified B-matrix with the property that

\[\widetilde{B}^{-1} = B^{-1} + V^{-1}. \]

By writing

\[B^{-1} + V^{-1} = B^{-1}(B + V)V^{-1} = V^{-1}(B + V)B^{-1} \]

we easily see by simply inverting that

\[\widetilde{B} = [B^{-1} + V^{-1}]^{-1} = B(B + V)^{-1}V = V(B + V)^{-1}B. \]

To conclude, a 3D-Var minimization with Jk is equivalent to a minimization without the Jk term, provided that one pre-mixes the two first guess fields according to

\[\widetilde{x_b} = [B^{-1} + V^{-1}]^{-1} ( B^{-1} x_b + V^{-1} x_{LS} ) = \widetilde{B}( B^{-1} x_b + V^{-1} x_{LS} ) = V(B + V)^{-1}x_b + B(B + V)^{-1}x_{LS} \]

and use the following covariance matrix for this mixed first guess:

\[\widetilde{B} = [B^{-1} + V^{-1}]^{-1} = B(B + V)^{-1}V = V(B + V)^{-1}B. \]
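
As a quick sanity check, in the scalar case with $B = V = \sigma^2$ these expressions reduce to

\[\widetilde{x_b} = \tfrac{1}{2}\left(x_b + x_{LS}\right), \qquad \widetilde{B} = \tfrac{1}{2}\sigma^2,\]

i.e., the two first guesses are weighted equally and the variance of the mixed first guess is halved.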

Whether this is implementable in practice is a different story; the derivation just shows the theoretical equivalence, and how LSMIXBC should ideally be done if Jk is the right answer.

MTEN

See (Storto and Randriamampianina, 2010) for more details.

NWECHKEVO

Sample output:

    NWECHKEVO:UA 13 001 003 0.79264193785264E-05 -0.15031046611816E-04 0.21385134119954E+03 -0.33856415073342E-04 0.42661347477312E-05
    NWECHKEVO:UA 13 001 004 0.21090675053822E-05 0.31713133370971E-05 0.21377935010403E+03 -0.40445121858208E-04 -0.54989449665528E-05
    NWECHKEVO:UA 13 001 005 0.30451493480920E-04 -0.18284403001908E-04 0.21545646796919E+03 -0.42130887042681E-04 0.14684047934687E-04
    ...

up to timestep 180 (hard-coded; the first 3 hours if the timestep is 1 minute)

Plotting

The results are easily plotted with any graphing utility (e.g. gnuplot).
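For a quick look at how one of the printed quantities evolves, a minimal sketch (the log file name NODE.001_01 and the interpretation of the columns are assumptions; adapt both to your run):

    # print the 4th column (assumed here to be the timestep) against the first value column
    grep 'NWECHKEVO:UA' NODE.001_01 | awk '{print $4, $5}' > evo.dat
    gnuplot -persist -e "set xlabel 'timestep'; plot 'evo.dat' using 1:2 with lines title 'NWECHKEVO'"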


Screening

Introduction

Screening (configuration 002 of the ARPEGE/IFS model) carries out quality control of observations.

A useful presentation (Martin Ridal) from the "Hirlam-B Training Week on HARMONIE system" training course is available here: MR_screenandminim.pdf. Most of the information on this page is based on his presentation.

Inputs

  • First guess (the same file with 5 different names):

    • ICMSHMIN1INIT
    • ICMSHMIN1IMIN
    • ICMRFMIN10000
    • ELSCFMIN1ALBC000
    • ELSCFMIN1ALBC
  • Input/output ODB directory structure

    • ${d_DB}/ECMA
    • ${d_DB}/ECMA.${base1}
  • Constants and statistics (MAY NEED TO BE UPDATED)

    • correl.dat
    • sigmab.dat
    • rszcoef_fmt
    • errgrib
    • rt_coef_atovs_newpred_ieee.dat
    • bcor_noaa.dat
    • chanspec_noaa.dat
    • rmtberr_noaa.dat
    • cstlim_noaa.dat
  • Namelist: See %screening in nam/harmonie_namelists.pm

Screening tasks

(Based on Martin Ridal's presentation).

  • Preliminary check of observations
    • Check of completeness of the reports
    • Check if station altitude is present
    • Check of the reporting practice for SYNOP & TEMP mass observations
  • Blacklisting: A blacklist is applied to discard observations of known poor quality and/or that cannot be properly handled by the data assimilation. A selection of variables for assimilation is done using the data selection part of the blacklist file and the information hard-coded in Arpege/Aladin (orographic rejection limit, land-sea rejection...). Decisions based on the blacklist are fed back to the CMA. Blacklisting is defined in src/bla/mf_blacklist.b
  • Background quality control: flags are assigned to observations – 1 = probably correct, 2 = probably incorrect, 3 = incorrect.
  • Vertical consistency of multilevel reports:
    • Duplicated levels in multi-level reports are removed
    • If 4 consecutive layers are found to be of suspicious quality, these layers are rejected
  • Removal of duplicated reports
    • In case of co-located AIREP reports of the same observation type (time, position), some or all of the content of one of the reports is rejected
  • Redundancy check
    • performed for active reports that are co-located and originate from the same station
    • LAND SYNOP: the report closest to the centre of the screening time window with most active data is retained
    • SHIP SYNOP: redundant if the moving platforms are within a circle of 1° radius (src/arpifs/obs_preproc/sufglim.F90: RSHIDIS = 111000._JPRB)
    • TEMP and PILOT: the same stations are considered at the same time in the redundancy check
    • A SYNOP mass observation is redundant if there are any TEMP geopotential height observations (made at the same time and the same station) no more than 50 hPa above the SYNOP mass observation
  • Thinning: high-resolution data is thinned to limit correlated observation errors and to reduce the amount of data

Output

The quality control information will be put into the input ECMA ODB(s) and a newly created CCMA to be used by the 3D-VAR minimization.

A valuable summary about screening decisions can be found in HM_Date_YYYYMMDDHH.html:

  • Look for “SCREENING STATISTICS” to get:
    • STATUS summary
    • EVENT summary
    • Number of variables, departures and missing departures
    • Diagnostic JO-table
    • CCMA ODB and updated ECMA ODB
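To pull that summary out of the log from the command line, a minimal sketch (the date in the file name and the 40-line window are arbitrary examples):

    grep -A 40 'SCREENING STATISTICS' HM_Date_2012061003.html | less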

Screening Events listed under "EVENT SUMMARY OF REPORTS:"

Event | Description
1  | NO DATA IN THE REPORT
2  | ALL DATA REJECTED
3  | BAD REPORTING PRACTICE
4  | REJECTED DUE TO RDB FLAG
5  | ACTIVATED DUE TO RDB FLAG
6  | ACTIVATED BY WHITELIST
7  | HORIZONTAL POSITION OUT OF RANGE
8  | VERTICAL POSITION OUT OF RANGE
9  | TIME OUT OF RANGE
10 | REDUNDANT REPORT
11 | REPORT OVER LAND
12 | REPORT OVER SEA
13 | MISSING STATION ALTITUDE
14 | MODEL SUR. TOO FAR FROM STAT. ALT.
15 | REPORT REJECTED THROUGH THE NAMELIST
16 | FAILED QUALITY CONTROL
SingleObs
  • Launch the single observation impact experiment:

    ./Harmonie start DTG=2012061003 DTGEND=2012061006
  • The resulting analysis file can be found as $SCRATCH/hm_home/<exp>/archive/2012/06/10/06/MXMIN1999+0000. You can now diagnose the 3D-VAR analysis increments of the sinob experiment by taking the difference between the analysis MXMIN1999+0000 and the first guess, $SCRATCH/hm_home/<exp>/archive/2012/06/10/03/ICMSHHARM+0003 (see the sketch below). Plot horizontal and vertical cross-sections of temperature and other variables using your favourite software (EPyGrAM for example).

  • Note that you can change position of observation, observation error, variable to be observed etc. Investigate these options by taking a closer look at the script Create_single_obs.

    Read more about radiance single observation experiments here. In ec:/smx/sinob_wiki_ml you will also find OBSOUL_amsua7, a file for generating a satellite radiance AMSU-A channel 7 single observation impact experiment.
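    A minimal sketch of the file handling for this comparison (paths copied from the bullets above; <exp> is your experiment name, and the gl usage follows the structure functions section later in this document):

        cd $SCRATCH
        cp $SCRATCH/hm_home/<exp>/archive/2012/06/10/06/MXMIN1999+0000 .
        cp $SCRATCH/hm_home/<exp>/archive/2012/06/10/03/ICMSHHARM+0003 .
        # convert both FA files to GRIB so standard plotting tools can read them
        for f in MXMIN1999+0000 ICMSHHARM+0003; do
            $SCRATCH/hm_home/<exp>/bin/gl -p "$f"
        done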

Structure functions

    ecp stab_your_eda_exp.bal.gz ec:/smx/jbdata/.   (with your own filename and directory)

Also create a tar-file with all `*.xy`, `*.y`, `*.cv`, `*.bal` and `*.cvt` files and put it on ECFS for future diagnostic purposes. These new files are your final background error statistics, to be diagnosed (compared with the STEP 1 ones perhaps) and inserted into your data assimilation by modifying `include.ass` (as in bullet 3 above) to point to your new files.

    Diagnosis of background error statistics

    1. Diagnosis of background error statistics is a rather complicated task. To get an idea of what the correlations and covariances should look like, take a look at the article: Berre, L., 2000: Estimation of synoptic and mesoscale forecast error covariances in a limited area model. Mon. Wea. Rev., 128, 644-667. Software for investigating and graphically illustrating different aspects of the background error statistics has been developed, and statistics generated for different domains have been investigated, using the AccordDaTools package. With this software you can also compare your newly generated background error statistics with those generated for other HARMONIE domains. This will give you an idea of whether your statistics seem reasonable. For diagnosing the newly derived background error statistics follow these instructions:

    2. Get the code and scripts:

      • Download and install AccordDaTools following instructions in the README
      • Don't forget to add the package tools directory to your PATH:
      • export PATH=/path/to/da_tools:$PATH
    3. Run Jb diagnostics script:

      • For example for a new domain using horizontal grid-spacing of 2500 m and (Harmonie) 65 vertical levels:
        jbdiagnose -b jb_data/stab_IRELAND25_064_480.bal -c jb_data/stab_IRELAND25_064_480.cv -g 2500 -l harmL65 -e jbdiag_IRELAND25_064
      • The output will be written to jbdiag_IRELAND25_064
    4. The AccordDaTools package also provides two tools for plotting the data produced by jbdiagnose: plotjbbal and plotjbdiag. plotjbbal plots Jb balances for different parameters. plotjbdiag produces spectral density (spdens) and vertical correlation (vercor) diagnostic plots for your structure functions. For example:

      • plotjbbal:

        plotjbbal -t stdv -p QQ -r jbdiag_ -e IRELAND25_064
      • plotjbdiag:

        plotjbdiag -l 50 -t vercor -p QQ -r jbdiag_ -e IRELAND25_064
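    To scan several levels and parameters in one go, a small wrapper sketch (the levels 10/30/50 and the TT parameter are arbitrary choices; the flags are those documented above):

        for lev in 10 30 50; do
            for param in QQ TT; do
                plotjbdiag -l "$lev" -t vercor -p "$param" -r jbdiag_ -e IRELAND25_064
            done
        done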

    Run 3DVAR/4DVAR with the new background error statistics

    1. Create hm_home/jb_da, then cd $HOME/hm_home/jb_da.

    2. Create the experiment by typing

      ~hlam/Harmonie setup -r ~hlam/harmonie_release/git/tags/harmonie-43h2.2.1
    3. In scr/include.ass set JBDIR=ec:/$uid/jbdata (uid being your user id; in this example ec:/smx/jbdata), set f_JBCV to the name of your .cv file in ec:/$uid/jbdata (without .gz) and set f_JBBAL to the name of your .bal file in ec:/$uid/jbdata (without .gz); in this example f_JBCV=stab_METCOOPD_65_20200601_360.cv and f_JBBAL=stab_METCOOPD_65_20200601_360.bal (see the sketch after this list). Add these three lines in place of the three lines in include.ass that follow right after the elif statement: elif [ "$DOMAIN" = METCOOP25D]; then. If the domain is other than METCOOP25D one has to look for the corresponding branch for that domain.

    4. From $HOME/hm_home/jb_da launch the experiment by typing

      ~hlam/Harmonie start DTG=2021010100 DTGEND=2021010103
    5. The resulting analysis file can be found under $TEMP/hm_home/jb_da/archive/2021/01/01/03, where it will be called MXMIN1999+0000, and on ec:/$uid/harmonie/2021/01/01/03. To diagnose the 3D-VAR analysis increments of the jb_da experiment, copy the files MXMIN1999+0000 (analysis) and ICMSHHARM+0003 (fg) to $SCRATCH. The first guess (background) file can be found in $TEMP/hm_home/jb_da/archive/2021/01/01/00 and ec:/$uid/harmonie/jb_da/2021/01/01/00. Convert from FA file format to GRIB with the gl software ($SCRATCH/hm_home/jb_da/bin/gl) by typing ./gl -p MXMIN1999+0000 and ./gl -p ICMSHANAL+0000. Then plot the difference between the files using your favourite software (EPyGrAM for example), as horizontal and vertical cross-sections of temperature and other variables.

    6. Now you have managed to insert the newly generated background error statistics into the assimilation system, to carry out a full-scale data assimilation run and to plot the analysis increments. The next natural step to further diagnose the background error statistics is to carry out a single observation impact experiment utilizing your newly generated background error statistics. Note the variables REDNMC and REDZONE in include.ass. REDNMC is the scaling factor for the background error statistics (default value 0.6/0.9 for METCOOP25D/NEW_DOMAIN). REDZONE describes how far from the lateral boundaries (in km) the observations need to be located to be assimilated (default value 150/100 for METCOOP25D/NEW_DOMAIN).
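    A sketch of the three include.ass lines described in step 3 (file names taken from the example above; your uid and file names will differ):

        JBDIR=ec:/$uid/jbdata
        f_JBCV=stab_METCOOPD_65_20200601_360.cv
        f_JBBAL=stab_METCOOPD_65_20200601_360.bal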

    In-line Interpolation and Extrapolation of Jb-statistics

    In case you do not have existing background error statistics derived for your domain, there is a built-in technical possibility to use Jb-files from another domain derived with the same number of vertical levels. From these host Jb-files, background error statistics are then interpolated or extrapolated to the current domain configuration. The assumption is then (which is in general questionable) that the statistics derived on the host domain are also valid for the current domain. If the longest side of the host domain is shorter than the longest side of the current domain, an extrapolation of the background error covariance spectra is needed. Such extrapolation should be avoided over a wide range of wavenumbers; therefore it is recommended that the longest side of the host Jb-file is as long as or longer than the longest side of the current domain. The interpolation is invoked by setting, in ecf/config_exp.h, JB_INTERPOL=yes and JB_REF_DOMAIN=$HOST_JB, where $HOST_JB is for example METCOOP25B. These settings activate the running of the script jbconv.sh (in case no Jb-files are present for the current domain), called from Fetch_assim_data.
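    In configuration terms this amounts to two settings; a sketch of the relevant lines in ecf/config_exp.h (METCOOP25B as the example host domain, as above):

        JB_INTERPOL=yes
        JB_REF_DOMAIN=METCOOP25B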

    On-going work & future developments

    Recent and on-going work as well as plans for future developments:

    References

CANARI

    export ODB_MERGEODB_DIRECT= ...   (optional direct ODB merge; if your ODB was not merged previously use 1)
  • Concerning the observation use, another file is necessary, but it is of no interest for CANARI (part of the variational analysis code is simply not controlled by a logical key!). The file can be obtained on "tori" via gget var.misc.rszcoef_fmt.01.

    ln -s rszcoef_fmt var.misc.rszcoef_fmt.01
  • The climatological files

    ln  -s  climfile_${mm}  ICMSHANALCLIM
     ln  -s  climfile_${mm2} ICMSHANALCLI2
  • The namelist file

    ln -s namelist fort.4 
  • The ISBA files

    ln -s G_file ICMSHANALFGIN

  • run CANARI

    MASTERODB -c701 -vmeteo -maladin -eANAL -t1. -ft0 -aeul
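    Putting the input links and the run step together, a minimal driver sketch (all commands are taken from the bullets above; climfile_${mm}, climfile_${mm2}, namelist and G_file are placeholders for your actual files):

        #!/bin/bash
        set -e
        ln -s rszcoef_fmt var.misc.rszcoef_fmt.01   # observation-use coefficient file
        ln -s climfile_${mm}  ICMSHANALCLIM         # climatological files
        ln -s climfile_${mm2} ICMSHANALCLI2
        ln -s namelist fort.4                       # the namelist file
        ln -s G_file ICMSHANALFGIN                  # first-guess input
        MASTERODB -c701 -vmeteo -maladin -eANAL -t1. -ft0 -aeul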

    OUTPUTs

    NODE*

    Sample of script is attached.

    As a part of the system training in Copenhagen in 2008, Roger prepared an introduction to CANARI, which is found in HarmonieSystemTraining2008/Lecture/SurfaceAssimilation on hirlam.org

    References

CANARI_EKF_SURFEX

    Surface variables assimilated / read in EKF_MAIN

    From cycle 37 EKF is implemented in research/development mode. The following tiles and variables are modified:

    NATURE

    WG2/WG1/TG2/TG1

    The uppermost two ISBA levels of soil moisture and temperature are assimilated: with CANARI/CANARI_OI_MAIN by an OI method, with CANARI_SURFEX_EKF by an Extended Kalman Filter (EKF).

    For 2012 a rewrite of OI_MAIN/EKF_MAIN into a single binary is planned, in order to be able to apply the work done for OI_MAIN in EKF_MAIN and thus reduce the maintenance costs.


    Surface variables assimilated / read in OI_main

    CANARI_OI_MAIN is the surface assimilation scheme which emulates what is done in CANARI for old_surface, but using the external surface scheme SURFEX.

    The default surface model is SURFEX and the default surface assimilation scheme is CANARI_OI_MAIN.

    NATURE

    WG2/WG1/TG2/TG1

    The uppermost two ISBA levels of soil moisture and temperature are assimilated: with CANARI/CANARI_OI_MAIN by an OI method, with CANARI_SURFEX_EKF by an Extended Kalman Filter (EKF).

    SNOW

    The snow analysis is performed in CANARI and is controlled by the key LAESNM. This is set to true by default in scr/RunCanari. If running with SURFEX it will also need to be true in scr/OI_main, as the SURFEX snow then needs to be updated by the analysis done in CANARI.

    SEA

    SST/SIC

    The only option for SST/SIC at the moment is to take it from the boundaries.

    • ecf/config_exp.h: SST=BOUNDARY

    If you are using boundaries from IFS, the task Interpol_sst will interpolate SST from your boundary file, take into account that SST in the IFS files is not defined over land (as for HIRLAM), and also use an extrapolation routine to propagate the SST into narrow fjords.

    There is an SST analysis built into CANARI, but it is not used by HARMONIE or Météo-France.

    WATER

    LAKE temperature

    Lake temperatures are updated in OI_main and are extrapolated from the land surface temperatures.

    TOWN

    ROAD temperature

    Only used when TEB is activated (key: LAROME). The increment for TG2 is added to ROAD layer 3.


Surface analysis

    (edit nam/LISTE_NOIRE_DIAP to insert, e.g. at the last line, the following)
    1 SHIP 24 11 DBKR 03062012

    Boundary strategies for HarmonEPS: SLAF and EC ENS

    Presently there are two available options for choosing boundaries when running HarmonEPS: EC ENS or SLAF. In the branch harmonEPS-40h1.1, SLAF is set as the default.

                     | Settings for SLAF (default in branch harmonEPS-40h1.1) | Settings for EC ENS
    ecf/config_exp.h | BDSTRATEGY=simulate_operational | BDSTRATEGY=eps_ec
                     | BDINT=1 (can be set to larger value) | BDINT=3 (or larger, hourly input is not possible)
    msms/harmonie.pm | | Comment out SLAF settings: #SLAFLAG, #SLAFDIFF, #SLAFK
                     | 'ENSBDMBR' => [ 0] | 'ENSBDMBR' => [ 0, 1..10] (or any other members from EC ENS you would like to use)

    For more information about how to treat the settings in harmonie.pm, see here. Note that BDSTRATEGY=eps_ec uses EC ENS data as stored in the GLAMEPS archive (as ECMWF does not store model levels in MARS). Only the 00 and 12 UTC EC ENS runs are in this archive, with 3 h output, hence you need to use BDINT=3 for this option.
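    As a concrete sketch, the EC ENS column of the table amounts to the following two lines in ecf/config_exp.h (the member selection is done separately in msms/harmonie.pm, as shown in the table):

        BDSTRATEGY=eps_ec
        BDINT=3    # hourly input is not possible with eps_ec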

EPS Howto

    'FirstHour' => sub { my $mbr = shift;
                         return $ENV{StartHour} % &Env('FCINT',$mbr); }
    );

    ANAATMO is straightforward: only the control members need an exception from blending, so using a hash is most appropriate. Similarly for FCINT. For PHYSICS we have used an array and the fact that the array will be recycled. Thus member 0 will be the AROME control, while member 1 will be the ALARO control. The reason why we did not simply put a 2-element array [ 'arome','alaro'] to be repeated is that the ECMWF perturbations come in +/- pairs, and we don't want all the '+' perturbations to always go with the same physics (and the '-' perturbations with the other type). Therefore, we added a second pair with the order reversed, to alternate +/- perturbations between AROME and ALARO members. ENSCTL follows the same pattern as PHYSICS. Note the need for 3-digit numbers in ENSCTL; at present this is necessary to avoid parsing errors in the preparation step of mini-SMS.

    Note also how we have used ENSBDMBR. For both the AROME control (member 0) and ALARO control (member 1), we have used the EC EPS control member 0 to provide boundaries. The syntax 1..20 is a perl shorthand for the list 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20.

    Note added after changeset [12537]: The setting of ENSBDMBR created a race condition in the boundary extraction for runs at ECMWF. This is hopefully solved by the new definition for BDDIR, which makes use of the possibility of having a subroutine to compute the member specific settings. Another example where a subroutine came out handy was for the setting of FirstHour.

    Further reading

    More specific instructions and information about known problems can be found here.


SLAF

Sample diagnostics output:

    ...
    009 42 36 06 0.95 28.92 130.72 127.48 11
    009 48 36 06 0.95 14.80 176.10 175.48 11

    The SLAFK can then be adjusted to achieve a uniform level of STDV for all members. Note that the response may be different for different seasons and will vary between IFS versions. An example of SLAF diagnostics from MetCoOp can be seen in the figure below.

    Examples

    Below is an example for 2016052006 for the two different approaches of SLAF described above:

SPP

    pattern 3 for CLDDPTHDP using seed 980493159
    KGET_SEED_SPP: ICE_CLD_WGT 10008 1362729695
    pattern 4 for ICE_CLD_WGT using seed 1362729695
    ...

    This would give us:

    Perturbation | raw pattern  | scaled pattern
    PSIGQSAT     | S001EZDIAG01 | S002EZDIAG01
    CLDDPTH      | S003EZDIAG01 | S004EZDIAG01
    CLDDPTHDP    | S005EZDIAG01 | S006EZDIAG01
    ICE_CLD_WGT  | S007EZDIAG01 | S008EZDIAG01

    and so on

    SPPT pattern EZDIAG02 (same in all levels)

    SPP tendencies PtendU EZDIAG03

    SPP tendencies PtendV EZDIAG04

    SPP tendencies PtendT EZDIAG05

    SPP tendencies PtendQ EZDIAG06

    Suggestions for parameters to include in SPP:

    Parameter | Description | Deterministic value cy43 | Suggested range of values | Suggestion for parameter to correlate with | Person responsible for implementing
    Terminal fall velocities of rain, snow and graupel | | | | | Sibbo
    RFRMIN(39) | Depo_rate_graupel | | RFRMIN 39 and 40 should approximately respect log10C = -3.55 x + 3.89, see eq. 6.2 on p. 108 in the meso-NH documentation: [https://hirlam.org/trac/attachment/wiki/HarmonieSystemDocumentation/EPS/SPP/sciICE3doc_p3.pdf Doc] | | Pirkka
    RFRMIN(40) | Depo_rate_snow | | RFRMIN 39 and 40 should approximately respect log10C = -3.55 x + 3.89, see eq. 6.2 on p. 108 in the meso-NH documentation: [https://hirlam.org/trac/attachment/wiki/HarmonieSystemDocumentation/EPS/SPP/sciICE3doc_p3.pdf Doc] | | Pirkka
    RFRMIN(16) | Distr_snow_c | | | to be correlated with RFRMIN(17) |
    RFRMIN(17) | Distr_snow_x | | | to be correlated with RFRMIN(16) |

    Experiments

    List with cy43h22 experiments is here: [wiki:HarmonieSystemDocumentation/EPS/ExplistSPPcy43 List of experiments]

    A guide for running the tuning experiments is here: [wiki:HarmonieSystemDocumentation/EPS/HowtoSPPcy43 Guide]


    The SPP implementation in IAL and HARMONIE

    The Stochastically Perturbed Parameterizations scheme (SPP) introduces stochastic perturbations to values of chosen closure parameters representing efficiencies or rates of change in parameterized atmospheric (sub)processes. See here for more information. See the main SPP documentation for selection of settings.

    Controlling routines

    The SPP data structures and logic are controlled by the following routines:

    Routine | Description
    src/arpifs/module/spp_mod.F90 | Defines the SPP scheme types TSPP_CONFIG_PAR and TSPP_CONFIG for the parameter config and the overall config respectively
    src/arpifs/module/spp_mod_type.F90 | Harmonie-specific data types TSPP_CONFIG_TYPE, ATM_SPP_VARS, SFX_VARS, control and the methods CLEAR_SSP_TYPE, SET_SPP_TYPE, APPLY_SPP, APPLY_SPP_SURFEX, DIA_SPP, SET_ALL_ATM_SPP, SET_ALL_SFX_SPP, CLEAR_ALL_ATM_SPP, CLEAR_ALL_SFX_SPP
    src/surfex/SURFEX/modd_sfx_spp.F90 | SURFEX-specific data types, control and methods CLEAR_SFX_SPP, SET_SFX_SPP, APPLY_SFX_SPP, CLEAR_ALL_SFX_SPP, SPP_MASK, SPP_DEMASK, PREP_SPP_SFX. Partly duplicates spp_mod_type.F90
    src/arpifs/namelist/namspp.nam.h | The SPP namelist
    src/arpifs/setup/get_spp_conf.F90 | Sets up defaults and reads the SPP namelist. Initialises the SPG parameters
    src/arpifs/phys_dmn/ini_spp.F90 | Initialises the pattern used for SPP
    src/arpifs/phys_dmn/evolve_spp.F90 | Control routine for pattern propagation
    src/mse/internals/aroset_spp.F90 | Initialises the SURFEX part of SPP

    Note that the control routines shared with IFS will be totally rewritten, and much neater, with the introduction of CY49T1. See e.g. spp_def_mod.F90, spp_gen_mod.F90

    SPG routines

    The pattern used for SPP within HARMONIE is SPG, and the code for this is found under src/utilities/spg. For the propagation of the pattern we find the routine EVOLVE_ARP_SPG in src/arp/module/spectral_arp_mod.F90.

    Applying the patterns

    In apl_arome.F90 the HARMONIE-specific data types are initialised with SET_ALL_ATM_SPP and SET_ALL_SFX_SPP. These routines group the different parameters and connect them to a pattern and to the correct diagnostic field (EZDIAG) if requested.

    Applying the patterns in the upper air part

    In the routine where a specific parameter is used, the pattern is applied by calling APPLY_SPP. This is done for each parameter according to the table below.

    Perturbation | Routine
    RADGR | src/arpifs/phys_dmn/apl_arome.F90
    RADSN | src/arpifs/phys_dmn/apl_arome.F90
    RFAC_TWOC | src/arpifs/phys_dmn/vdfexcuhl.F90
    RZC_H | src/arpifs/phys_dmn/vdfexcuhl.F90
    RZL_INF | src/arpifs/phys_dmn/vdfexcuhl.F90
    RZMFDRY | src/arpifs/phys_dmn/vdfhghtnhl.F90
    RZMBCLOSURE | src/arpifs/phys_dmn/vdfhghtnhl.F90
    CLDDPTHDP | src/arpifs/phys_dmn/vdfhghtnhl.F90
    RLWINHF | src/arpifs/phys_radi/recmwf.F90
    RSWINHF | src/arpifs/phys_radi/recmwf.F90
    PSIGQSAT | src/mpa/micro/internals/condensation.F90
    ICE_CLD_WGT | src/mpa/micro/internals/condensation.F90
    ICENU | src/mpa/micro/internals/rain_ice_old.F90
    KGN_ACON | src/mpa/micro/internals/rain_ice_old.F90
    KGN_SBGR | src/mpa/micro/internals/rain_ice_old.F90
    ALPHA | src/mpa/micro/internals/rain_ice_old.F90
    RZNUC | src/mpa/micro/internals/rain_ice_old.F90

    Applying the patterns in SURFEX

    As SURFEX should have no dependencies on external modules, the data is copied into the internal SURFEX SPP data structure in AROSET_SPP, called from ARO_GROUND_PARAM.

    For SURFEX the parameter table looks like

    Perturbation | Routine
    CV | src/surfex/SURFEX/coupling_isban.F90
    LAI | src/surfex/SURFEX/coupling_isban.F90
    RSMIN | src/surfex/SURFEX/coupling_isban.F90

    In SURFEX we also have to pack/unpack the data arrays to only use the active points for a specific tile or patch. This is done in the SPP_MASK and SPP_DEMASK routines found in src/surfex/SURFEX/modd_sfx_spp.F90 and called from src/surfex/SURFEX/coupling_surf_atmn.F90. At the time of writing, returning the diagnostics of the pattern doesn't work satisfactorily.

    The additional code changes done for SPP in SURFEX can be viewed here


    SPPT

    ** Since CY46h1 SPPT is no longer supported in HarmonEPS **

    The SPPT configuration within HarmonEPS is being tested over the period 2016053000 to 2016060500 using the MetCoOp domain. It has been found that there are some problems with the default pattern generator and thus it has been decided to use the Stochastic Pattern Generator (SPG).

    Below is a table of experiments which will be completed in order to find a suitable configuration of the SPG control parameters TAU (time correlation scale) and XLCOR (length correlation scale). The value of the standard deviation of the perturbation amplitudes (SDEV_SDT) is kept fixed at 0.20 as is the clipping ratio of the perturbations (XCLIP_RATIO_SDT=5.0). These values along with the default value for XLCOR come from suggested settings used by Mihaly Szucs.

    First of all, keeping the XLCOR parameter constant (set at the default value of 2000000), TAU will be varied between 1h and 24h as shown in the table. The value of TAU is in seconds in the table below. The value of XLCOR is in metres.

    The experiments are started by typing ~hlam/Harmonie start DTG=2016053000 DTGEND=2016060500 BUILD=yes

    Experiment Name | Who | DTG | DTGEND | Version | Domain | TAU | XLCOR | Description and Comments | Status | Verification
    SPPT_only_40h111_2000km_1h  | Alan      | 2016053000 | 2016060500 | harmonEPS40h1.1.1(17985) | METCOOP25B | 3600  | 2000000 | XLCOR constant, TAU varying | Suspended | No
    SPPT_only_40h111_2000km_3h  | Karoliina | 2016053000 | 2016060500 | harmonEPS40h1.1.1(17985) | METCOOP25B | 10800 | 2000000 | XLCOR constant, TAU varying | Crash     | No
    SPPT_only_40h111_2000km_6h  | Karoliina | 2016053000 | 2016060500 | harmonEPS40h1.1.1(17985) | METCOOP25B | 21600 | 2000000 | XLCOR constant, TAU varying | Complete  | Yes
    SPPT_only_40h111_2000km_9h  | Alan      | 2016053000 | 2016060500 | harmonEPS40h1.1.1(17985) | METCOOP25B | 32400 | 2000000 | XLCOR constant, TAU varying | Complete  | Yes
    SPPT_only_40h111_2000km_12h | Janne     | 2016053000 | 2016060500 | harmonEPS40h1.1.1(17985) | METCOOP25B | 43200 | 2000000 | XLCOR constant, TAU varying | Complete  | Yes
    SPPT_only_40h111_2000km_15h | Karoliina | 2016053000 | 2016060500 | harmonEPS40h1.1.1(17985) | METCOOP25B | 54000 | 2000000 | XLCOR constant, TAU varying | Complete  | Yes
    SPPT_only_40h111_2000km_18h | Alan      | 2016053000 | 2016060500 | harmonEPS40h1.1.1(17985) | METCOOP25B | 64800 | 2000000 | XLCOR constant, TAU varying | Complete  | Yes
    SPPT_only_40h111_2000km_21h | Janne     | 2016053000 | 2016060500 | harmonEPS40h1.1.1(17985) | METCOOP25B | 75600 | 2000000 | XLCOR constant, TAU varying | Complete  | Yes
    SPPT_only_40h111_2000km_24h | Janne     | 2016053000 | 2016060500 | harmonEPS40h1.1.1(17985) | METCOOP25B | 86400 | 2000000 | XLCOR constant, TAU varying | Complete  | Yes

Once these experiments have been completed, testing will commence with the time correlation scale kept constant while the spatial scale is varied. Below is a table of experiments to this effect.

    A default value of 8h will be used for TAU as per the suggested value from Mihaly Szucs.

Experiment Name | Who | DTG | DTGEND | Version | Domain | TAU | XLCOR | Description and Comments | Status | Verification
SPPT_only_40h111_100km_8h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 100000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_200km_8h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 200000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_400km_8h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 400000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_600km_8h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 600000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_800km_8h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 800000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_1000km_8h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 1000000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_1200km_8h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 1200000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_1500km_8h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 1500000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_1800km_8h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 1800000 | XLCOR varying, TAU constant | Complete | Yes

The next step in the SPPT sensitivity analysis will be a set of experiments designed to test the impact of the SDEV parameter. Default values of 8 h for TAU and 2000000 for XLCOR are used.

The XCLIP_RATIO_SDT parameter will also be adjusted as a function of the SDEV_SDT value, initially keeping the clipping at 1.0 (the clipping value is XCLIP_RATIO_SDT * SDEV_SDT; e.g. SDEV_SDT=0.2 with XCLIP_RATIO_SDT=5.0 clips at 1.0), but other options will also be explored.

Experiment Name | Who | DTG | DTGEND | Version | Domain | SDEV_SDT | XCLIP_RATIO_SDT | Description and Comments | Status | Verification
SPPT_only_40h111_sdev01 | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.1 | 10.0 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev02 | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.2 | 5.0 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev03 | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.3 | 3.3 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev04 | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.4 | 2.5 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev05 | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.5 | 2.0 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev06 | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.6 | 1.65 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev07 | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.7 | 1.4 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev08 | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.8 | 1.25 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev09 | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.9 | 1.1 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev10 | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 1.0 | 1.0 | SDEV and XCLIP varying | Complete | Yes
diff --git a/dev/EPS/Setup/index.html b/dev/EPS/Setup/index.html
diff --git a/dev/EPS/System/index.html b/dev/EPS/System/index.html

To activate the change we also need to modify scr/Get_namelist, the script that builds the namelist for us, so that it takes the member_$ENSMBR change into account.

     ...
      forecast|dfi|traj4d)
         NAMELIST_CONFIG="$DEFAULT dynamics $DYNAMICS $PHYSICS ${DYNAMICS}_${PHYSICS} $SURFACE $EXTRA_FORECAST_OPTIONS member_$ENSMBR"
   ...

Repeat this for all your members with the changes you would like to apply.

diff --git a/dev/ExperimentConfiguration/ConfigureYourExperiment/index.html b/dev/ExperimentConfiguration/ConfigureYourExperiment/index.html
MAIL_TESTBED=                           # testbed results summary

    Testbed

    export TESTBED_LIST="AROME AROME_1D AROME_3DVAR \
                          AROME_BD_ARO AROME_BD_ARO_IO_SERV \
                          HarmonEPS HarmonEPS_IFSENS \
                      AROME_CLIMSIM"
diff --git a/dev/ExperimentConfiguration/How_to_use_hires_topography/index.html b/dev/ExperimentConfiguration/How_to_use_hires_topography/index.html
AOSIP > 001:013-00600-105@20051219_00:00+0000 000 A/S i+ 0.000E+000 8.055E-003 548.990E-003 14.087E-003
AOSJP > 001:014-00600-105@20051219_00:00+0000 000 A/S j+ 0.000E+000 8.297E-003 461.020E-003 14.306E-003
AOSIM > 001:015-00600-105@20051219_00:00+0000 000 A/S i- 0.000E+000 8.280E-003 521.020E-003 14.863E-003
AOSJM > 001:016-00600-105@20051219_00:00+0000 000 A/S i- 0.000E+000 8.454E-003 471.930E-003 15.079E-003

Presently, these derivations are done automatically, so from the point of view of the technical implementation there is nothing for the user to worry about. However, further development and improvements will eventually be needed when high-resolution source data on topography are used.

    Conclusion

    In order to replace the (relatively) coarse-resolution GTOPO30 topography with higher-resolution data (e.g., from Aster), it is enough to generate replacements for the gtopo30.hdr and gtopo30.dir files in the $HM_CLDATA/PGD directory, as described in the upper part of this page.
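As a sketch of that procedure (the file names my_hires_topo.* are hypothetical; the replacement files must follow the same .hdr/.dir format as the GTOPO30 originals):

# Install higher-resolution topography in place of GTOPO30
cp my_hires_topo.hdr $HM_CLDATA/PGD/gtopo30.hdr
cp my_hires_topo.dir $HM_CLDATA/PGD/gtopo30.dir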

diff --git a/dev/ExperimentConfiguration/ModelDomain/index.html b/dev/ExperimentConfiguration/ModelDomain/index.html
 OUTGEO%PROJLAT = 60.0
 OUTGEO%PROJLAT2 = 60.0
 OUTGEO%PROJLON = 0.0,
/

    Running gl using this namelist by

    gl -n namelist_file

will create a GRIB file with a constant orography which you can use for plotting.

diff --git a/dev/ExperimentConfiguration/Namelists/index.html b/dev/ExperimentConfiguration/Namelists/index.html
#NAMELIST=$WRK/$WDIR/namelist_forecast
#Get_namelist forecast $NAMELIST
NAMELIST=$HM_LIB/nam/namelist_forecast_with_a_unique_name

For namelists not present in the dictionary you just copy them to your local nam directory.

    There is also a description on how to generate new namelist dictionaries here.

diff --git a/dev/ExperimentConfiguration/PlatformConfiguration/index.html b/dev/ExperimentConfiguration/PlatformConfiguration/index.html
./config-sh/config.YOURHOST  ## YOURHOST task submission settings
./suites/harmonie.pm         ## perl module to define ensemble settings
./ecf/config_exp.h           ## your experiment definition (scientific type options)
./scr/include.ass            ## assimilation specific settings

    But, what if your host configuration is not available in the HARMONIE system? Host specific configuration files in PATH_TO_HARMONIE/config-sh must be available for your host and configuration files for the compilation of the code must be available. This documentation attempts to describe what is required.

    Host config files

    Env_system -> config-sh/config.YOURHOST

The config.YOURHOST file defines host-specific variables such as some input directory locations. If YOURHOST is not already included in HARMONIE it may be worth looking at the config.* files in config-sh/ to see what other people have done. The table below outlines the variables set in config-sh/config.YOURHOST and what they do; a short sketch of a few entries follows the table.

Variable name | Description
COMPCENTRE | controls special ECMWF solutions (such as MARS) where required. Set to LOCAL if you are unsure
HARMONIE_CONFIG | defines the config file used by Makeup compilation
MAKEUP_BUILD_DIR | location where Makeup compiles the HARMONIE code
MAKE_OWN_PRECOMPILED | yes => install pre-compiled code in $PRECOMPILED
PRECOMPILED | location of (optional) pre-compiled HARMONIE code
E923_DATA_PATH | location of input data for E923, climate generation
PGD_DATA_PATH | location of input data for PGD, SURFEX climate generation
ECOSG_DATA_PATH | location of input data for ECOCLIMAP2G
GMTED2010_DATA_PATH | location of HRES DEM
SOILGRID_DATA_PATH | location of SOILGRID data
HM_SAT_CONST | location of constants for satellite assimilation
RTTOV_COEFDIR | location of RTTOV coefficients
HM_DATA | location of the top working directory for the experiment
HM_LIB | location of src/scripts and compiled code
TASK_LIMIT | maximum number of jobs submitted by ECFLOW
RSYNC_EXCLUDE | used to exclude .git* sub-directories from the copy of source code for compilation
DR_HOOK_IGNORE_SIGNALS | environment variable used by Dr Hook to ignore certain "signals"
HOST0 | defines the primary host name
HOSTN | defines other host name(s)
HOST_INSTALL | 0 => install on HOST0, 0:...:N => install on HOST0,...,HOSTN
MAKE | make command; may need to be explicitly defined. Set to make for most platforms
MKDIR | mkdir command (default: mkdir -p)
JOBOUTDIR | where ECFLOW writes its log files
ECCODES_DEFINITION_PATH | location of local ecCodes definition files
BUFR_TABLES | location of local BUFR tables
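As a minimal illustration, a few entries of a config-sh/config.YOURHOST might look as follows (the values are hypothetical; only the variable names come from the table above):

export COMPCENTRE=LOCAL              # no special ECMWF solutions
export HM_DATA=$HOME/hm_data/$EXP    # hypothetical top working directory
export MAKE=make                     # explicit make command
export MKDIR="mkdir -p"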

    Env_submit -> config-sh/submit.YOURHOST

The Env_submit file uses perl to tell the HARMONIE scheduler how to execute programs: which programs should be run on multiple processors, and how batch submissions are defined if required. A minimal sketch follows the table below.

perl | description
%backg_job | defines variables for jobs run in the background on HOST0
%scalar_job | defines variables for single-processor batch jobs
%par_job | defines variables for multi-processor batch jobs
@backg_list | list of tasks to be submitted as background jobs
@scalar_list | list of tasks to be submitted as scalar jobs
@par_list | list of tasks to be submitted as parallel jobs
default | "wildcard" task name to define the default type of job for unlisted tasks

    Host summary

YOURHOST | Host type | batch | Contact | Notes
AEMET.cirrus.gnu | | | |
AEMET.nimbus.ifort.mpi | | | |
bi | | | SMHI |
centos8 | | | |
cirrus | | | |
debian11 | | | |
ecgb | | | | switched off
ecgb-cca | ECMWF HPC with MPI dual host | slurm/PBS | | switched off
ECMWF.atos | ECMWF Atos HPC with MPI | slurm | |
fedora33 | | | |
fedora34 | | | |
KNMI.bullx_b720 | KNMI Atos HPC with MPI | slurm | Bert van Ulft |
LinuxPC | General Linux PC no MPI | none | |
LinuxPC-MPI | General Linux PC with MPI | none | |
LinuxPC-MPI-KNMI | KNMI Linux workstation (Fedora) | none | |
LinuxPC-MPI-ubuntu | Ubuntu Linux PC with MPI | none | |
LinuxPC-serial | | | |
METIE.LinuxPC | METIE CentOS 6 PC with MPI | none | Eoin Whelan |
METIE.LinuxPC8 | | | |
METIE.LinuxRH7gnu | METIE Redhat 7 server with MPI | none | Eoin Whelan |
METIE.LinuxRH7gnu-dev | | | |
METIE.reaserve8 | | | |
METIE.reaserve8musc | | | |
nebula | | | |
nebula-gnu | | | |
opensuse | | | |
SMHI.Linda4 | | | SMHI |
SMHI.Linda5 | | | SMHI |
stratus | | | |
teho | | | |
ubuntu18 | | | |
ubuntu20 | | | |
ubuntu20_nompi | | | |
voima | | | |

    Compilation config files

    Makeup

    config files required for compilation of code using Makeup ...

    More information on Makeup is available here: Build with Makeup

    Obsmon

For the config files required for compilation of obsmon, check util/obsmon/config

diff --git a/dev/ExperimentConfiguration/UpdateNamelists/index.html b/dev/ExperimentConfiguration/UpdateNamelists/index.html
diff --git a/dev/ExperimentConfiguration/UseofObservation/index.html b/dev/ExperimentConfiguration/UseofObservation/index.html
export PAOB_OBS=0   # PAOB not defined everywhere
export SCATT_OBS=0  # Scatterometer data not defined everywhere
export LIMB_OBS=0   # LIMB observations, GPS Radio Occultations
export RADAR_OBS=0  # Radar
diff --git a/dev/ExperimentConfiguration/VerticalGrid/index.html b/dev/ExperimentConfiguration/VerticalGrid/index.html

    HARMONIE Vertical Model Level Definitions

    HARMONIE vertical coordinate

The HARMONIE model, like HIRLAM, is constructed on a general pressure-based and terrain-following vertical coordinate $\eta(p,p_s)$, where

\[\eta(0,p_s) = 0\]

    and

    \[\eta(p_s,p_s) = 1\]

    The formulation corresponds to the ECMWF hybrid system. The model is formulated for a spherical coordinate system ($\lambda$, $\theta$), but in the code two metric coefficients $(h_x,h_y)$ have been introduced. This is done to prepare the model for any orthogonal coordinate system or map projection with axes (x,y).

To represent the vertical variation of the dependent variables (U, V, T and Q), the atmosphere is divided into "nlev" layers. These layers are defined by the pressures at the interfaces between them (the `half-levels'). From the general expression

\[p_{k+1/2} = A_{k+1/2}(n) + B_{k+1/2}(n) \cdot p_s(x,y)\]

for $k=0,1,\ldots,nlev$

the vertical surfaces for the half-levels are defined. Pure pressure surfaces are obtained for $B=0$ and pure $\sigma$ surfaces for $A=0$. The `full-level' pressure associated with each model level (the middle of two half-levels) is then determined accordingly.
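For example, with the common choice of a simple arithmetic mean (the exact rule is implementation dependent), the full-level pressure is

\[p_k = \frac{1}{2}\left(p_{k-1/2} + p_{k+1/2}\right)\]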

    Definition of model levels in HARMONIE

The script scr/Vertical_levels.pl contains the definitions of vertical levels that have been used in the HIRLAM community for research and/or operational purposes. Currently the default model setup uses a 65-level structure as derived by Per Unden, SMHI. Model level definitions for commonly used vertical structures in HARMONIE are listed below.

    • FourtyLevel: HIRLAM_40 model levels (same as Hirlam 6.2.1, Nov 2003 - HIRLAM 7.0, 2006 )
    • SixtyLevel: HIRLAM-60 model levels (same as Hirlam 7.1, March 2007 - 2012 )
• MF_60: MF-60 model levels (same as Meteo France AROME since 2010 )
    • SixtyfiveLevel: 65 model levels (same as Hirlam 7.4, March 2012 - )
    • other levels: Prague87, MF70, 40 (ALADIN-40), ECMWF_60.

Note that VLEV is the name of the set of A/B coefficients defining your levels, set in ecf/config_exp.h. There is, e.g., more than one definition for 60 levels. To print the levels just run scr/Vertical_levels.pl

    Usage: scr/Vertical_levels.pl [VLEV PRINT_OPTION] where:

    • VLEV: name of your level definition
    • PRINT_OPTION=AHALF: print A coefficients for VLEV
    • PRINT_OPTION=BHALF: print B coefficients for VLEV
    • PRINT_OPTION=NLEV: print number of levels for VLEV
    • PRINT_OPTION=NRFP3S: print NRFP3S namelist values for VLEV
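For example, to print the number of levels and the A coefficients for the MF_60 definition listed above:

scr/Vertical_levels.pl MF_60 NLEV
scr/Vertical_levels.pl MF_60 AHALF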

    See here for ECMWF level definitions.

When performing a HARMONIE experiment, users can select the vertical levels by changing VLEV in ecf/config_exp.h. If a non-standard level number is to be chosen, the script scr/Vertical_levels.pl needs to be edited to add the layer definition.

    Define new eta levels

A brief description and some code on how to create new eta levels can be found here.

    There is also an interactive tool that can help you in creating a new set of levels.

    The method is based on a program by Pierre Bénard, Meteo France, that is described in this gmapdoc article.

    Relevant corresponding data set for different vertical structure

HARMONIE 3D-VAR and 4D-VAR upper-air data assimilation needs background-error structure functions for each given vertical layer structure. Note that the structure-function data included in the reference HARMONIE repository const/jb_data are only useful for the reference configuration. Users that run 3D-VAR/4D-VAR are strongly recommended to derive proper structure-function data from their own data archive, following the instructions in the HARMONIE wiki, to avoid improper use of the structure functions.

diff --git a/dev/ExperimentConfiguration/namelist_sfx_forecast/index.html b/dev/ExperimentConfiguration/namelist_sfx_forecast/index.html
diff --git a/dev/ForecastModel/Forecast/index.html b/dev/ForecastModel/Forecast/index.html

    Forecast

scr/Forecast is the script that initiates the actual forecast run (ALADIN/AROME/ALARO depending on FLAG and PHFLAG).

    • Input parameters: none.
    • Data: Boundary files (ELSCF*-files). Initial file (fc_start). If data assimilation is used, fc_start is the analysis file. In case of dynamical adaptation, fc_start is the first boundary file. In case of AROME, Surfex initial file (SURFXINI.lfi) is also needed scr/Prep_ini_surfex.
    • Namelists: namelist templates nam/namelist_fcst${FLAG}_default are fetched based on FLAG and PHFLAG. The templates are completed in scr/Forecast based on the choices of NPROCX, NPROCY (see config-sh/submit.*), TFLAG, OUTINT, BDINT and REDUCELFI. In case of AROME also the namelists to control SURFEX-scheme nam/TEST.des and nam/EXSEG1.nam are needed.
    • Executables: as defined by MODEL.
    • Output: Forecast files (spectral files ICMSHALAD+*). In case of AROME, Surfex files containing the surface data (AROMOUT_*.lfi).

    Forecast namelists

The current switches in the HARMONIE system (in ecf/config_exp.h) provide only a very limited possibility to control the different aspects of the model. If the user wants more detailed control of specific schemes etc., one has to modify the various namelist options.

In general, the different namelist options are documented in the source code modules (e.g. src/arp/module/*.F90). Information on some of the choices is listed below.

    NH-dynamics/advection/time stepping:

• A detailed overview of such options has been given by Vivoda (2008).

    Upper air physics switches

    • Switches related to different schemes of ALADIN/ALARO physics, src/arp/module/yomphy.F90.
    • Switches related to physics schemes in AROME src/arp/module/yomarphy.F90.
    • Switches to tune different aspects of physics, src/arp/module/yomphy0.F90, src/arp/module/yomphy1.F90, src/arp/module/yomphy2.F90 and src/arp/module/yomphy3.F90
    • Switches related to HIRLAM physics, src/arp/module/yhloption.F90 and src/arp/setup/suhloption.F90.

    Initialization switch

    • Initialization is controlled by namelist NAMINI/NEINI, src/arp/module/yomini.F90.

    Horizontal diffusion switches

• Horizontal diffusion is controlled by namelist NAMDYN/RDAMP*, src/arp/module/yomdyn.F90#L55. The larger the coefficient, the less diffusion.
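A minimal sketch of such a change in nam/harmonie_namelists.pm, using the hash style shown elsewhere in these docs (the particular RDAMP* names and values are illustrative):

'NAMDYN' => {
  'RDAMPVOR' => '20.,',   # larger value => less diffusion of vorticity
  'RDAMPDIV' => '20.,',   # illustrative: corresponding divergence coefficient
},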

    MPP switches

• The number of processors in HARMONIE is given in config-sh/submit.*. These values are transferred into src/arp/module/yomct0.F90#L276 and src/arp/module/yommp.F90.

    Surface SURFEX switches

    • The SURFEX scheme is controlled through namelist settings in nam/surfex_namelists.pm. The different options are described here.

    Archiving

Archiving has a two-layer structure. Firstly, all the needed analysis, forecast and field-extract files are stored in the ARCHIVE directory by scr/Archive_fc. This is the place where the postprocessing step expects to find the files.

At ECMWF all the requested files are stored in ECFS, in the directory ECFSLOC, by the script scr/Archive_ECMWF.

diff --git a/dev/ForecastModel/ForecastSettings/index.html b/dev/ForecastModel/ForecastSettings/index.html

    Forecast Settings

This page gives some details and advice on appropriate settings for the HARMONIE-AROME forecast.

    Microphysics

    ICE-T

Switch ICE-T on by setting LICET=.TRUE. in harmonie_namelists.pm under &NAMPARAR in %arome (see the sketch below). When using ICE-T (LICET), LOCND2 should be set to .TRUE., and LMODICEDEP preferably to .FALSE.. LICET will override LKOGAN, so by default LKOGAN=F. Documentation: (Engdahl et al., 2020)
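A minimal sketch of these settings in the perl namelist hash (structure as in nam/harmonie_namelists.pm; surrounding entries omitted):

%arome = (
  'NAMPARAR' => {
    'LICET'      => '.TRUE.,',   # switch ICE-T on
    'LOCND2'     => '.TRUE.,',   # required with ICE-T
    'LMODICEDEP' => '.FALSE.,',  # preferred with ICE-T
  },
);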

Description: ICE-T is a modified cloud microphysics scheme that builds upon ICE3 and OCN2D, with elements from the Thompson scheme from WRF. ICE-T was developed in cy40h1.1 for the purpose of better representation of supercooled liquid water, and downstream forecasts of atmospheric icing. The changes include stricter conditions for ice nucleation, less efficient collection of liquid water by snow and graupel, and a variable rain size distribution depending on the source of the rain. (Rain originating from melting snow or graupel has larger drops than rain originating from warm processes.)

    Shallow Convection

LSHALLOWMF activates (.TRUE.) or de-activates (.FALSE.) the DUAL (dry and moist) mass-flux shallow convection parameterisation. Note that with LSHALLOWMF=.FALSE. the mass-flux activity as a source term for TKE in the turbulence scheme (the energy cascade) will also be eliminated, as will the moist updraft transport contribution to the cloud scheme. For details of the convection scheme and its links to the cloud and turbulence schemes, see https://doi.org/10.5194/gmd-15-1513-2022.

The scale-aware convection scheme is activated by setting LSCAWAREMF=.TRUE.. This reduces the dry and moist (if present) mass flux using a hyperbolic tangent function scaled with the dry boundary layer height $h$ for the dry updraft and the sub-cloud height plus cloud-layer depth $h+h_c$ for the moist updraft:

    \[f = \tanh\left(1.86 \frac{\Delta x}{h+h_c}\right)\]
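As an illustration (hypothetical values), with $\Delta x = 2500$ m and $h+h_c = 2000$ m this gives $f = \tanh(2.33) \approx 0.98$, i.e. almost no reduction; the reduction only becomes substantial where $\Delta x$ is small compared with the updraft depth.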

    NOTE: this option can only be used when LSHALLOWMF=.TRUE..

To support the model when it is trying to build up convection itself, the setting LWTHRESH=.TRUE. can be used. Depending on the grid size, a vertical velocity threshold is defined. If the absolute value of the vertical velocity in a grid column exceeds this threshold, the shallow convection is shut down.

    NOTE: this option can only be active when LSHALLOWMF=.TRUE..

    Turbulence scheme

    HARATU

HARATU (HArmonie with RAcmo TUrbulence scheme) is the default turbulence scheme in HARMONIE-AROME (HARATU=yes in config_exp.h), originally developed for RACMO (Regional Atmospheric Climate MOdel). The length scale of this turbulence scheme is described by (Lenderink and Holtslag, 2004). Note that HARATU has only been tested in combination with LSHALLOWMF=.TRUE. and CMF_UPDRAFT='DUAL'. The latter convection scheme provides input to the HARATU turbulence scheme to represent the important energy cascade (from large to small scales), see https://doi.org/10.5194/gmd-15-1513-2022

diff --git a/dev/ForecastModel/HR/index.html b/dev/ForecastModel/HR/index.html
   'RDAMPVD'  => '20.,',
   'RDAMPVOR' => '20.,',
},

With a quadratic or cubic grid and non-zero VESL, these defaults have been found to be adequate. Without VESL, higher levels of diffusion through lower RDAMP* values of 10 or even 1 are necessary.

    SLHD

Experiments at Météo France suggest not using SLHD on hydrometeors; cf. the ASM 2020 presentation by Yann Seity.

    In ecf/config_exp.h

    LGRADSP=yes                             # Apply Wedi/Hortal vorticity dealiasing (yes|no)
LUNBC=yes                               # Apply upper nested boundary condition (yes|no)

    Sample configurations

    Coming soon...

diff --git a/dev/ForecastModel/NearRealTimeAerosols/index.html b/dev/ForecastModel/NearRealTimeAerosols/index.html

    Near Real Time Aerosols

The model can be configured to use near-real-time (n.r.t.) aerosols from CAMS. This is done by setting USEAERO=camsnrt in ecf/config_exp.h, which leads to retrieval of boundary files containing aerosol mass-mixing-ratio fields from CAMS. Other values of USEAERO are related to the use and generation of climatological (2D) aerosol. Please see scr/forecastmodelsettings.sh for some further details.
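A minimal sketch of the switch (in ecf/config_exp.h):

USEAERO=camsnrt   # near-real-time CAMS aerosol; boundary files will carry aerosol MMR fields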

    • Namelist NAMNRTAER contains switches related to n.r.t. aerosols in cloud microphysics under src/mpa.
• Namelist NAMPHY contains the definitions of LAEROSEA, LAEROLAN, LAEROSOO, LAERODES, LAEROVOL, LAEROSUL, LAEROMMR and LAERONRT for src/arpifs. LAERONRT is set true when n.r.t. aerosols are used. The others are related to climatological aerosol and are set false when n.r.t. aerosols are used.
    • Aerosol fields in YAERO_NL are defined in namelist NAMGFL. Variable NAERO defines the number of available n.r.t. aerosol species (14).
    • Namelist NAERAD contains definition of NAER=1/0 to use or not to use climatological aerosol for radiation. When LAERONRT is set true, NAER is set to 0.

    NAMNRTAER namelist

The switches and some parameters can be set in NAMNRTAER (in nam/harmonie_namelists.pm); a sketch follows the list below.

• LCAMS_NRT: switches on the use of CAMS aerosols in HARMONIE-AROME; the mass-mixing-ratio fields must be present in the first guess and the boundary conditions. The number and names of those fields are specified in the namelist NAMGFL.
• SSMINLO: Supersaturation at the surface level (default 0.05%). The supersaturation activates the condensation nuclei (CN) to obtain CCN.
• SSMINUP: Supersaturation above SSHEIGHT (default 0.08%).
• SSHEIGHT: Height above which the minimum SS is SSMINUP (default 100 m).
    • SSMAX: Maximum supersaturation (default 1.0%).
    • SSFACVV: Factor for dependence of SS with vertical velocity (0.0-1.0).
    • SSFACSS: Factor for dependence of SS with coarse sea salt (0.0-1.0).
• CCNMIN: Minimum number concentration of Cloud Condensation Nuclei (CCN) inside the cloud. It is taken as 10E6 m-3 (10 cm-3); other values can be considered, but probably not above 50 cm-3.
• CLDROPMIN: Minimum CDNC inside the cloud. It is practically the same as CCNMIN; other values can be considered, but probably not above 50 cm-3.
    • IFNMINSIZE: Minimum radius of aerosol ice nucleating particles (default 0.01 micrometer).
• LMOCA_NRT: For getting the aerosol fields from MOCAGE (not yet in use).
• LAEIFN: Activates ice nuclei (mainly dust, and hydrophobic organic matter and black carbon).
• LAERDRDEP: Activates aerosol dry deposition (FALSE by default).
    • LAECCN2CLDR: By default LAECCN2CLDR=FALSE, that is CDNC=CCN.
    • LAERSSEM: switch for sea salt emission (FALSE by default).
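A minimal sketch of such settings in nam/harmonie_namelists.pm, using the defaults stated above (hash style as elsewhere in these docs; surrounding entries omitted):

'NAMNRTAER' => {
  'LCAMS_NRT' => '.TRUE.,',  # use CAMS n.r.t. aerosols
  'SSMINLO'   => '0.05,',    # supersaturation (%) at the surface level
  'SSMINUP'   => '0.08,',    # supersaturation (%) above SSHEIGHT
  'SSHEIGHT'  => '100.,',    # height (m) above which the minimum SS is SSMINUP
},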
diff --git a/dev/ForecastModel/OCDN2/index.html b/dev/ForecastModel/OCDN2/index.html

    Documentation of OCND2 modification of ICE3/ICE4 microphysics in AROME

    Introduction

This option was implemented in the ICE3/ICE4 microphysics in 2014 in order to improve the performance of the HARMONIE-AROME model configuration in winter over the Arctic/subarctic region. The errors corrected were mainly missing low clouds in moderately cold conditions, an excess of ice clouds in severely cold weather, and an excess of cirrus clouds.

    Implementation in CY46 - switching on the parameterisation

To use the parameterisation, go to nam/harmonie_namelists.pm and set LOCND2=.TRUE. in the NAMPARAR namelist.
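A minimal sketch (hash style as in nam/harmonie_namelists.pm; surrounding entries omitted):

'NAMPARAR' => {
  'LOCND2' => '.TRUE.,',   # switch on the OCND2 modifications
},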

    About the Code

In CY46 there are two coding versions of ICE3/ICE4: rain_ice_old.F90 and rain_ice.F90. The variable CMICRO determines which is used: OLD3 for rain_ice_old.F90 and ICE3 for rain_ice.F90. The structure of the code differs between these versions, and since the time-stepping procedure is different, the results differ too. But the content of the modification for OCND2 is the same in both versions. The modifications can be found by searching for

IF(OCND2) THEN
   --- new code ---
ENDIF

The main OCND2 modifications are

    1. Tuning factors for reducing the rate of deposition/evaporation of snow and graupel. See code block “1.2 COMPUTE SOME CONSTANT PARAMETERS” in rain_ice_old.F90 or in ice4_slow.F90. The tuning factors are then used later in rain_ice_old.F90, see code block “3.4.3 compute the deposition on rs: RVDEPS" for snow and in “3.4.6 compute the deposition on rg: RVDEPG” for graupel. In the rain_ice.F90 framework it is all done in the routine ice4_slow.F90. More information about the tuning parameters is included later in this documentation.
    2. Mask to limit computation: Set by tuning parameters in the code block “1.2 COMPUTE SOME CONSTANT PARAMETERS“ in rain_ice_old.F90 or in aro_rain_ice.F90 within the rain_ice.F90 framework. For OCND2=FALSE the limits are hard-coded.
    3. The cloud ice crystal concentration: Modified with OCND2, see code block “3.1.1 compute the cloud ice concentration” in rain_ice_old or ice4_nucleation.F90 within the rain_ice.F90 framework.
    4. Turn large cloud ice crystals into snow: See code block “3.4.5 B:” in rain_ice_old.F90 or ice4_fast_si.F90 within the rain_ice.F90 framework.
5. Omit collisions between snow and graupel, since the effect in nature is very small; omitting them also speeds up the computation a little. See code block “6.2.5” in rain_ice_old.F90 or ice4_fast_rg.F90 respectively.
    6. Sub grid-scale calculation of deposition/evaporation of cloud ice. See code block “7.2 Bergeron-Findeisen effect: RCBERI” in rain_ice_old.F90 or ice4_fast_si.F90 for the rain_ice.F90 set up.

There is also an important difference in condensation.F90: with OCND2, only liquid cloud water is handled within the statistical cloud scheme, not both ice and water as is the case with OCND2=F. With OCND2=F, the total cloud cover is calculated directly from the statistical cloud scheme. With OCND2=T, the total cloud cover is calculated as the sum of a liquid part, which is basically just the cloud cover from the statistical cloud scheme, and an ice part, which is based on the relative humidity with respect to ice and on the content of solid water species.

    There are two new routines for OCND2:

    1. icecloud.F90 is used for the sub grid-scale handling of relative humidity with respect to ice and thus for ice clouds. It is called from condensation.F90.
    2. ice4_fast_si.F90 is only used by the newer rain_ice.F90 routine. As already mentioned, it deals with deposition/evaporation of cloud ice.

    Tuning parameters

    The tuning parameters used specifically for OCND2 can be divided into three categories:

Parameters that only have an effect if OCND2 is set to TRUE and that are used for SPP (as of April 2023):

| Variable | Description |
| --- | --- |
| RFRMIN(21) | Tuning factor for ice clouds, such as cirrus. A larger value means a larger effect of the presence of solid water and thus more ice clouds. (The value depends somewhat on what kind of measurement one compares with, and on how thin a cirrus cloud should be to count as a cloud. A range of 0.5 to 3 should be enough.) |

Parameters that only have an effect if OCND2 is set to TRUE but are currently (April 2023) not used in SPP:

| Variable | Description |
| --- | --- |
| RFRMIN(12) | Threshold supersaturation with respect to ice in the supersaturated part of the grid box for treatment in the microphysics computation. A larger value gives more supersaturation and a somewhat faster computation. Values that are too large are physically unrealistic, but there seems to be no consensus about the best value. |
| RFRMIN(13) | Threshold mixing ratio for the non-vapour water species treated in the microphysics computation. Larger values give a faster computation, but possibly important processes may be missed when only small mixing ratios of water species are present. |
| RFRMIN(15) | Ice crystal diameter (m) for conversion from cloud ice to snow. Larger values lead to more ice and less snow. |
| RFRMIN(27) | Experimental! Minimum temperature (K) used for the Meyers ice number concentration. Larger values give less ice for temperatures below RFRMIN(27). |
| RFRMIN(39) | Speed factor for the deposition/evaporation rate of graupel. Larger values give faster deposition/evaporation. |
| RFRMIN(40) | Speed factor for the deposition/evaporation rate of snow. Larger values give faster deposition/evaporation. |

Parameters that have an effect even when OCND2 is not used, but were designed for OCND2:

| Variable | Description |
| --- | --- |
| RFRMIN(1), RFRMIN(2), RFRMIN(3) and RFRMIN(4) | Different thresholds for snow, ice, graupel and graupel again, respectively, leading to conversion of super-cooled rain into graupel. A higher value gives more super-cooled rain, but may be less physically realistic. |
| RFRMIN(7) | Tuning factor for the collisions between rain and snow. Higher values give less super-cooled rain and more snow. Zero means that those collisions are disregarded (probably OK). |

Full list of RFRMIN variables (included here for completeness; not all are OCND2-related)

| Variable | Value | Description |
| --- | --- | --- |
| RFRMIN(1) | 1.0E-5 | A higher value means more supercooled rain and somewhat less graupel. |
| RFRMIN(2) | 1.0E-8 | "" |
| RFRMIN(3) | 3.0E-7 | "" |
| RFRMIN(4) | 3.0E-7 | "" |
| RFRMIN(5) | 1.0E-7 | A higher value means less graupel and more snow. Experimental. |
| RFRMIN(6) | 0.15 | A higher value means more graupel and less snow. Experimental. |
| RFRMIN(7) | 0. | A higher value means less supercooled rain and somewhat more snow. |
| RFRMIN(8) | 1. | > 1 increases the melting of graupel, < 1 decreases it. Experimental. |
| RFRMIN(9) | 1. | > 1 means increased IN concentration, < 1 decreased. |
| RFRMIN(10) | 10. | > 10 means faster Kogan autoconversion, < 10 slower; only active for LKOGAN=T. This originates from the fact that the formula was based on an LES model with a higher horizontal resolution: with a coarser resolution and an inhomogeneous cloud liquid field one has to add a compensating factor in order to retain the original mean autoconversion. Tests show that a lower value, e.g. 3, would be better and more in line with what ECMWF is using. The value 10 is, to some extent, a way of decreasing fog, but there are now many other ways to reduce fog. |
| RFRMIN(11) | 1. | Setting e.g. 0.01 means that the subgrid-scale fraction of cloud water is used, with a minimum cloud fraction of 0.01. Only active for LKOGAN=T. |
| RFRMIN(12) | 0. | The level of supersaturation in the ice-supersaturated part of the grid box required for treatment in the ice microphysics. (Greg Thompson recommends a higher value, 0.05-0.25; in MetCoOp 0.05 is used.) A higher value means faster computations, but also that any ice deposition in clear sky is neglected for ice supersaturations between zero and RFRMIN(12). Only used with OCND2. |
| RFRMIN(13) | 1.0E-15 | The mixing ratio of any water species required for treatment in the ice microphysics. The value 1.0E-15 is taken from old HIRLAM. Only used with OCND2. |
| RFRMIN(14) | 120. | Time scale for conversion of large ice crystals to snow. Only used with LMODICEDEP (experimental). |
| RFRMIN(15) | 1.0E-4 | Diameter for conversion of ice crystals into snow. A larger value gives more ice and less snow. |
| RFRMIN(16) | 0. | “C” parameter for the size distribution of snow (constant for number concentration, N=Cλ^x). Only active if non-zero. Experimental. |
| RFRMIN(17) | 0. | “x” parameter for the size distribution of snow (slope for number concentration, N=Cλ^x). Only active if RFRMIN(16) is non-zero. Experimental. |
| RFRMIN(18) | 0. | With RFRMIN(18)=1, snow and graupel melting is based on the wet-bulb temperature instead of the temperature, which leads to slower melting. Experimental. |
| RFRMIN(19) | 0. | Threshold cloud thickness for the StCu/Cu transition [m]. Only active for the EDMF scheme and if non-zero, but very small effect. |
| RFRMIN(20) | 0. | Threshold cloud thickness used in the shallow/deep decision [m]. Only active for the EDMF scheme and if non-zero; a higher value gives more shallow convection and less deep model-resolved convection. |
| RFRMIN(21) | 1. | Tuning parameter for ice clouds. A larger value gives more cirrus and other ice clouds. |
| RFRMIN(22) | 1. | Tuning parameter for CDNC at the lowest model level. A lower value gives lower CDNC; RFRMIN(22)=0.5 means CDNC = old CDNC x 0.5. |
| RFRMIN(23) | 0.5 | Tuning parameter, only active with LHGT_QS. The lower limit for reduction of VSIGQSAT. |
| RFRMIN(24) | 1.5 | Tuning parameter, only active with LHGT_QS. The upper limit for increase of VSIGQSAT. |
| RFRMIN(25) | 30. | Tuning parameter, only active with LHGT_QS. The level thickness for which VSIGQSAT is unchanged with LHGT_QS. |
| RFRMIN(26) | 0. | If > 0.01, it replaces the default CDNC everywhere. So RFRMIN(26)=50E6 (note that the unit is m-3!) gives CDNC = 50 cm-3 at the reference level (1000 hPa) and RFRMIN(26) x pressure / reference pressure elsewhere. |
| RFRMIN(27) | 0. | Minimum assumed temperature with respect to the Meyers IN concentration (K). Gives a lower IN concentration for temperatures below the value set. Experimental! |
| RFRMIN(28) | 0. | Currently not used. |
| RFRMIN(29) | 0. | If > 0 and RFRMIN(22) > 0, it gives the upper limit in metres up to which the reduction of CDNC has an effect. A linear decrease from the lowest level to RFRMIN(29) metres is assumed. |
| RFRMIN(30) | 1. | If not unity, CDNC is reduced/increased over sea by a factor RFRMIN(30) at the lowest model level, linearly reaching "no change" at RFRMIN(29) m height. If RFRMIN(29) is unset, RFRMIN(30) only affects the lowest model level. |
| RFRMIN(31:38) | 0. | Currently not used. |
| RFRMIN(39) | 0.25 | Reduction factor for deposition/evaporation of graupel. Only used when OCND2=T and LMODICEDEP=F. |
| RFRMIN(40) | 0.15 | Reduction factor for deposition/evaporation of snow. Only used when OCND2=T and LMODICEDEP=F. |
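As an illustration, a hedged sketch of overriding two of these tuning factors at run time (it assumes RFRMIN is read from the NAMPARAR namelist, consistent with the OCND2 settings above; the target file name namelist_forecast is hypothetical):

```sh
# Append OCND2-related tuning overrides to the forecast namelist (sketch)
cat >> namelist_forecast << 'EOF'
&NAMPARAR
  RFRMIN(21)=2.0,
  RFRMIN(39)=0.5,
/
EOF
```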
diff --git a/dev/ForecastModel/Outputlist/index.html b/dev/ForecastModel/Outputlist/index.html
index 7998b2efde..a43969fdc1 100644
--- a/dev/ForecastModel/Outputlist/index.html
+++ b/dev/ForecastModel/Outputlist/index.html
@@ -7,4 +7,4 @@

    Parameter list and GRIB definitions

    HARMONIE system output

The HARMONIE system writes its primary output, in FA format, to the upper-air history files ICMSHHARM+llll and the SURFEX history files ICMSHHARM+llll.sfx, where HARM is the four-character experiment identifier set in the configuration file config_exp.h, and llll is normally the current forecast length in hours. These files are designed to be complete snapshots of the respective model state for a particular point in time. In addition, more model output, including post-processing/diagnostic fields such as model diagnostics or pressure-level diagnostics, can be written out during the forecast model integration, also in FA format, as PFHARMDOMAIN+llll. The FA files can be considered internal-format files. All of them can be converted to GRIB files during the run for external use; some illustrative file names are sketched below.
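A hedged illustration of the file names (the zero-padded four-digit hour and the DOMAIN part are assumptions based on the patterns above):

```sh
# Example output files for experiment identifier HARM at forecast hour 12
ls ICMSHHARM+0012       # upper-air history file (FA)
ls ICMSHHARM+0012.sfx   # SURFEX history file (FA)
ls PFHARMDOMAIN+0012    # post-processed/diagnostic output (FA)
```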

    GRIB1 table 2 version in HARMONIE

To avoid conflicts with archived HIRLAM data, HARMONIE uses version 253 of table 2. The table is based on the standard WMO version 3 of table 2, and positions 000-127 are kept the same as in the WMO table. Note that accumulated and instantaneous versions of the same parameter differ only by the time range indicator. It is thus not sufficient to specify parameter, type and level when you refer to an accumulated parameter; the time range indicator has to be included as well.

The translation of SURFEX files to GRIB1 is still incomplete and contains several WMO violations. This is not changed in the current release but will be revised later. However, the upper-air history file also includes the most common surface parameters and should be sufficient for most users.

The current table 2 version 253 definition files for gribapi can be found in `util/glgrib_api/definitions/`. These local definition files assume centre=233 (Dublin) and should be copied to your own GRIB-API installation. You are strongly recommended to set your own generating-centre code for operational usage of the data.

    GRIB2 in HARMONIE

The possibility to convert to GRIB2 was introduced in release-43h2. So far the conversion is restricted to atmospheric history and fullpos files. To get the output in GRIB2, set ARCHIVE_FORMAT=GRIB2 in ecf/config_exp.h. Please note that even if ARCHIVE_FORMAT=GRIB2 is selected, SURFEX files will still be converted to GRIB1 (for the time being). To convert from GRIB1 to GRIB2 using grib_filter, we have to tell EcCodes how to translate the parameters. This is done by using the internal HARMONIE tables and setting

    export ECCODES_DEFINITION_PATH=$SOME_PATH_TO_GL/gl/definitions:$SOME_PATH_TO_ECCODES/share/eccodes/definitions

Note that there are a few parameters that are not translated to GRIB2, and those have to be excluded explicitly. The two settings are summarised below.
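A minimal sketch combining the two settings above (the $SOME_PATH_* placeholders must point at your own gl and EcCodes installations):

```sh
# In ecf/config_exp.h: request GRIB2 for atmospheric history and fullpos output
ARCHIVE_FORMAT=GRIB2

# In the environment of the conversion job: let EcCodes find the HARMONIE tables
export ECCODES_DEFINITION_PATH=$SOME_PATH_TO_GL/gl/definitions:$SOME_PATH_TO_ECCODES/share/eccodes/definitions
```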

    List of parameters

Header abbreviations in the tables:

| abbr. | description | see table |
| --- | --- | --- |
| lvT | levelType | level types |
| iOP | indicatorOfParameter | indicator of parameter |
| d | discipline | |
| pC | parameterCategory | |
| pN | parameterNumber | |
| lev | level | |
| sT | stepType | time range indicator |

    3D model state variables on model levels (1-NLEV), levelType=hybrid

| FA name | shortName | NCname | lvT | iOP | d | pC | pN | lev | sT | units | description |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| SNNNHUMI.SPECIFI | q | hus | hy | 51 | 0 | 1 | 0 | 1 | ins | kg/kg | Specific humidity |
| SNNNLIQUID_WATER | cwat_cond | clw | hy | 76 | 0 | 1 | 83 | 1 | ins | kg/kg | Specific cloud liquid water content |
| SNNNSOLID_WATER | ciwc_cond | cli | hy | 58 | 0 | 1 | 84 | 1 | ins | kg/kg | Specific cloud ice water content |
| SNNNSNOW | snow_cond | # | hy | 184 | 0 | 1 | 86 | 1 | ins | kg/kg | Specific snow water content |
| SNNNRAIN | rain_cond | # | hy | 181 | 0 | 1 | 85 | 1 | ins | kg/kg | Specific rain water content |
| SNNNGRAUPEL | grpl_cond | # | hy | 201 | 0 | 1 | 32 | 1 | ins | kg/kg | Specific graupel |
| SNNNTKE | tke | tke | hy | 200 | 0 | 19 | 11 | 1 | ins | J/kg | Turbulent kinetic energy |
| SNNNCLOUD_FRACTI | tcc | clt | hy | 71 | 0 | 6 | 192 | 1 | ins | 0-1 | Total cloud cover |
| SNNNPRESS.DEPART | pdep | # | hy | 212 | 0 | 3 | 8 | 1 | ins | Pa | Pressure departure |
| SNNNTEMPERATURE | t | ta | hy | 11 | 0 | 0 | 0 | 1 | ins | K | Temperature |
| SNNNVERTIC.DIVER | vdiv | # | hy | 213 | 0 | 2 | 192 | 1 | ins | s-1 | Vertical divergence |
| SNNNWIND.U.PHYS | u | ua | hy | 33 | 0 | 2 | 2 | 1 | ins | m/s | u-component of wind |
| SNNNWIND.V.PHYS | v | va | hy | 34 | 0 | 2 | 3 | 1 | ins | m/s | v-component of wind |

    2D Surface, prognostic/diagnostic near-surface and soil variables, levelType=heightAboveGround

| FA name | shortName | NCname | lvT | iOP | d | pC | pN | lev | sT | units | description |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| SURFPRESSION | pres | ps | hag | 1 | 0 | 3 | 0 | 0 | ins | Pa | Surface pressure |
| SURFTEMPERATURE | t | ts_rad | hag | 11 | 0 | 0 | 0 | 0 | ins | K | Surface temperature |
| CLSTEMPERATURE | t | tas | hag | 11 | 0 | 0 | 0 | 2 | ins | K | Temperature at 2m |
| CLSMAXI.TEMPERAT | tmax | tasmax | hag | 15 | 0 | 0 | 0 | 2 | max | K | Maximum temperature (FREQ_RESET_TEMP) |
| CLSMINI.TEMPERAT | tmin | tasmin | hag | 16 | 0 | 0 | 0 | 2 | min | K | Minimum temperature (FREQ_RESET_TEMP) |
| CLSVENT.ZONAL | u | uas | hag | 33 | 0 | 2 | 2 | 10 | ins | m/s | u-component of wind at 10m, relative to model coordinates |
| CLSVENT.MERIDIEN | v | vas | hag | 34 | 0 | 2 | 3 | 10 | ins | m/s | v-component of wind at 10m, relative to model coordinates |
| CLSHUMI.SPECIFIQ | q | huss | hag | 51 | 0 | 1 | 0 | 2 | ins | kg/kg | Specific humidity at 2m |
| CLSHUMI.RELATIVE | r | hurs | hag | 52 | 0 | 1 | 192 | 2 | ins | 0-1 | Relative humidity at 2m |
| SURFRESERV.NEIGE | sdwe | snw | hag | 65 | 0 | 1 | 60 | 0 | ins | kg/m2 | Snow depth water equivalent |
| CLPMHAUT.MOD.XFU | mld | zmla | hag | 67 | 0 | 19 | 3 | 0 | ins | m | Height (in metres) of the PBL from the model |
| SURFNEBUL.TOTALE | tcc | clt_ins | hag | 71 | 0 | 6 | 192 | 0 | ins | 0-1 | Total cloud cover |
| SURFNEBUL.CONVEC | ccc | clc_ins | hag | 72 | 0 | 6 | 193 | 0 | ins | 0-1 | Convective cloud cover |
| SURFNEBUL.BASSE | lcc | cll_ins | hag | 73 | 0 | 6 | 194 | 0 | ins | 0-1 | Low cloud cover |
| SURFNEBUL.MOYENN | mcc | clm_ins | hag | 74 | 0 | 6 | 195 | 0 | ins | 0-1 | Medium cloud cover |
| SURFNEBUL.HAUTE | hcc | clh_ins | hag | 75 | 0 | 6 | 196 | 0 | ins | 0-1 | High cloud cover |
| SURFRAYT.SOLAIRE | swavr | # | hag | 116 | 0 | 4 | 7 | 0 | ins | W/m2 | Instantaneous surface solar radiation (SW down global). Parameter identifier was 116, again is??? |
| SURFRAYT.TERREST | lwavr | # | hag | 115 | 0 | 5 | 4 | 0 | ins | W/m2 | Instantaneous longwave radiation flux |
| SURFCAPE.MOD.XFU | cape | cape | hag | 160 | 0 | 7 | 6 | 0 | ins | J/kg | Model output CAPE (not calculated by AROME physics) |
| SURFDIAGHAIL | xhail | # | hag | 161 | 0 | 1 | 203 | 0 | ins | 0-1 | AROME hail diagnostic, LXXDIAGH = .TRUE. |
| CLSU.RAF.MOD.XFU | ugst | ugs | hag | 162 | 0 | 2 | 23 | 10 | max | m/s | U-momentum of gusts from the model. LXXGST = .TRUE. in NAMXFU; gives the gust between the current and previous output time step (FREQ_RESET_GUST) |
| CLSV.RAF.MOD.XFU | vgst | vgs | hag | 163 | 0 | 2 | 24 | 10 | max | m/s | V-momentum of gusts from the model. LXXGST = .TRUE. in NAMXFU; gives the gust between the current and previous output time step (FREQ_RESET_GUST) |
| SURFINSPLUIE | rain | # | hag | 181 | 0 | 1 | 65 | 0 | ins | kg/m2 | Instantaneous rain |
| SURFINSNEIGE | snow | # | hag | 184 | 0 | 1 | 53 | 0 | ins | kg/m2 | Instantaneous snow |
| SURFINSGRAUPEL | grpl | # | hag | 201 | 0 | 1 | 75 | 0 | ins | kg/m2 | Instantaneous graupel |
| CLSMINI.HUMI.REL | rmn2m | # | hag | 241 | 0 | 1 | 1 | 2 | min | 0-1 | Minimum relative moisture at 2m over 3h |
| CLSMAXI.HUMI.REL | rmx2m | # | hag | 242 | 0 | 1 | 1 | 2 | max | 0-1 | Maximum relative moisture at 2m over 3h |
| CLSRAFALES.POS | fg | wsgsmax | hag | 228 | 0 | 2 | 22 | 10 | max | m/s | Gust wind speed |

    2D Surface, accumulated near-surface and soil variables

    Note that all these are coded with stepType=accum

| FA name | shortName | NCname | lvT | iOP | d | pC | pN | lev | sT | units | description |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| S065RAYT SOL CL | cssw | # | hy | 130 | 0 | 4 | 11 | 65 | acc | J/m2 | SW net clear sky radiation |
| S065RAYT THER CL | cslw | # | hy | 131 | 0 | 5 | 6 | 65 | acc | J/m2 | LW net clear sky radiation |
| SURFACCGRAUPEL | grpl | prgrpl | hag | 201 | 0 | 1 | 75 | 0 | acc | kg/m2 | Accumulated graupel |
| SURFACCNEIGE | snow | prsn | hag | 184 | 0 | 1 | 53 | 0 | acc | kg/m2 | Accumulated snowfall |
| SURFACCPLUIE | rain | prrain | hag | 181 | 0 | 1 | 65 | 0 | acc | kg/m2 | Accumulated rain |
| SURFDIR NORM IRR | dner | ids | hag | 140 | 3 | 6 | 3 | 0 | acc | J/m2 | Direct normal exposure |
| SURFFLU.CHA.SENS | shf | hfss | hag | 122 | 0 | 0 | 11 | 0 | acc | J/m2 | Sensible heat flux |
| SURFFLU.LAT.MEVA | lhe | hfls_eva | hag | 132 | 0 | 1 | 193 | 0 | acc | J/m2 | Latent heat flux through evaporation |
| SURFFLU.LAT.MSUB | lhsub | hfls_sbl | hag | 244 | 0 | 1 | 202 | 0 | acc | J/kg | Latent heat sublimation |
| SURFFLU.MEVAP.EA | wevap | evspsbl | hag | 245 | 0 | 1 | 6 | 0 | acc | kg/m2 | Water evaporation |
| SURFFLU.MSUBL.NE | snsub | sbl_snow | hag | 246 | 0 | 1 | 62 | 0 | acc | kg/m2 | Snow sublimation |
| SURFFLU.RAY.SOLA | nswrs | rsns | hag | 111 | 0 | 4 | 9 | 0 | acc | J/m2 | Net shortwave radiation flux (surface) |
| SURFFLU.RAY.THER | nlwrs | rlns | hag | 112 | 0 | 5 | 5 | 0 | acc | J/m2 | Net longwave radiation flux (surface) |
| SURFRAYT DIR SUR | swavr | rsdsdir | hag | 116 | 0 | 4 | 7 | 0 | acc | J/m2 | Shortwave radiation flux |
| SURFRAYT SOLA DE | grad | rsds | hag | 117 | 0 | 4 | 3 | 0 | acc | J/m2 | Global radiation flux |
| SURFRAYT THER DE | lwavr | rlds | hag | 115 | 0 | 5 | 4 | 0 | acc | J/m2 | Longwave radiation flux |
| SURFTENS.TURB.ME | vflx | tauv | hag | 125 | 0 | 2 | 199 | 0 | acc | N/m2 | Momentum flux, v-component |
| SURFTENS.TURB.ZO | uflx | tauu | hag | 124 | 0 | 2 | 198 | 0 | acc | N/m2 | Momentum flux, u-component |

    2D TOA, diagnostic and accumulated variables, levelType=nominalTop

| FA name | shortName | NCname | lvT | iOP | d | pC | pN | lev | sT | units | description |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| SOMMFLU.RAY.SOLA | nswrt | rsnt | nt | 113 | 0 | 4 | 9 | 0 | acc | J/m2 | Net shortwave radiation flux (atmosph. top) |
| SOMMFLU.RAY.THER | nlwrt | rlnt | nt | 114 | 0 | 5 | 5 | 0 | acc | J/m2 | Net longwave radiation flux (atmosph. top) |
| SOMMRAYT.SOLAIRE | nswrt | # | nt | 113 | 0 | 4 | 9 | 0 | ins | W/m2 | Net shortwave radiation flux (atmosph. top) |
| SOMMRAYT.TERREST | nlwrt | # | nt | 114 | 0 | 5 | 5 | 0 | ins | W/m2 | Net longwave radiation flux (atmosph. top) |
| TOPRAYT DIR SOM | swavr | rsdt | nt | 116 | 0 | 4 | 7 | 0 | acc | J/m2 | TOA accumulated SW down radiation. Parameter identifier was 117 |
| SOMMTBOZCLEAR | btozcs | # | nt | 170 | -1 | -1 | -1 | 0 | - | K | Brightness temperature OZ clear |
| SOMMTBOZCLOUD | btozcl | # | nt | 171 | -1 | -1 | -1 | 0 | - | K | Brightness temperature OZ cloud |
| SOMMTBIRCLEAR | btircs | # | nt | 172 | -1 | -1 | -1 | 0 | - | K | Brightness temperature IR clear |
| SOMMTBIRCLOUD | btircl | # | nt | 173 | -1 | -1 | -1 | 0 | - | K | Brightness temperature IR cloud |
| SOMMTBWVCLEAR | btwvcs | # | nt | 174 | -1 | -1 | -1 | 0 | - | K | Brightness temperature WV clear |
| SOMMTBWVCLOUD | btwvcl | # | nt | 175 | -1 | -1 | -1 | 0 | - | K | Brightness temperature WV cloud |

    2D Surface, Postprocessed variables (fullpos)

| FA name | shortName | NCname | lvT | iOP | d | pC | pN | lev | sT | units | description |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| SURFCAPE.POS.F00 | cape | cape | hag | 160 | 0 | 7 | 6 | 0 | ins | J/kg | Convective available potential energy (CAPE) |
| SURFCIEN.POS.F00 | cin | cin | hag | 165 | 0 | 7 | 7 | 0 | ins | J/kg | Convective inhibition (CIN) |
| SURFLIFTCONDLEV | lcl | # | ac | 167 | 0 | 3 | 6 | 0 | ins | m | Lifting condensation level (LCL) |
| SURFFREECONVLEV | lfc | # | lfc | 168 | 0 | 3 | 6 | 0 | ins | m | Level of free convection (LFC) |
| SURFEQUILIBRLEV | lnb | # | lnb | 169 | 0 | 3 | 6 | 0 | ins | m | Level of neutral buoyancy (LNB) |

    2D Surface, constant near-surface and soil variables

| FA name | shortName | NCname | lvT | iOP | d | pC | pN | lev | sT | units | description |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| SPECSURFGEOPOTEN | z | phis_s | hag | 6 | 0 | 3 | 4 | 0 | ins | m2/s2 | Geopotential relative to mean sea level. "... contains a GRID POINT orography which is the interpolation of the departure orography" |
| SURFIND.TERREMER | lsm | lsm | hag | 81 | 2 | 0 | 0 | 0 | ins | 0-1 | Land-sea mask |
| SURFAEROS.SEA | aers | # | hag | 251 | 0 | 13 | 192 | 0 | ins | kg/kg | Surface aerosol sea (Marine aerosols, locally defined GRIB) |
| SURFAEROS.LAND | aerl | # | hag | 252 | 0 | 13 | 193 | 0 | ins | kg/kg | Surface aerosol land (Continental aerosols, locally defined GRIB) |
| SURFAEROS.SOOT | aerc | # | hag | 253 | 0 | 13 | 194 | 0 | ins | kg/kg | Surface carbon aerosol (Carbon aerosols, locally defined GRIB) |
| SURFAEROS.DESERT | aerd | # | hag | 254 | 0 | 13 | 195 | 0 | ins | kg/kg | Surface aerosol desert (Desert aerosols, locally defined GRIB) |
| SURFAEROS.VOLCAN | # | # | hag | 197 | -1 | -1 | -1 | -1 | | | Surface aerosol volcano (Stratospheric ash, to be locally defined GRIB) |
| SURFAEROS.SULFAT | # | # | hag | 198 | -1 | -1 | -1 | -1 | | | Surface aerosol sulfate (Stratospheric sulfate, to be locally defined GRIB) |
| SURFA.OF.OZONE | ao | # | hag | 248 | 0 | 14 | 192 | 0 | ins | kg/kg | A ozone, first ozone profile (A), locally defined GRIB |
| SURFB.OF.OZONE | bo | # | hag | 249 | 0 | 14 | 193 | 0 | ins | kg/kg | B ozone, second ozone profile (B), locally defined GRIB |
| SURFC.OF.OZONE | co | # | hag | 250 | 0 | 14 | 194 | 0 | ins | kg/kg | C ozone, third ozone profile (C), locally defined GRIB |
| PROFTEMPERATURE | slt | # | dbl | 85 | 2 | 3 | 18 | 0 | ins | K | Soil temperature |
| PROFRESERV.EAU | sm | # | dbl | 86 | 2 | 3 | 20 | 0 | ins | kg/m2 | Deep soil wetness |
| PROFPROP.RMAX.EA | swv | # | dbl | 238 | 2 | 3 | 25 | 0 | ins | kg/m2 | Climate relaxed deep soil wetness |
| PROFRESERV.GLACE | wsoice | # | dbl | 193 | 2 | 3 | 22 | 0 | ins | kg/m2 | Deep soil ice |

    2D variables on special surfaces

| FA name | shortName | NCname | lvT | iOP | d | pC | pN | lev | sT | units | description |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| KT273ISOT_ALTIT | h | # | isot | 8 | 0 | 3 | 6 | 27315 | ins | m | Altitude of 0-degree isotherm |
| KT263ISOT_ALTIT | h | # | isot | 8 | 0 | 3 | 6 | 26315 | ins | m | Altitude of -10-degree isotherm |
| SURFISOTPW0.MALT | h | # | isot0wb | 8 | 0 | 3 | 6 | 0 | ins | m | Altitude of iso-tprimw=0 |
| SURFTOT.WAT.VAPO | wvint | prw | ea | 54 | 0 | 1 | 64 | 0 | ins | kg/m2 | Total column integral water vapour |
| WFPOWERINS | wfpower_ins | wfpower_ins | ea | 211 | 0 | 2 | 39 | 0 | ins | MW | Wind power production, instantaneous (LWINDFARM=.TRUE. in NAMPHY) |
| WFPOWERACC | wfpower_acc | wfpower_acc | ea | 211 | 0 | 2 | 39 | 0 | acc | MJ | Wind power production, accumulated (LWINDFARM=.TRUE. in NAMPHY) |

    Postprocessed variables on different surface types

Through the post-processing software fullpos, HARMONIE offers a number of variables post-processed on different surface types. For the current choice of variables, surfaces and levels, please see scr/Select_postp.pl.

State variables and diagnostics on pressure levels, levelType=isobaricInhPa

| FA name | shortName | NCname | lvT | iOP | d | pC | pN | lev | sT | units | description |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| PNNNNNWIND.U.PHY | u | ua | pl | 33 | 0 | 2 | 2 | NNNNN | ins | m/s | u-component of wind |
| PNNNNNWIND.V.PHY | v | va | pl | 34 | 0 | 2 | 3 | NNNNN | ins | m/s | v-component of wind |
| PNNNNNTEMPERATUR | t | ta | pl | 11 | 0 | 0 | 0 | NNNNN | ins | K | Temperature |
| PNNNNNHUMI.SPECI | q | hus | pl | 51 | 0 | 1 | 0 | NNNNN | ins | kg/kg | Specific humidity |
| PNNNNNLIQUID_WAT | cwat_cond | clw | pl | 76 | 0 | 1 | 83 | NNNNN | ins | kg/kg | Specific cloud liquid water content |
| PNNNNNSOLID_WATE | ciwc_cond | cli | pl | 58 | 0 | 1 | 84 | NNNNN | ins | kg/kg | Specific cloud ice water content |
| PNNNNNCLOUD_FRAC | tcc | # | pl | 71 | 0 | 6 | 192 | NNNNN | ins | 0-1 | Total cloud cover |
| PNNNNNSNOW | snow_cond | # | pl | 184 | 0 | 1 | 86 | NNNNN | ins | kg/kg | Specific snow water content |
| PNNNNNRAIN | rain_cond | # | pl | 181 | 0 | 1 | 85 | NNNNN | ins | kg/kg | Specific rain water content |
| PNNNNNGRAUPEL | grpl_cond | # | pl | 201 | 0 | 1 | 32 | NNNNN | ins | kg/kg | Specific graupel |
| PNNNNNGEOPOTENTI | z | phi | pl | 6 | 0 | 3 | 4 | NNNNN | ins | m2/s2 | Geopotential |
| PNNNNNHUMI_RELAT | r | hur | pl | 52 | 0 | 1 | 192 | NNNNN | ins | 0-1 | Relative humidity |
| PNNNNNTHETA_PRIM | papt | thetaE | pl | 14 | 0 | 0 | 3 | NNNNN | ins | K | Pseudo-adiabatic potential temperature |
| PNNNNNTHETA_VIRT | vptmp | # | pl | 176 | 0 | 0 | 15 | NNNNN | ins | K | Virtual potential temperature |
| PNNNNNVERT.VELOC | w | wa | pl | 40 | 0 | 2 | 9 | NNNNN | ins | m/s | Geometrical vertical velocity |
| PNNNNNPOT_VORTIC | pv | pv | pl | 4 | 0 | 2 | 14 | NNNNN | ins | K m2/kg/s | Potential vorticity |
| PNNNNNABS_VORTIC | absv | # | pl | 41 | 0 | 2 | 10 | NNNNN | ins | s-1 | Absolute vorticity |
| PNNNNNDIVERGENCE | d | # | pl | 44 | 0 | 2 | 13 | NNNNN | ins | s-1 | Relative divergence |

    State variables and diagnostics on height levels, levelType=heightAboveGround

| FA name | shortName | NCname | lvT | iOP | d | pC | pN | lev | sT | units | description |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| HNNNNNWIND.U.PHY | u | ua | hag | 33 | 0 | 2 | 2 | NNNNN | ins | m/s | u-component of wind |
| HNNNNNWIND.V.PHY | v | va | hag | 34 | 0 | 2 | 3 | NNNNN | ins | m/s | v-component of wind |
| HNNNNNTEMPERATUR | t | ta | hag | 11 | 0 | 0 | 0 | NNNNN | ins | K | Temperature |
| HNNNNNLIQUID_WAT | cwat_cond | clw | hag | 76 | 0 | 1 | 83 | NNNNN | ins | kg/kg | Specific cloud liquid water content |
| HNNNNNSOLID_WATE | ciwc_cond | cli | hag | 58 | 0 | 1 | 84 | NNNNN | ins | kg/kg | Specific cloud ice water content |
| HNNNNNCLOUD_FRAC | tcc | clt | hag | 71 | 0 | 6 | 192 | NNNNN | ins | 0-1 | Total cloud cover |
| HNNNNNSNOW | snow_cond | # | hag | 184 | 0 | 1 | 86 | NNNNN | ins | kg/kg | Specific snow water content |
| HNNNNNRAIN | rain_cond | # | hag | 181 | 0 | 1 | 85 | NNNNN | ins | kg/kg | Specific rain water content |
| HNNNNNGRAUPEL | grpl_cond | # | hag | 201 | 0 | 1 | 32 | NNNNN | ins | kg/kg | Specific graupel |
| HNNNNNHUMI_RELAT | r | hur | hag | 52 | 0 | 1 | 192 | NNNNN | ins | 0-1 | Relative humidity |
| HNNNNNPRESSURE | pres | p | hag | 1 | 0 | 3 | 0 | NNNNN | ins | Pa | Pressure |

    State variables and diagnostics on PV levels, GRIB1 level type 117, levelType=potentialVorticity

| FA name | shortName | NCname | lvT | iOP | d | pC | pN | lev | sT | units | description |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| VNNNGEOPOTENTIEL | z | # | pv | 6 | 0 | 3 | 4 | NNN | ins | m2/s2 | Geopotential |
| VNNNTEMPERATURE | t | # | pv | 11 | 0 | 0 | 0 | NNN | ins | K | Temperature |
| VNNNPRESSURE | pres | # | pv | 1 | 0 | 3 | 0 | NNN | ins | Pa | Pressure |
| VNNNHUMI_RELATIV | r | # | pv | 52 | 0 | 1 | 192 | NNN | ins | 0-1 | Relative humidity |
| VNNNHUMI.SPECIFI | q | # | pv | 51 | 0 | 1 | 0 | NNN | ins | kg/kg | Specific humidity |
| VNNNWIND.U.PHYS | u | # | pv | 33 | 0 | 2 | 2 | NNN | ins | m/s | u-component of wind |
| VNNNWIND.V.PHYS | v | # | pv | 34 | 0 | 2 | 3 | NNN | ins | m/s | v-component of wind |
| VNNNVITESSE_VERT | omega | # | pv | 39 | 0 | 2 | 8 | NNN | ins | Pa/s | Pressure vertical velocity (DYNAMICS=h) |
| VNNNVERT.VELOCIT | w | # | pv | 40 | 0 | 2 | 9 | NNN | ins | m/s | Geometrical vertical velocity (DYNAMICS=nh) |
| VNNNTEMPE_POTENT | pt | # | pv | 13 | 0 | 0 | 2 | NNN | ins | K | Potential temperature |
| VNNNABS_VORTICIT | absv | # | pv | 41 | 0 | 2 | 10 | NNN | ins | s-1 | Absolute vorticity |
| VNNNDIVERGENCE | d | # | pv | 44 | 0 | 2 | 13 | NNN | ins | s-1 | Relative divergence |
| VNNNTHETAPRIMW | papt | # | pv | 14 | 0 | 0 | 3 | NNN | ins | K | Pseudo-adiabatic potential temperature |

    State variables and diagnostics on Theta levels, GRIB1 level type 113, levelType=theta

| FA name | shortName | NCname | lvT | iOP | d | pC | pN | lev | sT | units | description |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| TNNNGEOPOTENTIEL | z | # | th | 6 | 0 | 3 | 4 | NNN | ins | m2/s2 | Geopotential |
| TNNNTEMPERATURE | t | # | th | 11 | 0 | 0 | 0 | NNN | ins | K | Temperature |
| TNNNPRESSURE | pres | # | th | 1 | 0 | 3 | 0 | NNN | ins | Pa | Pressure |
| TNNNHUMI_RELATIV | r | # | th | 52 | 0 | 1 | 192 | NNN | ins | 0-1 | Relative humidity |
| TNNNHUMI.SPECIFI | q | # | th | 51 | 0 | 1 | 0 | NNN | ins | kg/kg | Specific humidity |
| TNNNWIND.U.PHYS | u | # | th | 33 | 0 | 2 | 2 | NNN | ins | m/s | u-component of wind |
| TNNNWIND.V.PHYS | v | # | th | 34 | 0 | 2 | 3 | NNN | ins | m/s | v-component of wind |
| TNNNVITESSE_VERT | omega | # | th | 39 | 0 | 2 | 8 | NNN | ins | Pa/s | Pressure vertical velocity (DYNAMICS=h) |
| TNNNVERT.VELOCIT | w | # | th | 40 | 0 | 2 | 9 | NNN | ins | m/s | Geometrical vertical velocity (DYNAMICS=nh) |
| TNNNABS_VORTICIT | absv | # | th | 41 | 0 | 2 | 10 | NNN | ins | s-1 | Absolute vorticity |
| TNNNPOT_VORTICIT | pv | # | th | 4 | 0 | 2 | 14 | NNN | ins | K m2/kg/s | Potential vorticity |
| TNNNDIVERGENCE | d | # | th | 44 | 0 | 2 | 13 | NNN | ins | s-1 | Relative divergence |

    FA fields without any default GRIB1 translation

Some very special fields are left without any default translation. Please see the gl documentation on how to add your own translation.

| FA name | Unit | Comment |
| --- | --- | --- |
| CUF1PRESSURE | | Coupling error field |
| THETAPWP_FLUX | K m-4 s-1 | Instantaneous thetaprimwprim surface flux |
| CLPMOCON.MOD.XFU | kg kg-1 s-1 | MOCON model output |
| ATMONEBUL.TOTALE | | Accumulated total cloud cover |
| ATMONEBUL.CONVEC | | Accumulated convective cloud cover |
| ATMONEBUL.BASSE | | Accumulated low cloud cover |
| ATMONEBUL.MOYENN | | Accumulated medium cloud cover |
| ATMONEBUL.HAUTE | | Accumulated high cloud cover |
| SURFCFU.Q.TURBUL | | Accumulated contribution of turbulence to Q |
| SURFCFU.CT.TURBUL | | Accumulated contribution of turbulence to CpT |
| SUNSHI. DURATION | | Sunshine duration |
| SURFFL.U TURBUL | | Contribution of turbulence to U |
| SURFFL.V TURBUL | | Contribution of turbulence to V |
| SURFFL.Q TURBUL | | Contribution of turbulence to Q |
| SURFFL.CT TURBUL | | Contribution of turbulence to CpT |
| SNNNSRC | | Second-order flux |

    Variables postprocessed by gl

The following fields can be generated by gl from a history file and are thus not necessarily available as FA fields in HARMONIE's FA output. When calculating these post-processed fields, make sure the fields required to derive them are in the input files! For details, check util/gl/grb/postprocess.f90 and the routines called therein.

    Single level fields

| FA name | shortName | NCname | lvT | iOP | d | pC | pN | lev | sT | units | description |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| MSLPRESSURE | pres | psl | has | 1 | 0 | 3 | 0 | 0 | ins | Pa | MSLP; gl calculates MSLP independently of AROME/FullPos |
| # | td | td | hag | 17 | 0 | 0 | 6 | 2 | ins | K | Dew point temperature |
| # | vis | # | hag | 20 | 0 | 19 | 0 | 0 | ins | m | Visibility |
| # | wdir | # | ttt | 31 | 0 | 2 | 0 | lll | ins | Deg. true | Wind direction; gl calculates it from the u[33,ttt,lll] and v[34,ttt,lll] wind components |
| # | ws | # | ttt | 32 | 0 | 2 | 1 | lll | ins | m/s | Wind speed; gl calculates it from the u[33,ttt,lll] and v[34,ttt,lll] wind components |
| TOT.WATER.PRECIP | tp | pr | hag | 61 | 0 | 1 | 8 | 0 | acc | kg/m2 | Total precipitation; gl calculates TP[61,105,0] = rain[181,105,0] + snow[184,105,0] + graupel[201,105,0] + hail[204,105,0] |
| TOT.SOLID.PRECIP | tpsolid | prsolid | hag | 185 | 0 | 1 | 200 | 0 | acc | kg/m2 | Total solid precipitation; gl calculates [185,105,0] = snow[184,105,0] + graupel[201,105,0] + hail[204,105,0] |
| # | mld | zmla | hag | 67 | 0 | 19 | 3 | 0 | ins | m | Mixed layer depth/boundary layer height |
| # | tcc | # | hag | 71 | 0 | 6 | 192 | 2 | ins | 0-1 | Fog, cloud fraction of lowest model level |
| # | icei | # | hag | 135 | | | | 0 | ins | - | Icing index |
| # | atmiceg | # | hy | ?? | 0 | 1 | 205 | | ins | m/s | Icing index, atmospheric ice growth rate |
| # | icei2 | # | hag/? | 134 | 0 | 1 | 194 | 0 | ins | - | Icing index version 2 |
| # | psct | # | hag/ct? | 136 | 0 | 4 | 0 | 0 | ins | K | Pseudo satellite image, cloud top temperature (infrared) |
| # | pstb | # | hag | 137 | 0 | 4 | 198 | 0 | ins | K | Pseudo satellite image, water vapour brightness temperature |
| # | pstbc | # | hag | 138 | 0 | 4 | 199 | 0 | ins | K | Pseudo satellite image, water vapour br. temp. + correction for clouds |
| # | pscw | # | hag | 139 | 0 | 4 | 200 | 0 | ins | - | Pseudo satellite image, cloud water reflectivity (visible) |
| # | prtp | # | hag | 144 | 0 | 1 | 19 | 0 | ins | code | Precipitation type: 0 drizzle, 1 rain, 2 sleet, 3 snow, 4 freezing drizzle, 5 freezing rain, 6 graupel, 7 hail |
| # | fg | # | ttt | 228 | 0 | 2 | 22 | lll | max | m/s | Gust wind speed, calculated from ugst and vgst on the corresponding level and levelType |
| # | hti | # | hag | 148 | 0 | 17 | 193 | 0 | ins | - | Helicopter Triggered lightning Index |
| # | transmit | # | hag | 149 | 0 | 6 | 199 | 0 | ins | - | Transmittance |
| # | cat | # | hag | 145 | 0 | 19 | 22 | 0 | ins | - / % | CAT (clear air turbulence) index |
| # | bvf | # | hag | 159 | 0 | 19 | 202 | 0 | ins | s-1 | Brunt Vaisala frequency |

    Integrated quantities

| FA name | shortName | NCname | lvT | iOP | d | pC | pN | lev | sT | units | description |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| TOT.NEB.ICE | ciwc_vint | clivi | ea | 58 | 0 | 1 | 70 | 0 | ins | kg/m2 | Vertical integral of cloud ice |
| TOT.NEB.WAT | cwat_vint | clqvi | ea | 76 | 0 | 1 | 69 | 0 | ins | kg/m2 | Vertical integral of cloud liquid water |
| # | msca | # | ea | 133 | 0 | 6 | 197 | 0 | ins | 0-1 | Mask of significant cloud amount |
| # | cape | # | hag | 160 | 0 | 7 | 6 | 0 | ins | J/kg | Convective available potential energy; comes in two flavours, cape_version=1|2, where the second is compatible with the ECMWF version |
| # | cin | # | hag | 165 | 0 | 7 | 7 | 0 | ins | J/kg | Convective inhibition; comes in two flavours, cape_version=1|2, where the second is compatible with the ECMWF version |
| # | rain_vint | clrvi | ea | 181 | 0 | 1 | 45 | 0 | ins | kg/m2 | Vertical integral of rain |
| # | snow_vint | clsvi | ea | 184 | 0 | 1 | 46 | 0 | ins | kg/m2 | Vertical integral of snow |
| # | grpl_vint | clgvi | ea | 201 | 0 | 1 | 74 | 0 | ins | kg/m2 | Vertical integral of graupel |
| # | cb | # | ea | 186 | 0 | 6 | 11 | 0 | ins | m | Cloud base |
| # | ct | # | ea | 187 | 0 | 6 | 12 | 0 | ins | m | Cloud top |
| # | cb38 | # | hag? | 189 | 0 | 6 | 198 | 3 | ins | m | Cloud base >3/8 |
| # | lgt | # | ea | 209 | 0 | 17 | 192 | 0 | ins | flash/h | Lightning intensity |
| # | lmxws | # | ea/mw? | 142 | 0 | 3 | 6 | 0 | ins | m | Level of max wind speed |
| # | maxucol | # | ea | 164 | 0 | 2 | 2 | 0 | ins | m/s | Max u-component in column |
| # | maxvcol | # | ea | 177 | 0 | 2 | 3 | 0 | ins | m/s | Max v-component in column |
| # | lmxice | # | ea | 143 | 0 | 1 | 199 | 0 | ins | m | Icing index, level of max icing |
| # | mxicegr | # | ea | 141 | 0 | 1 | 204 | 0 | ins | 1 | Icing index, max icing growth index (0-4) |
| # | blice | # | ea | 146 | 0 | 1 | 96 | 0 | ins | m | Icing index, bottom level of icing |
| # | tlice | # | ea | 147 | 0 | 1 | 95 | 0 | ins | m | Icing index, top level of icing |
| # | cat_maxlev | # | ea | 150 | 0 | 19 | 201 | 0 | ins | m | Level of max CAT index |
| # | cat_max | # | ea | 151 | 0 | 19 | 197 | 0 | ins | - | Max CAT index |
| # | cat_b | # | ea | 152 | 0 | 19 | 198 | 0 | ins | m | Bottom level of CAT |
| # | cat_t | # | ea | 153 | 0 | 19 | 199 | 0 | ins | m | Top level of CAT |

    GRIB encoding information

    Time units, WMO code table 4

    The following time units are used to encode GRIB edition 1 data

| Code | Unit |
| --- | --- |
| 0 | Minute |
| 1 | Hour |
| 13 | 15 minutes |
| 14 | 30 minutes |

Time range indicator, WMO code table 5

| Code | abbr | Definition |
| --- | --- | --- |
| 0 | ins | Forecast product valid for reference time + P1 (P1 > 0), or uninitialized analysis product for reference time (P1 = 0) |
| 2 | min/max | Product with a valid time ranging between reference time + P1 and reference time + P2; used for min/max values |
| 3 | avg | Average (reference time + P1 to reference time + P2) |
| 4 | acc | Accumulation (reference time + P1 to reference time + P2), product considered valid at reference time + P2 |

Note that fields available as both instantaneous and accumulated values (e.g. rain) have the same parameter value and can only be distinguished by the time range indicator, as illustrated below.
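A hedged way to inspect this with EcCodes (the file name is illustrative; the keys are standard GRIB1 keys):

```sh
# Rain (indicatorOfParameter=181) appears with timeRangeIndicator 0
# (instantaneous) and 4 (accumulated); stepType shows the same distinction
grib_ls -p shortName,indicatorOfParameter,timeRangeIndicator,stepType fc_grib1_file
```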

    Level types, WMO Code table 3

| level type | name | abbr | WMO/HIRLAM type definition | Units | notes |
| --- | --- | --- | --- | --- | --- |
| 001 | surface | sfc | Ground or water surface | | WMO |
| 002 | cloudBase | cb | Cloud base level | | WMO |
| 003 | cloudTop | ct | Level of cloud tops | | WMO |
| 004 | isothermZero | isot0 | Level of 0°C isotherm | | WMO |
| 005 | adiabaticCondensation | ac | Level of adiabatic condensation lifted from the surface | | WMO |
| 006 | maxWind | mw | Maximum wind level | | WMO |
| 007 | tropopause | tp | Tropopause | | WMO |
| 008 | nominalTop | nt | Top of atmosphere | | WMO |
| 020 | isothermal | isot | Isothermal level | Temperature in 1/100 K | WMO |
| 100 | isobaricInhPa | pl | Isobaric level | hPa | WMO |
| 102 | meanSea | ms | At mean sea level | | |
| 103 | heightAboveSea | has | Specified altitude above mean sea level | Altitude in m | WMO |
| 105 | heightAboveGround | hag | Specified height above ground | Altitude in m | WMO |
| 107 | sigma | | Sigma level | Sigma value in 1/10000 | WMO |
| 109 | hybrid | hy | Hybrid level | | WMO |
| 112 | depthBelowLandLayer | dbl | | | |
| 113 | theta | th | Isentropic (theta) level | Potential temperature in K | WMO |
| 117 | potentialVorticity | pv | Potential vorticity surface | 10-9 K m2 kg-1 s-1 | WMO |
| 192 | isothermZeroWetBulb | isot0wb | | | |
| 200 | entireAtmosphere | ea | Entire atmosphere (considered as a single layer) | | WMO, vertically integrated |
| | levelFreeConvection | lfc | as heightAboveGround in GRIB1 | | |
| | levelNeutralBuoyancy | lnb | as heightAboveGround in GRIB1 | | |

    Harmonie GRIB1 code table 2 version 253 - Indicator of parameter

Below is the indicator-of-parameter code table for the HARMONIE model. It is based on WMO code table 2 version 3, with local parameters added. Parameter indicators 128-254 are reserved for originating-centre use. Parameter indicators 000-127 should not be altered. In HARMONIE, radiation fluxes are assumed positive downwards (against the recommendation of WMO).

| Par | Description | SI Units |
| --- | --- | --- |
| 000 | Reserved | n/a |
| 001 | Pressure | Pa |
| 002 | Pressure reduced to MSL | Pa |
| 003 | Pressure tendency | Pa s-1 |
| 004 | Potential vorticity | K m2 kg-1 s-1 |
| 005 | ICAO Standard Atmosphere reference height | m |
| 006 | Geopotential | m2 s-2 |
| 007 | Geopotential height | gpm |
| 008 | Geometrical height | m |
| 009 | Standard deviation of height | m |
| 010 | Total ozone | Dobson |
| 011 | Temperature | K |
| 012 | Virtual temperature | K |
| 013 | Potential temperature | K |
| 014 | Pseudo-adiabatic potential temperature | K |
| 015 | Maximum temperature | K |
| 016 | Minimum temperature | K |
| 017 | Dew-point temperature | K |
| 018 | Dew-point depression (or deficit) | K |
| 019 | Lapse rate | K m-1 |
| 020 | Visibility | m |
| 021 | Radar spectra (1) | - |
| 022 | Radar spectra (2) | - |
| 023 | Radar spectra (3) | - |
| 024 | Parcel lifted index (to 500 hPa) | K |
| 025 | Temperature anomaly | K |
| 026 | Pressure anomaly | Pa |
| 027 | Geopotential height anomaly | gpm |
| 028 | Wave spectra (1) | - |
| 029 | Wave spectra (2) | - |
| 030 | Wave spectra (3) | - |
| 031 | Wind direction | Degree true |
| 032 | Wind speed | m s-1 |
| 033 | u-component of wind | m s-1 |
| 034 | v-component of wind | m s-1 |
| 035 | Stream function | m2 s-1 |
| 036 | Velocity potential | m2 s-1 |
| 037 | Montgomery stream function | m2 s-1 |
| 038 | Sigma coordinate vertical velocity | s-1 |
| 039 | Vertical velocity | Pa s-1 |
| 040 | Vertical velocity | m s-1 |
| 041 | Absolute vorticity | s-1 |
| 042 | Absolute divergence | s-1 |
| 043 | Relative vorticity | s-1 |
| 044 | Relative divergence | s-1 |
| 045 | Vertical u-component shear | s-1 |
| 046 | Vertical v-component shear | s-1 |
| 047 | Direction of current | Degree true |
| 048 | Speed of current | m s-1 |
| 049 | u-component of current | m s-1 |
| 050 | v-component of current | m s-1 |
| 051 | Specific humidity | kg kg-1 |
| 052 | Relative humidity | % |
| 053 | Humidity mixing ratio | kg kg-1 |
| 054 | Precipitable water | kg m-2 |
| 055 | Vapor pressure | Pa |
| 056 | Saturation deficit | Pa |
| 057 | Evaporation | kg m-2 |
| 058 | Cloud ice | kg m-2 |
| 059 | Precipitation rate | kg m-2 s-1 |
| 060 | Thunderstorm probability | % |
| 061 | Total precipitation | kg m-2 |
| 062 | Large scale precipitation | kg m-2 |
| 063 | Convective precipitation | kg m-2 |
| 064 | Snowfall rate water equivalent | kg m-2 s-1 |
| 065 | Water equivalent of accumulated snow depth | kg m-2 |
| 066 | Snow depth | m |
| 067 | Mixed layer depth | m |
| 068 | Transient thermocline depth | m |
| 069 | Main thermocline depth | m |
| 070 | Main thermocline anomaly | m |
| 071 | Total cloud cover | % |
| 072 | Convective cloud cover | % |
| 073 | Low cloud cover | % |
| 074 | Medium cloud cover | % |
| 075 | High cloud cover | % |
| 076 | Cloud water | kg m-2 |
| 077 | Best lifted index (to 500 hPa) | K |
| 078 | Convective snow | kg m-2 |
| 079 | Large scale snow | kg m-2 |
| 080 | Water temperature | K |
| 081 | Land cover (1 = land, 0 = sea) | Proportion |
| 082 | Deviation of sea level from mean | m |
| 083 | Surface roughness | m |
| 084 | Albedo | % |
| 085 | Soil temperature | K |
| 086 | Soil moisture content | kg m-2 |
| 087 | Vegetation | % |
| 088 | Salinity | kg kg-1 |
| 089 | Density | kg m-3 |
| 090 | Water run-off | kg m-2 |
| 091 | Ice cover (1 = ice, 0 = no ice) | Proportion |
| 092 | Ice thickness | m |
| 093 | Direction of ice drift | Degree true |
| 094 | Speed of ice drift | m s-1 |
| 095 | u-component of ice drift | m s-1 |
| 096 | v-component of ice drift | m s-1 |
| 097 | Ice growth rate | m s-1 |
| 098 | Ice divergence | s-1 |
| 099 | Snow melt | kg m-2 |
| 100 | Significant height of combined wind waves and swell | m |
| 101 | Direction of wind waves | Degree true |
| 102 | Significant height of wind waves | m |
| 103 | Mean period of wind waves | s |
| 104 | Direction of swell waves | Degree true |
| 105 | Significant height of swell waves | m |
| 106 | Mean period of swell waves | s |
| 107 | Primary wave direction | Degree true |
| 108 | Primary wave mean period | s |
| 109 | Secondary wave direction | Degree true |
| 110 | Secondary wave mean period | s |
| 111 | Net short-wave radiation flux (surface) | W m-2 |
| 112 | Net long-wave radiation flux (surface) | W m-2 |
| 113 | Net short-wave radiation flux (top of atmosphere) | W m-2 |
| 114 | Net long-wave radiation flux (top of atmosphere) | W m-2 |
| 115 | Long-wave radiation flux | W m-2 |
| 116 | Short-wave radiation flux | W m-2 |
| 117 | Global radiation flux | W m-2 |
| 118 | Brightness temperature | K |
| 119 | Radiance (with respect to wave number) | W m-1 sr-1 |
| 120 | Radiance (with respect to wave length) | W m-3 sr-1 |
| 121 | Latent heat flux | W m-2 |
| 122 | Sensible heat flux | W m-2 |
| 123 | Boundary layer dissipation | W m-2 |
| 124 | Momentum flux, u-component | N m-2 |
| 125 | Momentum flux, v-component | N m-2 |
| 126 | Wind mixing energy | J |
| 127 | Image data | - |
| 128 | Analysed RMS of PHI (CANARI) | m2 s-2 |
| 129 | Forecasted RMS of PHI (CANARI) | m2 s-2 |
| 130 | SW net clear sky radiation | W m-2 |
| 131 | LW net clear sky radiation | W m-2 |
| 132 | Latent heat flux through evaporation | W m-2 |
| 133 | Mask of significant cloud amount | 0-1 |
| 134 | Icing index version 2 | - |
| 135 | Icing index | Code table |
| 136 | Pseudo satellite image, cloud top temperature (infrared) | K |
| 137 | Pseudo satellite image, water vapour brightness temperature | K |
| 138 | Pseudo satellite image, water vapour br. temp. + correction for clouds | K |
| 139 | Pseudo satellite image, cloud water reflectivity (visible) | ? |
| 140 | Direct normal irradiance | J m-2 |
| 141 | Max icing growth index | - |
| 142 | Level of max wind speed | m |
| 143 | Level of max icing | m |
| 144 | Precipitation type | Code table |
| 145 | CAT index | - / % |
| 146 | Bottom level of icing | m |
| 147 | Top level of icing | m |
| 148 | Helicopter Triggered lightning Index | - |
| 149 | Transmittance | - |
| 150 | Level of max CAT index | m |
| 151 | Max CAT index | - |
| 152 | Bottom level of CAT | m |
| 153 | Top level of CAT | m |
| 154 | Max wind speed | m s-1 |
| 155 | Available | # |
| 156 | Available | # |
| 157 | Available | # |
| 158 | Surface downward moon radiation | W m-2 |
| 159 | Brunt Vaisala frequency | s-1 |
| 160 | CAPE | J kg-1 |
| 161 | AROME hail diagnostic | % |
| 162 | U-momentum of gusts out of the model | m s-1 |
| 163 | V-momentum of gusts out of the model | m s-1 |
| 164 | Max u-component in column | m s-1 |
| 165 | Convective inhibition (CIN) | J kg-1 |
| 166 | MOCON out of the model | kg/kg s-1 |
| 167 | Lifting condensation level (LCL) | m |
| 168 | Level of free convection (LFC) | m |
| 169 | Level of neutral buoyancy (LNB) | m |
| 170 | Brightness temperature OZ clear | K |
| 171 | Brightness temperature OZ cloud | K |
| 172 | Brightness temperature IR clear | K |
| 173 | Brightness temperature IR cloud | K |
| 174 | Brightness temperature WV clear | K |
| 175 | Brightness temperature WV cloud | K |
| 176 | Virtual potential temperature | K |
| 177 | Max v-component in column | m s-1 |
| 178 | Available | # |
| 179 | Available | # |
| 180 | Available | # |
| 181 | Rain | kg m-2 |
| 182 | Stratiform rain | kg m-2 |
| 183 | Convective rain | kg m-2 |
| 184 | Snow | kg m-2 |
| 185 | Total solid precipitation | kg m-2 |
| 186 | Cloud base | m |
| 187 | Cloud top | m |
| 188 | Fraction of urban land | Proportion |
| 189 | Cloud base >3/8 | m |
| 190 | Snow albedo | Proportion |
| 191 | Snow density | kg/m3 |
| 192 | Water on canopy | kg/m2 |
| 193 | Soil ice | kg/m2 |
| 194 | Available | # |
| 195 | Gravity wave stress U-comp | N/m2 |
| 196 | Gravity wave stress V-comp | N/m2 |
| 197 | Available | # |
| 198 | Available | # |
| 199 | Vegetation type | - |
| 200 | TKE | m2 s-2 |
| 201 | Graupel | kg m-2 |
| 202 | Stratiform graupel | kg m-2 |
| 203 | Convective graupel | kg m-2 |
| 204 | Hail | kg m-2 |
| 205 | Stratiform hail | kg m-2 |
| 206 | Convective hail | kg m-2 |
| 207 | Available | # |
| 208 | Available | # |
| 209 | Lightning | flash h-1 |
| 210 | Simulated reflectivity | dBz |
| 211 | Wind power production | MW or MJ |
| 212 | Pressure departure | Pa |
| 213 | Vertical divergence | s-1 |
| 214 | UD_OMEGA | m s-1? |
| 215 | DD_OMEGA | m s-1? |
| 216 | UDMESHFRAC | - |
| 217 | DDMESHFRAC | - |
| 218 | PSHICONVCL | - |
| 219 | Surface albedo for non snow covered areas | Proportion |
| 220 | Standard deviation of orography * g | m2 s-2 |
| 221 | Anisotropy coefficient of topography | - |
| 222 | Direction of main axis of topography | rad |
| 223 | Roughness length of bare surface * g | m2 s-2 |
| 224 | Roughness length for vegetation * g | m2 s-2 |
| 225 | Fraction of clay within soil | Proportion |
| 226 | Fraction of sand within soil | Proportion |
| 227 | Maximum proportion of vegetation | Proportion |
| 228 | Gust wind speed | m s-1 |
| 229 | Albedo of bare ground | Proportion |
| 230 | Albedo of vegetation | Proportion |
| 231 | Stomatal minimum resistance | s/m |
| 232 | Leaf area index | m2/m2 |
| 233 | Thetaprimwprim surface flux | K m/s |
| 234 | Dominant vegetation index | - |
| 235 | Surface emissivity | - |
| 236 | Maximum soil depth | m |
| 237 | Soil depth | m |
| 238 | Soil wetness | kg/m2 |
| 239 | Thermal roughness length * g | m2 s-2 |
| 240 | Resistance to evapotranspiration | s/m |
| 241 | Minimum relative moisture at 2 metres | % |
| 242 | Maximum relative moisture at 2 metres | % |
| 243 | Duration of total precipitation | s |
| 244 | Latent heat sublimation | W/m2 |
| 245 | Water evaporation | kg/m2 |
| 246 | Snow sublimation | kg/m2 |
| 247 | Snow history | ??? |
| 248 | A OZONE | kg kg-1 |
| 249 | B OZONE | kg kg-1 |
| 250 | C OZONE | kg kg-1 |
| 251 | Surface aerosol sea | kg kg-1 |
| 252 | Surface aerosol land | kg kg-1 |
| 253 | Surface aerosol soot | kg kg-1 |
| 254 | Surface aerosol desert | kg kg-1 |
| 255 | Missing value | n/a |

    SURFEX output Harmonie GRIB1 code table 2 version 001

    Levels are used in the conversion of SURFEX output to GRIB to indicate tile/patch/type/level:

| level | description |
| --- | --- |
| 300 | Extra yet unknown SURFEX variables |
| 301 | Fraction of each vegetation type on PATCH 1 |
| 302 | Fraction of each vegetation type on PATCH 2 |
| 303 | Fraction of each vegetation type, cy43 (ECOCLIMAP-SG) |
| 600 | Physiography fields? |
| 720 | Sea ice |
| 730 | Sea ice (TICE_LL) |
| 755 | Precip |
| 760 | Sea |
| 770 | In addition to FLake (or instead of it) |
| 780 | FLake |
| 790 | Patch (*_P fields) |
| 800 | ISBA |
| 810 | Gridpoint average |
| 820 | Surface boundary multi-layer fields |
| 830 | ISBA - patch 1 (X001*, open land) |
| 840 | ISBA - patch 2 (X002*, forest) |
| 950 | Town energy balance model (TEB) |
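For instance, a hedged way to pick out the ISBA patch-1 fields from a GRIB file converted from SURFEX output (the file name is illustrative):

```sh
# Select fields encoded with level 830 (ISBA patch 1, open land)
grib_ls -w level=830 ICMSHHARM+0012.sfx.grib
```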

    A small selection of fields available in the SURFEX output files is shown below.

| FA name | shortName | NCname | lvT | iOP | lev | sT | units | description |
| --- | --- | --- | --- | --- | --- | --- | --- | --- |
| FRAC_SEA | # | sftof | hag | 32 | 300 | ins | 0-1 | Fraction of sea |
| FRAC_WATER | # | sftlaf | hag | 33 | 300 | ins | 0-1 | Fraction of water |
| FRAC_NATURE | # | sftnf | hag | 34 | 300 | ins | 0-1 | Fraction of nature |
| FRAC_TOWN | # | sfturf | hag | 35 | 300 | ins | 0-1 | Fraction of town |
| COVER001 | # | lsm | | 1 | 0 | ins | | Land-sea mask |
| COVER002-COVER243 | # | # | | 002-243 | 0 | ins | | ECOCLIMAP I cover types |
| COVER255 | # | # | | 255 | 0 | ins | | ECOCLIMAP I MY_COVER type |
| COVER301-COVER573 | # | # | | 001-254 & 001-019 | 0 | ins | | ECOCLIMAP II cover types |
| ZS | # | orog | hag | 8 | 0 | ins | m | Orography height |
| SST | # | tos | ms | 11 | 0 | ins | K | SST |
| SIC | # | siconca | ms | 91 | 0 | ins | 0-1 | SIC |
| T2M_SEA | # | tas_sea | hag | 11 | 760 | ins | K | T2m sea |
| Q2M_SEA | # | huss_sea | hag | 51 | 760 | ins | kg kg-1 | Q2m sea |
| MER10M_SEA | # | vas_sea | hag | 34 | 760 | ins | m s-1 | V10m sea |
| ZON10M_SEA | # | uas_sea | hag | 33 | 760 | ins | m s-1 | U10m sea |
| T2M_WAT | # | tas_water | hag | 11 | 772 | ins | K | T2m water |
| Q2M_WAT | # | huss_water | hag | 51 | 770 | ins | kg kg-1 | Q2m water |
| MER10M_WAT | # | vas_water | hag | 34 | 770 | ins | m s-1 | V10m water |
| ZON10M_WAT | # | uas_water | hag | 33 | 770 | ins | m s-1 | U10m water |
| DSNTISBA | # | snd | hag | 66 | 0 | ins | m | Snow depth |
| WSNTISBA | # | snw | hag | 13 | 0 | ins | kg m-2 | Total snow reservoir |
| T2M_ISBA | # | tas_nature | hag | 11 | 802 | ins | K | T2m ISBA |
| Q2M_ISBA | # | huss_nature | hag | 51 | 802 | ins | kg kg-1 | Q2m ISBA |
| X001T2M_P | # | tas | hag | 11 | 832 | ins | K | T2m of patch 1 |
| X002T2M_P | # | tas | hag | 11 | 842 | ins | K | T2m of patch 2 |
| T2M_TEB | # | tas_town | hag | 11 | 950 | ins | K | T2m town |
| T2MMAX_TEB | # | tasmax_town | hag | 15 | 950 | max | K | Max temperature for town |
| T2MMIN_TEB | # | tasmin_town | hag | 16 | 950 | min | K | Min temperature for town |
| TGL | # | tg_LLL | hag | 11 | 800+ | ins | K | Temperature of soil layer L (ISBA) |
| WGL | # | wsa_LLL | hag | 86 | 800+ | ins | m3 m-3 | Liquid volumetric water content of soil layer L |
| WGIL | # | isa_LLL | hag | 193 | 800+ | ins | m3 m-3 | Frozen volumetric water content of soil layer L |
| WR | # | wr | hag | 12 | 800 | ins | kg m-2 | Liquid water retained by foliage (ISBA) |
| DGL | # | dsoil_LLL | hag | 23 | 300 | ins | m | Soil depth of soil layer L |

    Harmonie GRIB1 code table 2 version 210

    Used for aerosol fields


    SOMMTBWVCLEARbtwvcs#nt174-1-1-10-KBrightness temperature WV clear
    SOMMTBWVCLOUDbtwvcl#nt175-1-1-10-KBrightness temperature WV cloud

    2D Surface, Postprocessed variables (fullpos)

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    SURFCAPE.POS.F00capecapehag1600760insJ/kgConvective available potential energy (CAPE)
    SURFCIEN.POS.F00cincinhag1650770insJ/kgConvective inhibition (CIN)
    SURFLIFTCONDLEVlcl#ac1670360insmLifting condensation level (LCL)
    SURFFREECONVLEVlfc#lfc1680360insmLevel of free convection (LFC)
    SURFEQUILIBRLEVlnb#lnb1690360insmLevel of neutral buoyancy (LNB)

    2D Surface, constant near-surface and soil variables

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    SPECSURFGEOPOTENzphis_shag60340insm2/s2Geopotential relative to mean sea level. "... contains a GRID POINT orography which is the interpolation of the departure orography"
    SURFIND.TERREMERlsmlsmhag812000ins0-1Land-sea mask
    SURFAEROS.SEAaers#hag2510131920inskg/kgSurface aerosol sea (Marine aerosols, locally defined GRIB)
    SURFAEROS.LANDaerl#hag2520131930inskg/kgSurface aerosol land (Continental aerosols, locally defined GRIB)
    SURFAEROS.SOOTaerc#hag2530131940inskg/kgSurface carbon aerosol (Carbone aerosols, locally defined GRIB)
    SURFAEROS.DESERTaerd#hag2540131950inskg/kgSurface aerosol desert (Desert aerosols, locally defined GRIB)
    SURFAEROS.VOLCAN##hag197-1-1-1-1Surface aerosol volcan (Stratospheric ash, to be locally defined GRIB)
    SURFAEROS.SULFAT##hag198-1-1-1-1Surface aerosol sulfate (Stratospheric sulfate, to be locally defined GRIB)
    SURFA.OF.OZONEao#hag2480141920inskg/kgA Ozone, First ozone profile (A), locally defined GRIB
    SURFB.OF.OZONEbo#hag2490141930inskg/kgB Ozone, Second ozone profile (B), locally defined GRIB
    SURFC.OF.OZONEco#hag2500141940inskg/kgC Ozone, Third ozone profile (C), locally defined GRIB
    PROFTEMPERATUREslt#dbl8523180insKSoil Temperature
    PROFRESERV.EAUsm#dbl8623200inskg/m2Deep Soil Wetness
    PROFPROP.RMAX.EAswv#dbl23823250inskg/m2Climate relaxed deep soil wetness
    PROFRESERV.GLACEwsoice#dbl19323220inskg/m2Deep soil ice

    2D variables on special surfaces

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    KT273ISOT_ALTITh#isot803627315insmAltitude of 0-degree isotherm
    KT263ISOT_ALTITh#isot803626315insmAltitude of -10-degree isotherm
    SURFISOTPW0.MALTh#isot0wb80360insmAltitude of iso-tprimw=0
    SURFTOT.WAT.VAPOwvintprwea5401640inskg/m2Total column integral water vapour
    WFPOWERINSwfpower_inswfpower_insea21102390insMWWind power production, instantaneous (LWINDFARM=.TRUE. in NAMPHY)
    WFPOWERACCwfpower_accwfpower_accea21102390accMJWind power production, accumulated (LWINDFARM=.TRUE. in NAMPHY)

    Postprocessed variables on different surface types

Through the postprocessing software fullpos, HARMONIE offers a number of variables postprocessed on different surface types. For the current choice of variables, surfaces and levels please see scr/Select_postp.pl.

    State variables and diagnostics on pressure levels, leveltype=isobaricInhPa

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    PNNNNNWIND.U.PHYuuapl33022NNNNNinsm/su-component of wind
    PNNNNNWIND.V.PHYvvapl34023NNNNNinsm/sv-component of wind
    PNNNNNTEMPERATURttapl11000NNNNNinsKTemperature
    PNNNNNHUMI.SPECIqhuspl51010NNNNNinskg/kgSpecific humidity
    PNNNNNLIQUID_WATcwat_condclwpl760183NNNNNinskg/kgSpecific cloud liquid water content
    PNNNNNSOLID_WATEciwc_condclipl580184NNNNNinskg/kgSpecific cloud ice water content
    PNNNNNCLOUD_FRACtcc#pl7106192NNNNNins0-1Total cloud cover
    PNNNNNSNOWsnow_cond#pl1840186NNNNNinskg/kgSpecific snow water content
    PNNNNNRAINrain_cond#pl1810185NNNNNinskg/kgSpecific rain water content
    PNNNNNGRAUPELgrpl_cond#pl2010132NNNNNinskg/kgSpecific graupel
    PNNNNNGEOPOTENTIzphipl6034NNNNNinsm2/s2Geopotential
    PNNNNNHUMI_RELATrhurpl5201192NNNNNins0-1Relative humidity
    PNNNNNTHETA_PRIMpaptthetaEpl14003NNNNNinsKPseudo-adiabatic potential temperature
    PNNNNNTHETA_VIRTvptmp#pl1760015NNNNNinsKVirtual potential temperature
    PNNNNNVERT.VELOCwwapl40029NNNNNinsm/sGeometrical vertical velocity
    PNNNNNPOT_VORTICpvpvpl40214NNNNNinsK m2/kg/sPotential vorticity
    PNNNNNABS_VORTICabsv#pl410210NNNNNinss-1Absolute vorticity
    PNNNNNDIVERGENCEd#pl440213NNNNNinss-1Relative divergence

    State variables and diagnostics on height levels, levelType=heightAboveGround

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    HNNNNNWIND.U.PHYuuahag33022NNNNNinsm/su-component of wind
    HNNNNNWIND.V.PHYvvahag34023NNNNNinsm/sv-component of wind
    HNNNNNTEMPERATURttahag11000NNNNNinsKTemperature
    HNNNNNLIQUID_WATcwat_condclwhag760183NNNNNinskg/kgSpecific cloud liquid water content
    HNNNNNSOLID_WATEciwc_condclihag580184NNNNNinskg/kgSpecific cloud ice water content
    HNNNNNCLOUD_FRACtccclthag7106192NNNNNins0-1Total cloud cover
    HNNNNNSNOWsnow_cond#hag1840186NNNNNinskg/kgSpecific snow water content
    HNNNNNRAINrain_cond#hag1810185NNNNNinskg/kgSpecific rain water content
    HNNNNNGRAUPELgrpl_cond#hag2010132NNNNNinskg/kgSpecific graupel
    HNNNNNHUMI_RELATrhurhag5201192NNNNNins0-1Relative humidity
    HNNNNNPRESSUREpresphag1030NNNNNinsPaPressure

    State variables and diagnostics on PV levels, GRIB1 level type 117, levelType=potentialVorticity

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    VNNNGEOPOTENTIELz#pv6034NNNinsm2/s2Geopotential
    VNNNTEMPERATUREt#pv11000NNNinsKTemperature
    VNNNPRESSUREpres#pv1030NNNinsPaPressure
    VNNNHUMI_RELATIVr#pv5201192NNNins0-1Relative humidity
    VNNNHUMI.SPECIFIq#pv51010NNNinskg/kgSpecific humidity
    VNNNWIND.U.PHYSu#pv33022NNNinsm/su-component of wind
    VNNNWIND.V.PHYSv#pv34023NNNinsm/sv-component of wind
    VNNNVITESSE_VERTomega#pv39028NNNinsPa/sPressure vertical velocity (DYNAMICS=h)
    VNNNVERT.VELOCITw#pv40029NNNinsm/sGeometrical vertical velocity (DYNAMICS=nh)
    VNNNTEMPE_POTENTpt#pv13002NNNinsKPotential temperature
    VNNNABS_VORTICITabsv#pv410210NNNinss-1Absolute vorticity
    VNNNDIVERGENCEd#pv440213NNNinss-1Relative divergence
    VNNNTHETAPRIMWpapt#pv14003NNNinsKPseudo-adiabatic potential temperature

    State variables and diagnostics on Theta levels, GRIB1 level type 113, levelType=theta

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    TNNNGEOPOTENTIELz#th6034NNNinsm2/s2Geopotential
    TNNNTEMPERATUREt#th11000NNNinsKTemperature
    TNNNPRESSUREpres#th1030NNNinsPaPressure
    TNNNHUMI_RELATIVr#th5201192NNNins0-1Relative humidity
    TNNNHUMI.SPECIFIq#th51010NNNinskg/kgSpecific humidity
    TNNNWIND.U.PHYSu#th33022NNNinsm/su-component of wind
    TNNNWIND.V.PHYSv#th34023NNNinsm/sv-component of wind
    TNNNVITESSE_VERTomega#th39028NNNinsPa/sPressure vertical velocity (DYNAMICS=h)
    TNNNVERT.VELOCITw#th40029NNNinsm/sGeometrical vertical velocity (DYNAMICS=nh)
    TNNNABS_VORTICITabsv#th410210NNNinss-1Absolute vorticity
    TNNNPOT_VORTICITpv#th40214NNNinsK m2/kg/sPotential vorticity
    TNNNDIVERGENCEd#th440213NNNinss-1Relative divergence

    FA fields without any default GRIB1 translation

Some very special fields are left without any default translation. Please see the gl documentation on how to add your own translation.

FA name | Unit | Comment
CUF1PRESSURE | | Coupling error field.
THETAPWP_FLUX | K m-4 s-1 | Instantaneous thetaprimwprim surface flux
CLPMOCON.MOD.XFU | kg kg-1 s-1 | MOCON model output
ATMONEBUL.TOTALE | | Accumulated Total cloud cover.
ATMONEBUL.CONVEC | | Accumulated Convective cloud cover.
ATMONEBUL.BASSE | | Accumulated Low cloud cover.
ATMONEBUL.MOYENN | | Accumulated Medium cloud cover.
ATMONEBUL.HAUTE | | Accumulated High cloud cover.
SURFCFU.Q.TURBUL | | Accumulated contribution of Turbulence to Q.
SURFCFU.CT.TURBUL | | Accumulated contribution of Turbulence to CpT
SUNSHI. DURATION | | Sunshine duration.
SURFFL.U TURBUL | | Contribution of Turbulence to U.
SURFFL.V TURBUL | | Contribution of Turbulence to V.
SURFFL.Q TURBUL | | Contribution of Turbulence to Q.
SURFFL.CT TURBUL | | Contribution of Turbulence to CpT
SNNNSRC | | Second order flux.

    Variables postprocessed by gl

The following fields can be generated by gl from a history file and are thus not necessarily available as FA fields in Harmonie's FA output. When calculating these post-processed fields, make sure the fields required to derive them are in the input files! For details, check util/gl/grb/postprocess.f90 and the routines called therein.
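A sketch of such a gl call, assuming the usual -n namelist option and &naminterp conventions (the shortNames and file names here are illustrative; see the gl documentation for the authoritative options):

cat > nam_gl << EOF
&naminterp
  readkey%shortname = 'tp','vis',   ! request gl-derived fields by shortName
/
EOF
gl -n nam_gl ICMSHHARM+0012 -o derived.grib1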

    Single level fields

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
MSLPRESSUREprespslhas10300insPaMSLP. gl calculates MSLP independently of AROME/FullPos
    #tdtdhag170062insKDew point temperature
    #vis#hag2001900insmVisibility
    #wdir#ttt31020lllinsDeg. trueWind direction. gl calculates based on u[33,ttt,lll] and v[34,ttt,lll] wind components
    #ws#ttt32021lllinsm/sWind speed. gl calculates based on u[33,ttt,lll] and v[34,ttt,lll] wind components
TOT.WATER.PRECIPtpprhag610180acckg/m2Total precipitation, gl calculates TP[61,105,0] = rain[181,105,0] + snow[184,105,0] + graupel[201,105,0] + hail[204,105,0]
TOT.SOLID.PRECIPtpsolidprsolidhag185012000acckg/m2Total solid precipitation, gl calculates [185,105,0] = snow[184,105,0] + graupel[201,105,0] + hail[204,105,0]
    #mldzmlahag6701930insmMixed layer depth/boundary layer height
    #tcc#hag71061922ins0-1Fog, cloud fraction of lowest model level
    #icei#hag1350ins-Icing index
    #atmiceg#hy??01205insm/sIcing index, Atmospheric ice growth rate
    #icei2#hag/?134011940ins-Icing index version 2
    #psct#hag/ct?1360400insKPseudo satellite image, cloud top temperature (infrared)
    #pstb#hag137041980insKPseudo satellite image, water vapour brightness temperature
    #pstbc#hag138041990insKPseudo satellite image, water vapour br. temp. + correction for clouds
    #pscw#hag139042000ins-Pseudo satellite image, cloud water reflectivity (visible)
    #prtp#hag14401190inscodePrecipitation type, 0:drizzle, 1:rain, 2:sleet, 3:snow, 4:freezing drizzle, 5:freezing rain, 6:graupel, 7:hail
    #fg#ttt2280222lllmaxm/sGust wind speed, calculated from ugst & vgst on corresponding level & levelType
    #hti#hag1480171930ins-Helicopter Triggered lightning Index
    #transmit#hag149061990ins-Transmittance
    #cat#hag145019220ins-|%CAT (clear air turbulence) index
    #bvf#hag1590192020inss-1Brunt Vaisala frequency

    Integrated quantities

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    TOT.NEB.ICEciwc_vintcliviea5801700inskg/m2Vertical integral of cloud ice
    TOT.NEB.WATcwat_vintclqviea7601690inskg/m2Vertical integral of cloud liquid water
    #msca#ea133061970ins0-1Mask of significant cloud amount
    #cape#hag1600760insJ/kgConvective Available Potential Energy, comes in two flavours, cape_version=1|2, where the second is compatible with the ECMWF version
#cin#hag1650770insJ/kgConvective inhibition, comes in two flavours, cape_version=1|2, where the second is compatible with the ECMWF version
    #rain_vintclrviea18101450inskg/m2Vertical integral of rain
    #snow_vintclsviea18401460inskg/m2Vertical integral of snow
#grpl_vintclgviea20101740inskg/m2Vertical integral of graupel
    #cb#ea18606110insmCloud base
    #ct#ea18706120insmCloud top
    #cb38#hag?189061983insmCloud base >3/8
    #lgt#ea2090171920insflash/hLightning intensity
    #lmxws#ea/mw?1420360insmLevel of max wind speed
    #maxucol#ea1640220insm/sMax u-component in column
    #maxvcol#ea1770230insm/sMax v-component in column
    #lmxice#ea143011990insmIcing index, Level of max icing
    #mxicegr#ea141012040ins1Icing index, Max icing growth index (0-4)
    #blice#ea14601960insmIcing index, Bottom level of icing
    #tlice#ea14701950insmIcing index, Top level of icing
    #cat_maxlev#ea1500192010insmLevel of max CAT index
    #cat_max#ea1510191970ins-Max CAT index
    #cat_b#ea1520191980insmBottom level of CAT
    #cat_t#ea1530191990insmTop level of CAT

    GRIB encoding information

    Time units, WMO code table 4

    The following time units are used to encode GRIB edition 1 data

Code | Unit
0 | Minute
1 | Hour
13 | 15 minutes
14 | 30 minutes

Time range indicator, WMO code table 5

Code | abbr | Definition
0 | ins | Forecast product valid for reference time + P1 (P1 > 0), or uninitialized analysis product for reference time (P1 = 0)
2 | min/max | Product with a valid time ranging between reference time + P1 and reference time + P2. Used for min/max values
3 | avg | Average (reference time + P1 to reference time + P2)
4 | acc | Accumulation (reference time + P1 to reference time + P2), product considered valid at reference time + P2

Note that fields available as both instantaneous and accumulated values, e.g. rain, have the same parameter indicator and can only be distinguished by the time range indicator.
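For example, with ecCodes the two variants of rain can be separated by matching on the time range indicator (the file name is a placeholder):

grib_ls -w indicatorOfParameter=181,timeRangeIndicator=0 fc.grib1   # instantaneous rain
grib_ls -w indicatorOfParameter=181,timeRangeIndicator=4 fc.grib1   # accumulated rain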

    Level types, WMO Code table 3

level type | name | abbr | WMO/HIRLAM type definition | Units | notes
001 | surface | sfc | Ground or water surface | | WMO
002 | cloudBase | cb | Cloud base level | | WMO
003 | cloudTop | ct | Level of cloud tops | | WMO
004 | isothermZero | isot0 | Level of 0°C isotherm | | WMO
005 | adiabaticCondensation | ac | Level of Adiabatic Condensation Lifted from the Surface | | WMO
006 | maxWind | mw | Maximum wind level | | WMO
007 | tropopause | tp | Tropopause | | WMO
008 | nominalTop | nt | Top of atmosphere | | WMO
020 | isothermal | isot | Isothermal level | Temperature in 1/100 K | WMO
100 | isobaricInhPa | pl | Isobaric level | hPa | WMO
102 | meanSea | ms | At mean sea level | |
103 | heightAboveSea | has | Specified altitude above mean sea level | Altitude in m | WMO
105 | heightAboveGround | hag | Specified height above ground | Altitude in m | WMO
107 | sigma | | Sigma level | Sigma value in 1/10000 | WMO
109 | hybrid | hy | Hybrid level | | WMO
112 | depthBelowLandLayer | dbl | | |
113 | theta | th | Isentropic (theta) level | Potential temperature in K | WMO
117 | potentialVorticity | pv | Potential vorticity surface | 10-9 K m2 kg-1 s-1 | WMO
192 | isothermZeroWetBulb | isot0wb | | |
200 | entireAtmosphere | ea | Entire atmosphere (considered as a single layer) | | WMO, vertically integrated
| levelFreeConvection | lfc | as heightAboveGround in GRIB1 | |
| levelNeutralBuoyancy | lnb | as heightAboveGround in GRIB1 | |

    Harmonie GRIB1 code table 2 version 253 - Indicator of parameter

Below is the indicator of parameter code table for the Harmonie model. It is based on the WMO code table 2 version 3 with local parameters added. Parameter indicators 128-254 are reserved for originating centre use. Parameter indicators 000-127 should not be altered. In HARMONIE, radiation fluxes are assumed positive downwards (against the recommendation by WMO).

    ParDescriptionSI Units
    000Reservedn/a
    001PressurePa
    002Pressure reduced to MSLPa
    003Pressure tendencyPa s-1
    004Potential vorticityK m2 kg-1 s-1
    005ICAO Standard Atmosphere reference heightm
    006Geopotentialm2 s-2
    007Geopotential heightgpm
    008Geometrical heightm
    009Standard deviation of heightm
    010Total ozoneDobson
    011TemperatureK
    012Virtual temperatureK
    013Potential temperatureK
    014Pseudo-adiabatic potential temperatureK
    015Maximum temperatureK
    016Minimum temperatureK
    017Dew-point temperatureK
    018Dew-point depression (or deficit)K
    019Lapse rateK m-1
    020Visibilitym
    021Radar spectra (1)-
    022Radar spectra (2)-
    023Radar spectra (3)-
    024Parcel lifted index (to 500 hPa)K
    025Temperature anomalyK
    026Pressure anomalyPa
    027Geopotential height anomalygpm
    028Wave spectra (1)-
    029Wave spectra (2)-
    030Wave spectra (3)-
    031Wind directionDegree true
    032Wind speedm s-1
    033u-component of windm s-1
    034v-component of windm s-1
    035Stream functionm2 s-1
    036Velocity potentialm2 s-1
    037Montgomery stream functionm2 s-1
    038Sigma coordinate vertical velocitys-1
    039Vertical velocityPa s-1
    040Vertical velocitym s-1
    041Absolute vorticitys-1
    042Absolute divergences-1
    043Relative vorticitys-1
    044Relative divergences-1
    045Vertical u-component shears-1
    046Vertical v-component shears-1
    047Direction of currentDegree true
    048Speed of currentm s-1
    049u-component of currentm s-1
    050v-component of currentm s-1
    051Specific humiditykg kg-1
    052Relative humidity%
    053Humidity mixing ratiokg kg-1
    054Precipitable waterkg m-2
    055Vapor pressurePa
    056Saturation deficitPa
    057Evaporationkg m-2
    058Cloud icekg m-2
    059Precipitation ratekg m-2 s-1
    060Thunderstorm probability%
    061Total precipitationkg m-2
    062Large scale precipitationkg m-2
    063Convective precipitationkg m-2
    064Snowfall rate water equivalentkg m-2 s-1
    065Water equivalent of accumulated snow depthkg m-2
    066Snow depthm
    067Mixed layer depthm
    068Transient thermocline depthm
    069Main thermocline depthm
    070Main thermocline anomalym
    071Total cloud cover%
    072Convective cloud cover%
    073Low cloud cover%
    074Medium cloud cover%
    075High cloud cover%
    076Cloud waterkg m-2
    077Best lifted index (to 500 hPa)K
    078Convective snowkg m-2
    079Large scale snowkg m-2
    080Water temperatureK
    081Land cover (1 = land, 0 = sea)Proportion
    082Deviation of sea level from meanm
    083Surface roughnessm
    084Albedo%
    085Soil temperatureK
    086Soil moisture contentkg m-2
    087Vegetation%
    088Salinitykg kg-1
    089Densitykg m-3
    090Water run-offkg m-2
    091Ice cover (1 = ice, 0 = no ice)Proportion
    092Ice thicknessm
    093Direction of ice driftDegree true
    094Speed of ice driftm s-1
    095u-component of ice driftm s-1
    096v-component of ice driftm s-1
    097Ice growth ratem s-1
    098Ice divergences-1
    099Snow meltkg m-2
    100Significant height of combined wind waves and swellm
    101Direction of wind wavesDegree true
    102Significant height of wind wavesm
    103Mean period of wind wavess
    104Direction of swell wavesDegree true
    105Significant height of swell wavesm
    106Mean period of swell wavess
    107Primary wave directionDegree true
    108Primary wave mean periods
    109Secondary wave directionDegree true
    110Secondary wave mean periods
    111Net short-wave radiation flux (surface)W m-2
    112Net long-wave radiation flux (surface)W m-2
    113Net short-wave radiation flux (top of atmosphere)W m-2
    114Net long-wave radiation flux (top of atmosphere)W m-2
    115Long-wave radiation fluxW m-2
    116Short-wave radiation fluxW m-2
    117Global radiation fluxW m-2
    118Brightness temperatureK
    119Radiance (with respect to wave number)W m-1 sr-1
    120Radiance (with respect to wave length)W m-3 sr-1
    121Latent heat fluxW m-2
    122Sensible heat fluxW m-2
    123Boundary layer dissipationW m-2
    124Momentum flux, u-componentN m-2
    125Momentum flux, v-componentN m-2
    126Wind mixing energyJ
    127Image data-
    128Analysed RMS of PHI (CANARI)m2 s-2
    129Forecasted RMS of PHI (CANARI)m2 s-2
    130SW net clear sky radW m-2
    131LW net clear sky radW m-2
    132Latent heat flux through evaporationW m-2
    133Mask of significant cloud amount0-1
    134Icing index version 2-
    135Icing indexCode table
    136Pseudo satellite image, cloud top temperature (infrared)K
    137Pseudo satellite image, water vapour brightness temperatureK
    138Pseudo satellite image, water vapour br. temp. + correction for cloudsK
    139Pseudo satellite image, cloud water reflectivity (visible)?
    140Direct normal irradianceJ m-2
    141Max icing growth index-
    142Level of max wind speedm
    143Level of max icingm
144Precipitation TypeCode table
    145CAT index- / %
    146Bottom level of icingm
    147Top level of icingm
148Helicopter Triggered lightning Index-
    149Transmittance-
    150Level of max CAT indexm
    151Max CAT index-
    152Bottom level of CATm
    153Top level of CATm
    154Max Wind speedm s-1
    155Available#
    156Available#
    157Available#
    158Surface downward moon radiationW m-2
159Brunt Vaisala frequencys-1
    160CAPEJ kg-1
    161AROME hail diagnostic%
    162U-momentum of gusts out of the modelm s-1
    163V-momentum of gusts out of the modelm s-1
    164Max u-component in columnm s-1
    165Convective inhibition (CIN)J kg-1
    166MOCON out of the modelkg/kg s-1
    167Lifting condensation level (LCL)m
    168Level of free convection (LFC)m
169Level of neutral buoyancy (LNB)m
    170Brightness temperature OZ clearK
    171Brightness temperature OZ cloudK
    172Brightness temperature IR clearK
    173Brightness temperature IR cloudK
    174Brightness temperature WV clearK
    175Brightness temperature WV cloudK
    176Virtual potential temperatureK
    177Max v-component in columnm s-1
    178Available#
    179Available#
    180Available#
    181Rainkg m-2
    182Stratiform Rainkg m-2
    183Convective Rainkg m-2
    184Snowkg m-2
    185Total solid precipitationkg m-2
    186Cloud basem
    187Cloud topm
    188Fraction of urban landProportion
    189Cloud base >3/8m
    190Snow AlbedoProportion
    191Snow densitykg/m3
    192Water on canopykg/m2
    193Soil icekg/m2
    194Available#
    195Gravity wave stress U-compN/m2
    196Gravity wave stress V-compN/m2
    197Available#
    198Available#
    199Vegetation type-
    200TKEm2 s-2
    201Graupelkg m-2
    202Stratiform Graupelkg m-2
    203Convective Graupelkg m-2
    204Hailkg m-2
    205Stratiform Hailkg m-2
    206Convective Hailkg m-2
    207Available#
    208Available#
    209Lightningflash h-1
    210Simulated reflectivitydBz
    211Wind power productionMW or MJ
    212Pressure departurePa
    213Vertical divergences-1
    214UD_OMEGAms-1?
    215DD_OMEGAms-1?
    216UDMESHFRAC-
    217DDMESHFRAC-
    218PSHICONVCL-
    219Surface albedo for non snow covered areasProportion
    220Standard deviation of orography * gm2 s-2
    221Anisotropy coeff of topography-
    222Direction of main axis of topographyrad
    223Roughness length of bare surface * gm2 s-2
    224Roughness length for vegetation * gm2 s-2
    225Fraction of clay within soilProportion
    226Fraction of sand within soilProportion
    227Maximum proportion of vegetationProportion
    228Gust wind speedm s-1
    229Albedo of bare groundProportion
    230Albedo of vegetationProportion
    231Stomatal minimum resistances/m
    232Leaf area indexm2/m2
    233Thetaprimwprim surface fluxKm/s
    234Dominant vegetation index-
    235Surface emissivity-
    236Maximum soil depthm
    237Soil depthm
    238Soil wetnesskg/m2
    239Thermal roughness length * gm2 s-2
240Resistance to evapotranspirations/m
    241Minimum relative moisture at 2 meters%
    242Maximum relative moisture at 2 meters%
    243Duration of total precipitationss
    244Latent Heat SublimationW/m2
    245Water evaporationkg/m2
    246Snow sublimationkg/m2
    247Snow history???
    248A OZONEkg kg-1
    249B OZONEkg kg-1
    250C OZONEkg kg-1
    251Surface aerosol seakg kg-1
    252Surface aerosol landkg kg-1
    253Surface aerosol sootkg kg-1
    254Surface aerosol desertkg kg-1
    255Missing valuen/a

    SURFEX output Harmonie GRIB1 code table 2 version 001

    Levels are used in the conversion of SURFEX output to GRIB to indicate tile/patch/type/level:

level | description
300 | Extra yet unknown SURFEX variables
301 | Fraction of each vegetation type on PATCH 1
302 | Fraction of each vegetation type on PATCH 2
303 | Fraction of each vegetation type, cy43 (ECOCLIMAP-SG)
600 | Physiography fields?
720 | Sea ice
730 | Sea ice (TICE_LL)
755 | Precip
760 | Sea
770 | In addition to FLake (or instead of it)
780 | FLake
790 | Patch (*_P fields)
800 | ISBA
810 | Gridpoint average
820 | Surface boundary multi-layer fields
830 | ISBA - patch 1 (X001*, open land)
840 | ISBA - patch 2 (X002*, forest)
950 | Town energy balance model (TEB)
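These level values make it easy to pick out a single tile from the converted GRIB file with standard ecCodes tools, e.g. (the file name is a placeholder):

grib_ls -p shortName,indicatorOfParameter,level -w level=840 SURFEX_out.grib1   # ISBA patch 2 (forest) fields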

    A small selection of fields available in the SURFEX output files is shown below.

    FA nameshortNameNCnamelvTiOPlevsTunitsdescription
    FRAC_SEA#sftofhag32300ins0-1Fraction of sea
    FRAC_WATER#sftlafhag33300ins0-1Fraction of water
    FRAC_NATURE#sftnfhag34300ins0-1Fraction of nature
    FRAC_TOWN#sfturfhag35300ins0-1Fraction of town
    COVER001#lsm10insLAND SEA MASK
    COVER002-COVER243##002-2430insECOCLIMAP I cover types
    COVER255##2550insECOCLIMAP I MY_COVER type
    COVER301-COVER573##001-254 & 001-0190insECOCLIMAP II cover types
    ZS#oroghag80insmOro hgt.
    SST#tosms110insKSST
    SIC#siconcams910ins0-1SIC
    T2M_SEA#tas_seahag11760insKT2m sea
    Q2M_SEA#huss_seahag51760inskg kg-1Q2m sea
    MER10M_SEA#vas_seahag34760insm s-1V10m sea
    ZON10M_SEA#uas_seahag33760insm s-1U10m sea
    T2M_WAT#tas_waterhag11772insKT2m water
    Q2M_WAT#huss_waterhag51770inskg kg-1Q2m water
    MER10M_WAT#vas_waterhag34770insm s-1V10m water
    ZON10M_WAT#uas_waterhag33770insm s-1U10m water
    DSNTISBA#sndhag660insmSnow depth
    WSNTISBA#snwhag130inskg m-2Total snow reservoir
    T2M_ISBA#tas_naturehag11802insKT2m isba
    Q2M_ISBA#huss_naturehag51802inskg kg-1Q2m isba
    X001T2M_P#tashag11832insKT2m of patch 1
    X002T2M_P#tashag11842insKT2m of patch 2
    T2M_TEB#tas_townhag11950insKT2m town
    T2MMAX_TEB#tasmax_townhag15950maxKMax Temp for town
    T2MMIN_TEB#tasmin_townhag16950minKMin Temp for town
    TGL#tg_LLLhag11800+insKTemperature of soil layer L(isba)
    WGL#wsa_LLLhag86800+insm3 m-3Liquid volumetric water content of soil layer L
    WGIL#isa_LLLhag193800+insm3 m-3Frozen volumetric water content of soil layer L
    WR#wrhag12800inskg m-2Liquid water retained by foliage (isba)
    DGL#dsoil_LLLhag23300insmSoil depth of soil layer L

    Harmonie GRIB1 code table 2 version 210

    Used for aerosol fields

    GRIB

    NetCDF

    diff --git a/dev/ForecastModel/SingleColumnModel/Forcing/index.html b/dev/ForecastModel/SingleColumnModel/Forcing/index.html index ebe8ec0340..994d949935 100644 --- a/dev/ForecastModel/SingleColumnModel/Forcing/index.html +++ b/dev/ForecastModel/SingleColumnModel/Forcing/index.html @@ -48,4 +48,4 @@ NL_T_NUDG_TIME(3) = 43200 NL_T_NUDG_TIME(4) = 64800 NL_T_NUDG_TIME(5) = 86400 -/

and now you cannot run MUSC for more than 1 day ... if the time between the forcing profiles is the same you can use the *_FREQ variables instead of the *_TIME ones ...
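As an illustration, a fixed-interval variant of the namelist above might replace the explicit times with a single frequency; NL_T_NUDG_FREQ is an assumed name following the *_FREQ pattern, so check the forcing namelist definitions before relying on it:

NL_T_NUDG_FREQ = 21600    ! assumed name: one nudging profile every 6 h instead of explicit *_TIME entries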

    diff --git a/dev/ForecastModel/SingleColumnModel/MUSC/index.html b/dev/ForecastModel/SingleColumnModel/MUSC/index.html index 09a0925adc..f864c12021 100644 --- a/dev/ForecastModel/SingleColumnModel/MUSC/index.html +++ b/dev/ForecastModel/SingleColumnModel/MUSC/index.html @@ -70,4 +70,4 @@ IF(ABS(ZVBH(JFLEV)-PVBH(JFLEV)) > PEPS) THEN WRITE(KULOUT,*) ' VERTICAL FUNCTION *B* MISMATCH ON ',&

    Then you are ready to compile:

    When starting the MUSC run, add the PATH to mpirun and the libraries:

    export PATH=$PATH:/usr/lib64/openmpi/bin
     export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/lib64/openmpi/lib
./musc_run.sh [...]

    MUSC FAQ

    1. If there is an error, what files do I look in? NODE.001_01 and lola in your output directory.

2. How do I handle the output files? The output files are of the form Out.XXX.XXXX and appear in your output directory. They are in lfa format and can be handled using ddh tools. See the bash script musc_plot1Dts.sh for ideas. There are also ICM*lfa output files that are handy for plotting profiles - use musc_convertICM2ascii.sh to convert these files to ASCII and musc_plot_profiles_ICMfiles.sh to plot some profiles e.g. TKE, cloud liquid etc.

    3. I ran a different idealised case but did not get different results? The likely reason for this is that you did not delete the namelists from your experiment directory. If the namelists are there, the musc_run.sh script neither creates them nor copies them from the repository.

    4. How do I create a new idealised case? This is not straightforward but the following was used to create the ASTEX cases in cy43 using info from cy38: https://www.overleaf.com/7513443985ckqvfdcphnng

5. How can I access a list of MUSC output parameters? Ensure you have the ddhtoolbox compiled. Then use lfaminm $file on any of your output files and it will show what is there. To look at a particular variable try lfac $file $parameter, e.g. lfac $file PTS (for surface temperature). You can redirect the values to an ASCII file for ease of use (e.g. lfac $file PTS > $ASCIIfile); see the snippet after this list.

6. Is MUSC similar to the full 3D model version - is the physics the same? Yes; if you check out develop then MUSC is up to date with the physics on that branch.

    7. Do I need to recompile the model if I modify code? Yes, if you modify code in a single file you must recompile the code but do not delete the original compiled model first. This will recompile relatively quickly. If you modify code in multiple files and you change what variables are passed between files, then you must delete your original compiled model and recompile the code. This will take longer to recompile.
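Putting FAQ 5 together as a small shell session (Out.000.0001 is just an example of the output file pattern above):

lfaminm Out.000.0001               # list the variables available in the lfa file (ddhtoolbox)
lfac Out.000.0001 PTS              # print the surface temperature values
lfac Out.000.0001 PTS > PTS.txt    # save them to an ASCII file for plotting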

    MUSC variable names

    A list of variable names found in the MUSC lfa output files can be found here. Please note that this is not a complete list of MUSC output parameters (yet). The variables in regular ICMSH... fa output are documented here

    Outstanding Issues

1. ARMCU and Jenny's cases run without surface physics, radiation etc. and hence return NaNs in apl_arome. To circumvent this on ECMWF, we needed to compile less strictly. This needs to be investigated further.
    2. The ASTEX cases currently do not run on ECMWF but work perfectly at Met Eireann - debugging needed.

    MUSC using EMS

    These instructions have moved to MUSC EMS

    diff --git a/dev/ForecastModel/SingleColumnModel/MUSC_EMS/index.html b/dev/ForecastModel/SingleColumnModel/MUSC_EMS/index.html index cdb87c3998..30a3cefce6 100644 --- a/dev/ForecastModel/SingleColumnModel/MUSC_EMS/index.html +++ b/dev/ForecastModel/SingleColumnModel/MUSC_EMS/index.html @@ -55,4 +55,4 @@ mkdir config cp $HOME/SCM-atlas_git/ewhelan/hirlam/examples/config/config_HARM.py config/ ### edit config/config_HARM.py -run_atlas1d.py -config config/config_HARM.py
    +run_atlas1d.py -config config/config_HARM.py
    diff --git a/dev/ForecastModel/SingleColumnModel/MUSC_vars/index.html b/dev/ForecastModel/SingleColumnModel/MUSC_vars/index.html index b08c9223a4..f3aad9202d 100644 --- a/dev/ForecastModel/SingleColumnModel/MUSC_vars/index.html +++ b/dev/ForecastModel/SingleColumnModel/MUSC_vars/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Variable names for MUSC output

    List of parameters copied from variable_list.csv

short name | variable name | long name | unit
lwdw | PFRTHDS | long wave downward radiation at surface | W/m2
lwup | PFRTH | long wave upward radiation at surface | W/m2
swdw | PFRSODS | short wave downward radiation at surface | W/m2
swup | PFRSO | short wave upward radiation at surface | W/m2
shf | PFCS | sensible heat flux | W/m2
lhf | PFCLN | latent heat flux | W/m2
evap | ZLH_flux | evaporation+sublimation flux | mm/day
evap2 | PFEVL | evaporation+sublimation flux | mm/day
ustar | ZUSTAR | friction velocity | m/s
rain | PREC_TOT | precipitation (liq+sol) rate | mm/day
psurf | PAPRS | surface pressure | Pa
hpbl | PCLPH | boundary layer height | m
hpbl2 | KCLPH | boundary layer height | m
tsurf | PTS | surface temperature | K
t2m | PTCLS | 2 m temperature | K
q2m | PQCLS | 2 m specific humidity | kg/kg
rh2m | PRHCLS | 2 m relative humidity | [0-100]
u10m | PUCLS | 10m u-component | m/s
v10m | PVCLS | 10m v-component | m/s
t3m | PT_03 | temperature at 3.30 meter above the surface | K
q3m | PQ_03 | specific humidity at 3.30 meter | kg/kg
rh3m | PRH_03 | relative humidity at 3.30 meter | [0-100]
u3m | PU_03 | u-component at 3.30 meter | m/s
v3m | PV_03 | v-component at 3.30 meter | m/s
etc.
t42m | PT_42 | temperature at 41.90 meter above the surface | K
q42m | PQ_42 | specific humidity at 41.90 meter | kg/kg
rh42m | PRH_42 | relative humidity at 41.90 meter | [0-100]
u42m | PU_42 | u-component at 41.90 meter | m/s
v42m | PV_42 | v-component at 41.90 meter | m/s
cc | PCLCT | total cloud cover fraction | 0-1
tsurf | PTS | surface temperature | K
alb | PALBH | albedo | [0-1]
alb_surf | TALB_ISBA | surface albedo | -
z0m | PGZ0 | momentum roughness length | m
z0h | PGZ0H | heat roughness length | m
emis | PEMIS | surface emissivity | [0-1]
emis | EMIS | surface emissivity | [0-1]
zf | PAPHIF | altitude of layer mid-points at t=0 (full-level) | m
pf | PAPRSF | pressure of layer mid-points at t=0 (full-level) | Pa
t | PT | temperature | K
th | THETA | potential temperature | K
q | PQ | specific humidity | kg/kg
u | PU | zonal wind component | m/s
v | PV | meridional wind component | m/s
ugeo | ZFUGEO | u-component geostrophic wind | m/s
vgeo | ZFVGEO | v-component geostrophic wind | m/s
dudt_ls | ZFU | u-component advection | m/s/s
dvdt_ls | ZFV | v-component advection | m/s/s
dtdt_ls | ZFT | temperature advection | K/s
dqdt_ls | ZFQ | moisture advection | kg/kg/s
w | ZW | vertical movement | m/s
zhh | PAPHI | height of half level | m
phh | PAPRS | pressure of half level | Pa
km | ZKM | eddy diffusivity, momentum | m2/s
kh | ZKH | eddy diffusivity, heat | m2/s
mf | ZMF_shal | mass flux | kg/m2/s
dT_dt_rad | ZDTRAD | temperature tendency from radiation | K/d
TKE | PECT | turbulent kinetic energy | m2/s2
shear | ZPRDY | shear production | m2/s3
buoy | ZPRTH | buoyancy production | m2/s3
trans | ZDIFF | total transport | m2/s3
dissi | ZDISS | dissipation | m2/s3
    diff --git a/dev/ForecastModel/WindFarms/index.html b/dev/ForecastModel/WindFarms/index.html index 025140d3d7..1653e51d7b 100644 --- a/dev/ForecastModel/WindFarms/index.html +++ b/dev/ForecastModel/WindFarms/index.html @@ -141,4 +141,4 @@ editionNumber = 2 ; interpretationOfNumberOfPoints = 0 ; subCentre = 255 ; - }

    For both GRIB 1 and GRIB 2:

    1. Wind power production, accumulated:

      • name: 'Wind power production, accumulated'
      • paramId: '253211'
      • shortName: 'wfpower_acc'
      • units: 'MJ'
2. Wind power production, instantaneous:

      • name: 'Wind power production, instantaneous'
      • paramId: '253211'
      • shortName: 'wfpower_ins'
      • units: 'MW'
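Once these definitions are found via ECCODES_DEFINITION_PATH, both fields can be inspected with ecCodes tools; since they share paramId, the stepType key is what separates them (the file name is a placeholder):

grib_ls -p shortName,stepType,units -w paramId=253211 fc.grib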
    diff --git a/dev/Observations/Aeolus/index.html b/dev/Observations/Aeolus/index.html index 892ea1dc6e..f89d08340a 100644 --- a/dev/Observations/Aeolus/index.html +++ b/dev/Observations/Aeolus/index.html @@ -6,4 +6,4 @@

    Aeolus, HLOS wind

    short overview

Aeolus was an ESA Earth Explorer mission, carrying a Doppler wind lidar that measured the vertical profile of winds. Aeolus was launched in August 2018 and safely re-entered over Antarctica in July 2023. The period of usable data is from 31 August 2018 to 30 April 2023.

    Aeolus winds come in two different versions, Mie and Rayleigh. The Mie winds are measured by observing the scattering by cloud droplets and aerosols and are only available in optically thin and medium-thin clouds. The horizontal resolution of Mie profiles is 10 km. Rayleigh winds are obtained by measuring the scattering by air molecules in clear air, and have a lower horizontal resolution of 80 km.

    Since Aeolus was a non-operational mission, the data need to be downloaded manually from, e.g. ESA's Earth Observation portal, https://aeolus-ds.eo.esa.int/oads/access/ (a registration is needed to download the data).

The data from Aeolus are processed by the Aeolus DISC team, and the processing has been continuously improved throughout the mission lifetime. A final version, covering the full Aeolus data set, will be released in 2028 (using baseline 18; the operational baseline at the time of the satellite's re-entry was baseline 13). More details can be found here.

    Harmonie changes

    To use Aeolus winds, activate them in scr/include.ass by setting LIDAR_OBS to 1

    export LIDAR_OBS=1             # LIDAR aeolus hlos wind
     [[  $LIDAR_OBS -eq 1  ]] && types_BASE="$types_BASE lidar"

The optimal settings to use for the observation errors of Aeolus data are still an open question. The errors are reported in the BUFR files which contain the L2B winds, and the limit of when to allow them can be adjusted in src/odb/pandor/module/bator_decodbufr_mod.F90

The main ones to be careful with are the upper error limits. The recommended values at the time of writing are:

      REAL, PARAMETER    :: error_est_threshold_Mie = 4.5  ! m/s
       REAL, PARAMETER    :: error_est_threshold_Ray = 8.  ! m/s

    Future updates

When the follow-on mission, Aeolus-2 (ESA's name) or EPS-Aeolus (EUMETSAT's name), launches in 2032, these settings will probably have to be revised. The Aeolus follow-on mission will carry a revised version of the previous instrument, providing observations with higher resolution.

    diff --git a/dev/Observations/Amv/index.html b/dev/Observations/Amv/index.html index 2b83732121..eb14678940 100644 --- a/dev/Observations/Amv/index.html +++ b/dev/Observations/Amv/index.html @@ -36,4 +36,4 @@ values 24 008012 LAND/SEA QUALIFIER values 25 007024 SATELLITE ZENITH ANGLE values 211 033007 % CONFIDENCE -END geowind

Please be reminded that the processing of data from MARS has not yet been tested. From 43h2.1, we have all the necessary content of the param file for processing of both GEOW and POLW in const/bator_param/param_bator.cfg.geow.${GEOW_SOURCE/POLW_SOURCE}

    BATOR namelist

    Depending on the satellite and channel you may have to add entries to the NADIRS namelist in the Bator script like the following:

    TS_GEOWIND(isatid)%T_SELECT%LCANAL(ichanal)=.TRUE.,
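For example (the indices below are purely illustrative placeholders; use the ones matching your satellite/channel mapping):

TS_GEOWIND(1)%T_SELECT%LCANAL(2)=.TRUE.,   ! illustrative: activate channel index 2 for satellite index 1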

    Source code

    The reading of BUFR AMVs is taken care of by src/odb/pandor/module/bator_decodbufr_mod.F90. This subroutine reads the following parameters defined in the param.cfg file:

Name | Description
Date and time | derived from the tconfig(004001) - assumes month, day, hour and minute are in consecutive entries in the values array
Location | latitude and longitude are read from tconfig(005001) and tconfig(006001)
Satellite | the satellite identifier is read from tconfig(001007)
Origin. center | the originating center (of the AMV) is read from tconfig(001031)
Compu. method | the wind computation method (type of channel + cloudy/clear if WV) is read from tconfig(002023)
Derivation method | the height assignment method is read from tconfig(002163) and the tracking method from tconfig(002164)
Channel frequency | the centre frequency of the satellite channel is read from tconfig(002153)
Height (pressure) | the height of the AMV observation is read from tconfig(007004)
Wind | the wind speed and direction are read from tconfig(011002) and tconfig(011001)
Temperature | the coldest cluster temperature is read from tconfig(012071)
FG QI | the QI (including FG consistency) for MSG AMVs is read from the first location where descriptor 033007 appears
noFG-QI | the FG-independent QI for MSG AMVs is read from the first location where 033007 appears + offset(1)=24
Sat zenith angle | the satellite zenith angle is read from tconfig(007024)
Land/sea/coast | a land/sea/coast qualifier is read from tconfig(008012)

The geowind routine was adapted to handle MSG AMVs from MARS and its module /src/odb/pandor/module/bator_decodbufr_mod.F90 was uploaded to the trunk (Mar 2017).

    Blacklist

    The selection/blacklist of AMVs according to channel, underlying sea/land, QI, etc. is done in src/blacklist/mf_blacklist.b, section - SATOB CONSTANT DATA SELECTION -.

    diff --git a/dev/Observations/Ascat/index.html b/dev/Observations/Ascat/index.html index ca240ca1d1..dcd0eddf81 100644 --- a/dev/Observations/Ascat/index.html +++ b/dev/Observations/Ascat/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -
    +
    diff --git a/dev/Observations/Atovs/index.html b/dev/Observations/Atovs/index.html index 8fee039904..d47e145b73 100644 --- a/dev/Observations/Atovs/index.html +++ b/dev/Observations/Atovs/index.html @@ -255,4 +255,4 @@ cp $HM_LIB/const/bias_corr/${DOMAIN}/VARBC.cycle.$HH ${DLOCVARBC}/VARBC.cycle || \ { echo "Could not find cold start VARBC data VARBC.cycle.$EMONTH.$HH" ; exit 1 ; } ls -lrt ${DLOCVARBC} - fi

The only difference is that all the VarBC files are now stored under a ${DOMAIN} directory. This allows our system to be up to date and ready for all known model domains. Please send your VarBC files to the system administrators.

    For operational implementation

    The setup is much easier. Name the VARBC.cycle files the following way VARBC.cycle.${HH} and put them in $ARCHIVE_ROOT/VARBC_latest, which you need to create.
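A sketch of that layout, assuming four analysis cycles and a placeholder path for your spun-up VarBC files:

mkdir -p $ARCHIVE_ROOT/VARBC_latest
for HH in 00 06 12 18; do
  cp /path/to/spinup/VARBC.cycle.$HH $ARCHIVE_ROOT/VARBC_latest/VARBC.cycle.$HH
done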

    To check that you have done things right:

If you passed the test, then you are ready with the ATOVS implementation. Congratulations!

    diff --git a/dev/Observations/Bator/index.html b/dev/Observations/Bator/index.html index 2a433bf37d..0104666a01 100644 --- a/dev/Observations/Bator/index.html +++ b/dev/Observations/Bator/index.html @@ -88,4 +88,4 @@ #-- create IOASSIGN file for the given sub-base cd ${d_DB}/ECMA.${base} export ODB_IOASSIGN_MAXPROC=${NPOOLS} - $HM_LIB/scr/create_ioassign -l "ECMA" -n ${BATOR_NBPOOL}

where $base is the ODB base ($base can be, for example, conv (conventional data), amsu (ATOVS/AMSU-A, AMSU-B/MHS), sev (SEVIRI), iasi (IASI) or radarv (radar)). Important: if you would like to have more bases, do not forget to take that into consideration when generating the "batormap" file for BATOR, which defines which observations you would like to have in each base.

    Blacklisting

To avoid model forecast degradation, two files can be used to blacklist or exclude data from the analysis. They are also used to blacklist observations that the model cannot deal with because they are not representative (orography, breeze effects, ...). The reason for this method of 'blacklisting', built into Bator, existing alongside hirlam_blacklist.b (built into Screening), is to allow simple and quick changes (in particular without changing the binary) in the operational suite.

The selection of an observation to be 'blacklisted' can be done using multiple criteria (SID/STATID, obstype, codetype, varno, channel/level, production centre, sub-centre producer, networks concerned, cycle (prod/assim), ...).

    LISTE_LOC

The LISTE_LOC file can be used to blacklist satellite data, and also other data by type and/or subtype for a given parameter (described by varno or not). The contents of the LISTE_LOC are as follows:

Column | Description | Format
1 | Type of action: N: blacklisted, E: excluded | a1
2 | The observation type (obstype@hdr) | i3
3 | The observation code-type (codetype@hdr) | i4
4 | The satellite ID with leading zeros (satid@sat) | a9
5 | The centre that produced the satellite data | i4
6 | The parameter ID (varno@body) or the satellite sensor ID (sensor@hdr) | i4
7 | Optional keywords of ZONx4, TOVSn, PPPPn, PROFn |

    TOVSn C1 C2 ... Cn

    PPPPn P1 P2 ... Pn

    PROFn P1a P2 ... Pn-1 I1 I2 ... In-1

    ZONx4 latmin latmax lonmin lonmax

    LISTE_NOIRE_DIAP

    The LISTE_NOIRE_DIAP (const/bator_liste) can be used to blacklist conventional observations by station identifier. The contents of the LISTE_NOIRE_DIAP are as follows:

Column | Description | Format
1 | Observation type (obstype@hdr) | i2
2 | Observation name | a10
3 | Observation codetype (codetype@hdr) | i3
4 | Parameter ID (varno@body) | i3
5 | Station ID (statid@hdr) | a8
6 | Start date of blacklisting yyyymmdd | a8
7 | Optional layer blacklisting (PROFn) | a180

    PROFn P1a P2 ... Pn-1 I1 I2 ... In

Particularities: the blacklisting of certain parameters involves the automatic blacklisting of other parameters, summarized in the table below:

obstype | specified parameter | blacklisted parameters
SYNOP | 39 (t2) | 39 (t2), 58 (rh2), 7 (q)
SYNOP | 58 (rh2) | 58 (rh2), 7 (q)
TEMP | 1 (z) | 1 (z), 29 (rh), 2 (t), 59 (td), 7 (q)
TEMP | 2 (t) | 2 (t), 29 (rh), 7 (q)
TEMP | 29 (rh) | 29 (rh), 7 (q)
    diff --git a/dev/Observations/Cope/index.html b/dev/Observations/Cope/index.html index 10ef26d622..a58cc049f7 100644 --- a/dev/Observations/Cope/index.html +++ b/dev/Observations/Cope/index.html @@ -86,4 +86,4 @@ make install

    COPE in HARMONIE system

    The use of COPE in HARMONIE relies on ODB-API, b2o and COPE itself.

    export COPE_DEFINITIONS_PATH=${COPE_DIR}/share/cope
     export ODB_SCHEMA_FILE=${B2O_DIR}/share/b2o/ECMA.sch
     export ODB_CODE_MAPPINGS=${B2O_DIR}/share/b2o/odb_code_mappings.dat
export ODBCODEMAPPINGS=${B2O_DIR}/share/b2o/odb_code_mappings.dat

diff --git a/dev/Observations/GNSS/index.html b/dev/Observations/GNSS/index.html index 7d1d563a82..c1574c7869 100644 --- a/dev/Observations/GNSS/index.html +++ b/dev/Observations/GNSS/index.html @@ -6,4 +6,4 @@

    GNSS ZTD observations

    Introduction

The NRT GNSS delay data contain information about the amount of water vapour above the GNSS sites. The aim of the European E-GVAP programme is to provide its EUMETNET members with European GNSS delay and water vapour estimates for operational meteorology in near real-time. Currently, the E-GVAP network consists of more than 1500 GNSS sites.

    • E-GVAP Programme here

    GNSS ZTD data

Raw data from GNSS sites are collected by a number of GNSS analysis centres, which process the data to estimate the Zenith Total Delays (ZTD) and other parameters. The ZTDs are then forwarded to a data server for distribution to meteorological institutes. The observations are currently distributed from the Met Office in two formats: BUFR, distributed via the GTS to the meteorological centres, and ASCII, which may be downloaded via ftp.

    Preprocessing the GNSS ZTD data

The preprocessing of these data is done locally and depends on whether you want them in BUFR or ASCII format. The ASCII option needs a local script to fetch the files from the Met Office server and transform them from COST format (E-GVAP) into OBSOUL format. (For this case there is an optional script in the Harmonie scr directory, called GNSStoOBSOUL, that transforms the ASCII files into OBSOUL format.)

Apart from the preprocessing, a White List of the sites to be assimilated in your domain is needed. It will contain the values:

       statid lat lon alt dts bias sd obserr

where statid is the name of the site (NNNNPPPP: NNNN=site, PPPP=processing centre), dts is the frequency in minutes between observations, bias the station bias, sd the standard deviation of that station, and obserr the observation error. You are expected to have calculated these values before launching the experiment.
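A minimal sketch of a White List line (the station name and all numbers are hypothetical, for illustration of the column order only):

   ONSAMETO   57.396   11.926   45.6   15   0.0021   0.0079   0.0100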

    Harmonie changes to assimilate GNSS ZTD data

    scr/

    • Bator and Fetch_assim_data have the white list path.
• Oulan : has the white list and GNSS observation file paths and concatenates the GNSS observations to the rest of the conventional observation file.
• include.ass: This script has two options for GNSS bias correction: static bias correction (LSTATIC_BIAS) or variational bias correction (LVARBC_GNSS). In the first case, a fixed bias value for each site is read from the White List and then subtracted from the corresponding observation value. In the second case, VarBC, you also need to set the cold start option in this script.
  export GNSS_OBS=1            #GNSS
   export LSTATIC_BIAS=F        #Switch for bias correction or not,(T|F)
   export LVARBC_GNSS=T         #Switch for GNSS varbc
   export VARBC_COLD_START=yes  #yes/no

nam/ : The White List should be placed here, called list.gpssol.201512 for example.

/src/arpifs/obs_preproc/

• redgps.F90 : This routine is where the horizontal thinning is done (CY40), so the thinning distance can be selected here.

    /src/blacklist/

• mf_blacklist.b: here it is possible to blacklist the GNSS observations in order to calculate the VarBC coefficients. This can be done by setting the apdss variable to experimental.
    diff --git a/dev/Observations/Iasi/index.html b/dev/Observations/Iasi/index.html index 86949e608d..22e60e28dc 100644 --- a/dev/Observations/Iasi/index.html +++ b/dev/Observations/Iasi/index.html @@ -70,4 +70,4 @@ /

    Here we specify a list of 145 channels to be included in "band 1" of the cloud detection, i.e., in the main cloud detection channel band. The setup of the cloud detection involves not just the channel list but several additional tuning parameters that can be modified to make the screening more or less conservative. The default settings are specified in src/arpifs/obs_preproc/cloud_detect_setup.F90. A comprehensive description of the cloud detection scheme, including explanations of the various tuning parameter values, is given at the NWPSAF web site https://nwp-saf.eumetsat.int/site/software/aerosol-and-cloud-detection/documentation/.

The log file of the Screening task will indicate whether the formatting of the namelist file is appropriate:

     READING CLOUD DETECTION FILE FOR IASI
      IASI  CLOUD DETECTION FILE READ OK

    In case of an error, the following is printed instead:

     READING CLOUD DETECTION FILE FOR IASI
      PROBLEM READING IASI CLOUD DETECTION FILE: Using Default Values

    The third possibility is that the namelist file does not appear in the working directory, in which case the printout statement is this:

     READING CLOUD DETECTION FILE FOR IASI
 NO IASI  CLOUD DETECTION FILE : Using Default Values

Please note that the use of the "Default Values" is generally not a desired outcome. This is because many of the cloud detection channels in the default list (see src/arpifs/obs_preproc/cloud_detect_setup.F90) are sensitive to the higher stratosphere and may therefore be severely affected by the relatively low model top of limited-area HARMONIE systems.

    References:

    McNally, AP, and PD Watts, 2003: A cloud detection algorithm for high-spectral-resolution infrared sounders. Quarterly Journal of the Royal Meteorological Society, 129, 3411-3423, doi:10.1256/qj.02.208.

    diff --git a/dev/Observations/Modes/index.html b/dev/Observations/Modes/index.html index 0269fbdb28..80ca8ac7ea 100644 --- a/dev/Observations/Modes/index.html +++ b/dev/Observations/Modes/index.html @@ -20,4 +20,4 @@ END

    Processing using Oulan

    The processing of Mode-S EHS BUFR using Oulan is controlled by the following namelist entry in scr/Oulan:

    LMODES=.FALSE.
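LMODES=.FALSE. disables the processing; to process Mode-S EHS BUFR with Oulan the switch is presumably flipped in that same namelist entry:

   LMODES=.TRUE.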

    Thinning of Mode-S

    Thinning of a bufr file

A collection of Python scripts that directly thin the Mode-S CSV and BUFR files has been uploaded to https://gitlab.com/haandes/emaddc-public.

For example, the emaddcc-thinning4.py script works with the large Mode-S_EMADDC_KNMI_oper_${DTG}.bufr file and thins the data in four dimensions: horizontally, vertically and in observation time closest to the analysis time. emaddcc-thinning4.py currently assumes valid temperature and wind observations at the same time and fixed vertical thinning intervals of:

    [300, 300, 600, 1000] m 

which correspond to the heights of the lowest, second-lowest, third-lowest and all higher boxes. The horizontal box width is variable; in the following example it is 40 km.

    The .py script is triggered in scr/Prepare_ob, with:

      nMsgs=`bufr_count $OBDIR/Mode-S_EMADDC_KNMI_oper_${DTG}.bufr`
       time python3 $HM_LIB/scr/emaddcc_thinning4.py --infile $OBDIR/Mode-S_EMADDC_KNMI_oper_${DTG}.bufr --box_width 40 --DTG $DTG --nMsgs $nMsgs --outfile emaddcc_thinned.bufr
  cat emaddcc_thinned.bufr  /dev/null >> $BUFRFILE

It takes about 1:35 min on Atos-Bologna and results in a reduction of the Mode-S data volume by a factor of 4-5.

    diff --git a/dev/Observations/ObservationData/index.html b/dev/Observations/ObservationData/index.html index 6ed4ee3ceb..35b426c845 100644 --- a/dev/Observations/ObservationData/index.html +++ b/dev/Observations/ObservationData/index.html @@ -7,4 +7,4 @@ EASTEC=$( tail -1 foo | head -1 | sed 's/ //g' ) NORTHEC=$( tail -2 foo | head -1 | sed 's/ //g' ) WESTEC=$( tail -3 foo | head -1 | sed 's/ //g' ) - SOUTHEC=$( tail -4 foo | head -1 | sed 's/ //g' )

    LOCAL

    Otherwise, this step consists of fetching (or waiting for) the observations stored in $OBDIR defined in ecf/config_exp.h . In that case one can use the command "cat" to merge different observations into one BUFR file, ob${DTG}. In general, HIRLAM services are adopting SAPP, ECMWF's scalable acquisition and pre-processing system, to process (conventional) GTS reports and other observational data for use in operational NWP. SAPP produces BUFR encoded in the same way as observational BUFR data available in the MARS archive.
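As a minimal sketch (the per-type input file names are hypothetical; ob${DTG} is the merged file expected by the system):

   cat synop${DTG} temp${DTG} amv${DTG} > ob${DTG}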

    diff --git a/dev/Observations/ObservationPreprocessing/index.html b/dev/Observations/ObservationPreprocessing/index.html index a7f9dc1ed6..a9ddcc2821 100644 --- a/dev/Observations/ObservationPreprocessing/index.html +++ b/dev/Observations/ObservationPreprocessing/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    HARMONIE Observation Preprocessing

    Introduction

    The following figure shows different schematic steps in the HARMONIE data assimilation system. It is worth mentioning some differences between the observation pre-processing systems used by ECMWF, Météo France, and HIRLAM. Some of these differences are listed below:

                           AROME/HARMONIE-AROME                                                                            IFS
data format/content        BUFR, but sometimes with own table                                                              BUFR with WMO code
creation of ODB database   Bator converts BUFR to ODB                                                                      b2o/bufr2odb converts BUFR to ODB
blacklisting technique     Bator (LISTE_LOC, LISTE_NOIRE_DIAP), Screening (hirlam_blacklist.B) & Minim (NOTVAR namelist)   Screening only

    Observation file preparation

    Preprocessing Software

    • Bator: Bator - reads BUFR/HDF5/OBSOUL observation data and writes ODBs used by data assimilation

    Other possibilities include:

    • Oulan: Oulan - Converts conventional BUFR data to OBSOUL file that is read by BATOR
    • Cope: Cope - preparation of ODBs used by data assimilation (in development)
    diff --git a/dev/Observations/Oulan/index.html b/dev/Observations/Oulan/index.html index 05ab670e10..d6b6bc52ad 100644 --- a/dev/Observations/Oulan/index.html +++ b/dev/Observations/Oulan/index.html @@ -29,4 +29,4 @@ -e "s/SLNEWSHIPBUFR/$SLNEWSHIPBUFR/" \ -e "s/SLNEWBUOYBUFR/$SLNEWBUOYBUFR/" \ -e "s/SLNEWTEMPBUFR/$SLNEWTEMPBUFR/" \ - ${NAMELIST} >NAMELIST
  • run oulan

    $BINDIR/oulan
  • process GNSS data. If $GNSS_OBS is set to 1 then GNSS observations are added to the OBSOUL file and whitelisting is carried out using PREGPSSOL

  • New BUFR templates

    Valid for HARMONIE 40h1 and later

The use of new-format (GTS WMO) BUFR is controlled in scr/include.ass by LNEWSYNOPBUFR, LNEWSHIPBUFR, LNEWBUOYBUFR and LNEWTEMPBUFR (set to 0 or 1). These environment variables control namelist settings in the Oulan script. GTS and ECMWF BUFR were used to guide the code changes, so Oulan handles either "flavour" of BUFR. Local changes may be required if your locally produced BUFR does not follow WMO and/or ECMWF practices, in particular for section 1 data sub-type settings.
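As a sketch, enabling the new templates for all four report types in scr/include.ass would look like the following (whether 1 is appropriate depends on your local BUFR feed):

   export LNEWSYNOPBUFR=1
   export LNEWSHIPBUFR=1
   export LNEWBUOYBUFR=1
   export LNEWTEMPBUFR=1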

The ECMWF wiki contains updates regarding the quality of the new BUFR HR observations. See the following ECMWF wiki pages for further information.

    diff --git a/dev/Observations/RadarData/index.html b/dev/Observations/RadarData/index.html index 89fd68da23..6d9f2e7a78 100644 --- a/dev/Observations/RadarData/index.html +++ b/dev/Observations/RadarData/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Assimilation of Radar Data

    This documentation outlines how to retrieve, process and assimilate HDF5 radar data

    HARMONIE compilation

HIRLAM have made code changes to BATOR to allow the direct reading of HDF5 radar data and its conversion to ODB suitable for use in the HARMONIE data assimilation system. If you wish to use these changes you must compile HARMONIE with support for HDF5. This requires adding -DUSE_HDF5 to the FDEFS in your makeup config file as well as adding hdf5 to EXTMODS; util/makeup/config.ECMWF.atos.gnu is an example of a makeup config file.
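A minimal sketch of the two additions (make-style variable syntax assumed; a real config file such as util/makeup/config.ECMWF.atos.gnu defines both variables with many more entries):

   FDEFS   = -DUSE_HDF5    # enable the HDF5-reading code in BATOR
   EXTMODS = hdf5          # build/link the HDF5 library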

    Format

    The BATOR code assumes the HDF5 radar data being read uses the OPERA Data Information Model (ODIM). See https://www.eumetnet.eu/wp-content/uploads/2021/07/ODIMH5v2.4.pdf for further information.

    Data retrieval

    Quality-controlled radar data can be retrieved from local archives, the OPERA Nimbus server (contact: Lukas Tuechler (Geosphere)), or the ODE (OPERA Development Environment) server (contact: Günther Haase (SMHI)).

    Data processing

The HARMONIE script system requires that the OPERA HDF5 data files be stored in RADARDIR (defined in ecf/config_exp.h) and have file names of the form ${HDFID}_qcvol_${DATE}T${HH}00.h5 (an example is given after this list), where:

    • HDFID is a 5 digit OPERA radar identifier
    • DATE is the date
    • HH is the hour
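For example, for the radar with OPERA identifier 02918 (also used in the refdata example below), a quality-controlled volume valid at 03 UTC on 8 August 2010 would be named:

   02918_qcvol_20100808T0300.h5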

    Common pitfalls

    • Forgetting to add -DUSE_HDF5 correctly to your config file
    • Incorrect RADARDIR
    • Incorrect file names
    • Incorrect format entered in refdata - BATOR is quite strict about how it reads the information in refdata:
    02918zh  HDF5     radarv           20100808 03 

    Further reading

    Martin Ridal's radar data assimilation presentation

    diff --git a/dev/Observations/SYNOP/index.html b/dev/Observations/SYNOP/index.html index bce60d7633..d1bfe5262d 100644 --- a/dev/Observations/SYNOP/index.html +++ b/dev/Observations/SYNOP/index.html @@ -43,4 +43,4 @@ 'nbg_sfcobs_ndays_apd' => '15,', 'nbg_sfcobs_min_ps'=> '15,', 'nbg_sfcobs_ndays_ps' => '15,', -},

In addition you need to make sure that the surface pressure variable is 'ps' and not 'z' for ship surface pressure subtypes, as explained above. Variational bias correction is only prepared for 'ps', not 'z'.

    diff --git a/dev/Observations/Scatt/index.html b/dev/Observations/Scatt/index.html index 31ab750c76..ac53b5a8ae 100644 --- a/dev/Observations/Scatt/index.html +++ b/dev/Observations/Scatt/index.html @@ -4,4 +4,4 @@ gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash});

    Scatterometers

    Background

The EUMETSAT OSI SAF produces different scatterometer wind products at KNMI, and more will become available in 2019:

• C-band ASCAT-A/B/C overpassing at 9:30/21:30 Local Solar Time (LST), since 2007/2011/2019;
• Ku-band ScatSat overpassing at 8:45/20:45 LST, since 2017;
• Ku-band HY2A/B overpassing at 6:00/18:00 LST, since 2013 (n.a. in NRT)/2019;
• Ku-band CFOSAT overpassing at 7:00/19:00 LST, expected 2019;
• Ku-band OSCAT3 overpassing at 12:00/24:00, expected 2019;
• C/Ku-band WindRad overpassing at 6:00/18:00, expected 2020.

Note that the products have different ambiguity and noise properties, which are handled in the generic KNMI processing. We distinguish two types of scatterometers: (1) with static beams (ASCAT) and (2) with rotating beams (the rest).

In the ECMWF model (at ~200 km scales) the availability of three-hourly observations is motivated by the experience of assimilating ASCAT and OSCAT (2.5 hours overpass time difference), which showed double the impact of assimilating ASCAT only. The two instruments thus act as independent data sources for the model.

Since ASCAT overpasses only twice per day we cannot fulfil the temporal requirement and can therefore not expect to analyse open-ocean surface winds deterministically at 25 km scales with ASCAT only. Based on this analysis we should therefore focus on scales larger than 25 km (as ECMWF does), also for Harmonie, so typically on 100 km scales. This means that scales between ~25-100 km in Harmonie over the open sea are mostly noise, which can be removed through supermodding (ref: Mate Mile's project). Note that more scatterometers will be available next year at more times a day (see above).

ECMWF is testing ASCAT with different aggregation, thinning and weights in order to optimize scatterometer data assimilation; the results may be useful for the HARMONIE data assimilation strategy as well.

    ASCAT

1. ASCAT-12.5km (or ASCAT-coastal) data are available on a 12.5 km grid.
2. The resolution of ASCAT-12.5km is about 25 km (through the application of a Hanning window with tails extending beyond 12.5 km)
3. As a result, the errors of neighbouring observations are correlated. For the 6.25 km product:
  • along-track wind component l : neighbour 0.60; next-neighbour 0.19; next-next neighbour 0.02; total noise variance 0.385
  • cross-track wind component t : neighbour 0.51; next-neighbour 0.11; next-next neighbour 0.00; total noise variance 0.214
  This agrees well with the footprint overlap (see point 2). We expect similar values for ASCAT-12.5km, but this could easily be assessed in a more dedicated study.
4. Triple collocation tests show an observation error standard deviation for ASCAT-12.5km (or ASCAT-coastal) of ~0.7 m/s for u and v.
5. The effective model resolution of Harmonie (with a 2.5 km grid) is about 20-25 km.

Based on this one may conclude that the resolution of ASCAT-12.5km and Harmonie is about the same, so the representativeness error is negligible and the total error is equal to the observation error, i.e., 0.7 m/s; this value could then be used to give weight to ASCAT in Harmonie.

However, we think this will not give the best impact. This is because if you want to analyse model states at 25 km scales (the Harmonie effective resolution) deterministically, you need a forcing term which accounts for this resolution. Forcing can come either from orography (over land only) or from observations. So, over sea we have to rely on the density of the observation network. To analyse scales up to 25 km deterministically over sea requires high-density observations both in space and time, i.e., for the latter at least every hour. This is corroborated by studies with ASCAT A and B, separated in time by 50 minutes, showing high correlation of ASCAT divergence and convergence with moist convective rain, but negligible correlation between the convergence or divergence of the two passes.

Since ASCAT overpasses only twice per day we cannot fulfil the temporal requirement and can therefore not expect to analyse ocean surface winds deterministically at 25 km scales with ASCAT only. Based on this analysis we should therefore focus on scales larger than 25 km (as ECMWF does), also for Harmonie, so typically on 100 km scales. This means that scales between ~25-100 km in Harmonie over sea are mostly noise, which can be removed through supermodding, i.e., the project Mate Mile is working on.

    KNMI are waiting for a data feed from EUMETSAT. Level 1 ASCAT data available 14 March 2019 here

    Other scatterometers

1. 25km data are generally available on the satellite swath grid of WVCs (wind vector cells)
    2. The resolution of this 25 km data is around 100 km (through the application of a spatial filter that successfully suppresses both wind direction ambiguities and noise)
    3. As a result, the errors of neighboring observations are correlated over a distance of 100 km or more
    4. Triple collocation tests show observation error standard deviation ~ 0.7 m/s for u and v
    5. Biases exist at warm and cold SST of up to 0.5 m/s, which are being corrected; also winds around nadir and, to a lesser extent, in the outer swath are sometimes biased; the IFS takes account of this, but may need retuning for CFOSAT

    Further reading

    More information is available on the OSI SAF wind site in the form of training material, product manuals, scientific publications, verification reports and monitoring information. Support and services messages for all products can be obtained through scat at knmi.nl .

    The EUMETSAT NWP SAF provides the following reports:

    Model

    Enable assimilation

    • Set SCATT_OBS=1 in scr/include.ass
    • Ensure ascat${DTG} files are available in $OBDIR (defined in ecf/config_exp.h )
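In scr/include.ass the switch is a single line (the trailing comment is illustrative):

   export SCATT_OBS=1           # assimilate scatterometer winds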

    Technical information

    • Referred to as NSCAT3 in arpifs (see src/arpifs/module/yomcoctp.F90)
    • From https://apps.ecmwf.int/odbgov
      • obstype=9
      • codetype=139
      • sensor=190
• varno=125/124 for ambiguous u/v wind component

    Issues (CY40/CY43)

    Thinning: NASCAWVC

    • Number of ASCAT wave vector cells
    • Defined in src/arpifs/module/yomthlim.F90
    • Default, set in src/arpifs/obs_preproc/sufglim.F90, is 42 (for 25-km product)
    • Set to 82 for 12.5-km scatterometer product in nam/harmonie_namelists.pm (possibly also in sufglim.F90. To be checked)

    Observation error

    • Set by Bator (src/odb/pandor/module/bator_init_mod.F90) u_err=1.39, v_err=1.54
    • Suggested values from KNMI: u_err=1.4, v_err=1.4
    • ZWE=2.0 set in src/arpifs/obs_preproc/nscatin.F90 but not used (I think)
• ObsErr in the Jo-table is the RMS of all ASCAT obs_error values, SQRT(0.5*(u_err^2 + v_err^2)); with the Bator defaults above this gives SQRT(0.5*(1.39^2 + 1.54^2)) ≈ 1.47
• sigma_o can be set by Bator using NADIRS:
      ECTERO(9,139,125,1) = 1.39_JPRB
  ECTERO(9,139,124,1) = 1.54_JPRB

diff --git a/dev/Observations/Seviri/index.html b/dev/Observations/Seviri/index.html index c0f7ed90f0..31164b75a4 100644 --- a/dev/Observations/Seviri/index.html +++ b/dev/Observations/Seviri/index.html @@ -37,4 +37,4 @@ NSEVIRI(57)%NbChannels= 8, NSEVIRI(57)%Channels(1:8)= 1,2,3,4,5,6,7,8, NSEVIRI(57)%NamChannels(1:8)='IR_039','WV_062','WV_073','IR_087','IR_097','IR_108','IR_120','IR_134', /

    Model settings (Screening and Minimisation)

    References

    Technical stuff:

    Further reading and links to reports/presentations:

    diff --git a/dev/Overview/Binaries/index.html b/dev/Overview/Binaries/index.html index 01ecae9666..ef497e0333 100644 --- a/dev/Overview/Binaries/index.html +++ b/dev/Overview/Binaries/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    HARMONIE binaries

    An installation of HARMONIE produces the following binaries:

    • ACADFA1D : Tool to generate initial and boundary data for MUSC
    • ADDPERT : Create initial perturbations
    • ADDSURF : Allows you to mix different files and add different fields
    • ALTO : Also known as PINUTS. Contains several diagnostic tools.
    • BATOR : Generate ODB from observations in various formats
    • bl95.x : Blacklist compiler, help program to generate object files from the blacklist
• BLEND : Mixes two files
• BLENDSUR : Mixes two files
    • cluster : Cluster ensemble members
    • CONVERT_ECOCLIMAP_PARAM : Generate binary files from ECOCLIMAP ascii files
    • dcagen : ODB handling tool
    • domain_prop : Helper program to return various model domain properties
    • FESTAT : Background error covariance calculations.
    • fldextr : Extracts data for verification from model history files. Reads FA from HARMONIE and GRIB from ECMWF/HIRLAM.
    • gl : Converts/interpolates between different file formats and projections. Used for boundary interpolation.
    • IOASSIGN/ioassign : ODB IO setup
    • LSMIX : Scale dependent mixing of two model states.
    • jbconv : Interpolates/extrapolates background error statistics files. For technical experimentation
    • lfitools : FA/LFI file manipulation tool
    • MASTERODB : The main binary for the forecast model, surface assimilation, climate generation, 3DVAR, fullpos and much more.
    • MTEN : Computation of moist tendencies
    • obsextr : Extract data for verification from BUFR files.
    • obsmon : Extract data for observation monitoring
    • odb98.x : ODB manipulation program
    • OFFLINE : The SURFEX offline model. Also called SURFEX
    • oulan : Converts observations in BUFR to OBSOUL format used by BATOR
    • PERTCMA : Perturbation of observations in ODB
    • PERTSFC : Surface perturbation scheme
    • PGD : Generates physiography files for SURFEX.
    • PREGPSSOL : Processing of GNSS data
    • PREP : Generate SURFEX initial files. Interpolates/translates between two SURFEX domains.
    • SFXTOOLS : Converts SURFEX output between FA and LFI format.
    • shuffle : Manipulation of ODB. Also called ODBTOOLS
    • ShuffleBufr : Split bufr data according to observation type, used in the observation preprocessing.
    • SODA : Surfex offline data assimilation
    • SPG : Stochastic pattern generator, https://github.com/gayfulin/SPG
    • SURFEX : The SURFEX offline model. Also called OFFLINE
    • tot_energy : Calculates the total energy of a model state. Is used for boundary perturbation scaling.
    • xtool : Compares two FA/LFI/GRIB files.
    diff --git a/dev/Overview/Content/index.html b/dev/Overview/Content/index.html index d5710c70c5..e745352540 100644 --- a/dev/Overview/Content/index.html +++ b/dev/Overview/Content/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Harmonie Content

    Overview

Harmonie is HIRLAM's adaptation of the LAM version of the IFS/ARPEGE project. The common code shared with the ALADIN programme, Meteo France and ECMWF contains only the source code; Harmonie adds the build environment, scripts, support for a scheduler, and a number of diagnostic tools for file conversion and postprocessing. In summary, a git clone of harmonie from github contains the following main directories:

    • config-sh : Configuration and job submission files for different platforms.
    • const : A selected number of constant files for bias correction, assimilation and different internal schemes. A large number of data for climate generation and the RTTOV software is kept outside of the repository. See [wiki:HarmonieSystemDocumentation#Downloaddata].
    • ecf : Directory for the main configuration file config_exp.h and the containers for the scheduler ECFLOW.
• suites : Scripts and suite definition files for ECFLOW, the scheduler for HARMONIE.
    • nam : Namelists for different configurations.
    • scr : Scripts to run the different tasks.
    • src : The IFS/ARPEGE source code.
    • util : A number of utilities and support libraries.

    util

    The util directory contains the following main directories

    • auxlibs : Contains gribex, bufr, rgb and some dummy routines
    • binutils : https://www.gnu.org/software/binutils/
    • checknorms : Script for code norm checking
    • gl_grib_api : Boundary file generator and file converter
    • makeup : HIRLAM style compilation tool
    • musc : MUSC scripts
    • obsmon : Code to produce obsmon sqlite files
    • offline : SURFEX offline code
    • oulan : Converts conventional BUFR data to OBSOUL format read by bator.
    • RadarDAbyFA : Field alignment code
    diff --git a/dev/Overview/FileFormats/index.html b/dev/Overview/FileFormats/index.html index cca3a78018..3eadbd102d 100644 --- a/dev/Overview/FileFormats/index.html +++ b/dev/Overview/FileFormats/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    File formats in HARMONIE

    Introduction

    The HARMONIE system reads and writes a number of different formats.

    FA files

The default internal input/output format of HARMONIE for gridpoint, spectral and SURFEX data. GRIB is used as a way to pack the data, but the GRIB records cannot be used as such.

    • The header contains information about model domain, projection, spectral truncation, extension zone, boundary zone, vertical levels.
    • Only one date/time per file.
    • FA routines are found under ifsaux/fa
    • List or convert a file with gl
    • Other listing tool PINUTS

    Read more

    GRIB/GRIB2

    All FA files may be converted to GRIB after the forecast run. For the conversion between FA names and GRIB parameters check this table.

    • List or convert a GRIB file with gl

    NETCDF

In climate mode all FA files may be converted to NETCDF after the forecast run. For the conversion between FA names and NETCDF parameters check util/gl/inc/nc_tab.h.

    • For the manipulation and listing of NETCDF files we refer to standard NETCDF tools.
    • NETCDF is also used as output data from some SURFEX tools.

    BUFR and ODB

BUFR is the archiving/exchange format for observations. The Observation Database (ODB) is used for efficient handling of observations in IFS. ODB is used for both input data and feedback information.

    Read more about observations in HARMONIE here.

    DDH (LFA files )

    Diagnostics by Horizontal Domains allows you to accumulate fluxes from different packages over different areas/points.

    • LFA files ( Autodocumented File Software )
    • gmapdoc
    • under util/ddh

    Misc

    • vfld/vobs files in a simple ASCII format used by the verification.
    • Obsmon files are stored in sqlite format.
    diff --git a/dev/Overview/Source/index.html b/dev/Overview/Source/index.html index fd4f774f10..1430ebcc30 100644 --- a/dev/Overview/Source/index.html +++ b/dev/Overview/Source/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Harmonie Source Code

    Introduction

This wiki page summarizes the ARPEGE/IFS source code made available in the HARMONIE system. It is based on documents made available by YESSAD K. (METEO-FRANCE/CNRM/GMAP/ALGO). The relevant document for cycle 40 is available here (or directly here).

    HARMONIE Source Library Structure

    The main source of HARMONIE system originates from IFS/ARPEGE and it consists of a number of "project" sources. These are:

• aeolus: Aeolus source code, a package for pre-processing satellite lidar wind data. Inactive for us.
    • aladin: specific routines only relevant to LAM, (limited area models, in particular ALADIN and AROME).
• algor: application routines, e.g. to read LFI or Arpege files, interface routines for the distributed memory environment, and some linear algebra routines, such as the Lanczos algorithm and minimizers.
• arpifs: global model routines (ARPEGE, IFS), and routines common to global and LAM models. This is the core of the ARPEGE/IFS software.
    • biper: Biperiodization routines for the LAM
    • blacklist: package for blacklisting
    • coupling: lateral coupling and spectral nudging for LAM models
    • etrans: spectral transforms for plane geometry, used for LAM
    • ifsaux: some application routines, for example reading or writing on “LFI” or ARPEGE files, interface routines for distributed memory environment
    • mpa: upper air meso-NH/AROME physics (also used in ARPEGE/ALADIN)
    • mse: surface processes in meso-NH/AROME (interface for SURFEX)
    • odb: ODB (Observational Data Base software), needed by ARPEGE/ALADIN for their analysis or their assimilation cycle
    • satrad: satellite data handling package, needed to run the model analysis/assimilation
    • surf: ECMWF surface scheme
    • surfex: surface processes in meso-NH/AROME - the externalized surface scheme SURFEX
    • trans: spectral transforms for spherical geometry, used for ARPEGE/IFS
    • utilities: utility packages, for operational FA to GRIB (PROGRID), OULAN, BATOR, or programs to operate on ODB and radiances bias correction

    Dependencies and hierarchy between each project

    Note: these project names are no longer valid – need to update

    • ARP+TFL+XRD+XLA+MPA+MSE+SURFEX: for ARPEGE forecasts with METEO-FRANCE physics.
    • ARP+ALD+TFL+TAL+XRD+XLA+BIP+MPA+MSE+SURFEX: for ALADIN or AROME forecasts.
    • ARP+TFL+XRD+XLA+SUR: for IFS forecasts with ECMWF physics.
    • ARP+TFL+XRD+XLA+MPA+MSE+SURFEX+BLA+ODB+SAT+AEO: for ARPEGE assimilations with METEO-FRANCE physics.
    • ARP+ALD+TFL+TAL+XRD+XLA+BIP+MPA+MSE+SURFEX+BLA+ODB+SAT+AEO: for ALADIN or AROME assimilations.
    • ARP+TFL+XRD+XLA+SUR+BLA+ODB+SAT+OBT+SCR+AEO: for IFS assimilations with ECMWF physics.

    Libraries under each project

Note: this information may need to be updated for CY40

    ARPIFS

    • adiab
      • Adiabatic dynamics
      • Adiabatic diagnostics and intermediate quantities calculation, for example the geopotential height (routines GP... or GNH...).
      • Eulerian advections
      • Semi-Lagrangian advection and interpolators (routines LA...)
      • Semi-implicit scheme and linear terms calculation (routines SI..., SP..SI..)
      • Horizontal diffusion (routines SP..HOR..)
    • ald inc
      • function: functions used only in ALADIN
      • namelist: namelists read by ALADIN.
    • c9xx: specific configurations 901 to 999 routines (mainly configuration 923). Routines INCLI.. are used in configuration 923. Routines INTER... are interpolators used in configurations 923, 931, 932.
    • canari: routines used in the CANARI optimal interpolation. Their names generally starts by CA.
    • canari common: empty directory to be deleted.
    • climate: some specific ARPEGE-CLIMAT routines.
    • common: often contains includes
    • control: control routines. Contains in particular STEPO and CNT... routines.
    • dfi: routines used in the DFI (digital filter initialisation) algorithm
    • dia: diagnostics other than FULL-POS. One finds some setup SU... routines specific to some diagnostics and some WR... routines doing file writing.
    • function: functions (in includes). The qa....h functions are used in CANARI, the fc....h functions are used in a large panel of topics.
    • interface: not automatic interfaces (currently empty).
    • kalman: Kalman filter.
    • module: all the types of module (variables declarations, type definition, active code).
• mwave: micro-wave observations (SSM/I) treatment.
• namelist: all namelists.
    • nmi: routines used in the NMI (normal mode initialisation) algorithm.
    • obs error: treatment of the observation errors in the assimilation.
    • obs preproc: observation pre-processing (some of them are called in the screening).
    • ocean: oceanic coupling, for climatic applications.
    • onedvar: 1D-VAR assimilation scheme used at ECMWF.
    • parallel: parallel environment, communications between processors.
    • parameter: empty directory to be deleted.
    • phys dmn: physics parameterizations used at METEO-FRANCE, and HIRLAM physics, ALARO physics.
    • phys ec: ECMWF physics. Some of these routines (FMR radiation scheme, Lopez convection scheme) are now also used in the METEO-FRANCE physics.
    • pointer: empty directory to be deleted.
    • pp obs: several applications
      • observation horizontal and vertical interpolator.
      • FULL-POS.
      • vertical interpolator common to FULL-POS and the observation interpolator; some of these routines may be used elsewhere.
    • setup: setup routines not linked with a very specific domain. More specific setup routines are spread among some other subdirectories.
    • sinvect: singular vectors calculation (configuration 601).
    • support: empty directory to be deleted.
    • transform: hat routines for spectral transforms.
• utility: miscellaneous utilities, linear algebra routines, array deallocation routines.
    • var: routines involved in the 3DVAR and 4DVAR assimilation, some minimizers (N1CG1, CONGRAD), some specific 3DVAR and 4DVAR setup routines.
    • wave: empty directory to be deleted.

    ALADIN

    • adiab: adiabatic dynamics.
    • blending: blending scheme (currently only contains the procedure blend.ksh).
    • c9xx: specific configurations E901 to E999 routines (mainly configuration E923). Routines EINCLI.. are used in configuration E923. Routines EINTER... are interpolators used in configurations E923, E931, E932.
    • control: control routines.
    • coupling: lateral coupling by external lateral boundary conditions.
    • dia: diagnostics other than FULL-POS.
    • inidata: setup routines specific to file reading (initial conditions, LBC).
    • module: active code modules only used in ALADIN.
    • obs preproc: observation pre-processing (some of them are called in the screening).
    • parallel: parallel environment, communications between processors.
    • pp obs: several applications:
      • observation horizontal and vertical interpolator.
      • FULL-POS.
      • vertical interpolator common to FULL-POS and the observation interpolator; some of these routines may be used elsewhere.
    • programs: probably designed to contain procedures, but currently contains among others some blending routines, the place of which would be probably better in subdirectory "blending".
    • setup: setup routines not linked with a very specific domain. More specific setup routines are spread among some other subdirectories.
    • sinvect: singular vectors calculation (configuration E601).
    • transform: hat routines for spectral transforms.
• utility: miscellaneous utilities, array deallocation routines.
    • var: routines involved in the 3DVAR and 4DVAR assimilation, some specific 3DVAR and 4DVAR setup routines.

    TFL

    • build: contains procedures.
    • external: routines which can be called from another project.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • module: all the types of module (variables declarations, type definition, active code).
      • tpm ...F90: variable declaration + type definition modules.
      • lt.... mod.F90: active code modules for Legendre transforms.
      • ft.... mod.F90: active code modules for Fourier transforms.
      • tr.... mod.F90: active code modules for transpositions.
      • su.... mod.F90: active code modules for setup.
    • programs: specific entries which can be used for TFL code validation. These routines are not called elsewhere.

    TAL

    • external: routines which can be called from another project.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • module: all the types of module (variables declarations, type definition, active code).
      • tpmald ...F90: variable declaration + type definition modules.
      • elt.... mod.F90: active code modules for N-S Fourier transforms.
      • eft.... mod.F90: active code modules for E-W Fourier transforms.
      • sue.... mod.F90: active code modules for setup.
    • programs: specific entries which can be used for TAL code validation. These routines are not called elsewhere.

    XRD

    • arpege: empty directory to be deleted.
    • bufr io: BUFR format files reading and writing.
    • cma: CMA format files reading and writing.
    • ddh: DDH diagnostics.
    • fa: ARPEGE (FA) files reading and writing.
    • grib io: ECMWF GRIB format files reading and writing.
    • grib mf: METEO-FRANCE GRIB format files reading and writing.
    • ioassign: empty directory to be deleted.
    • lanczos: linear algebra routines for Lanczos algorithm.
    • lfi: LFI format files reading and writing.
    • minim: linear algebra routines for minimizations. Contains the M1QN3 (quasi-Newton) minimizer.
• misc: miscellaneous decks.
• module: all the types of module (variables declarations, type definition, active code). There are a lot of mpl...F90 modules for the parallel environment (interface to the MPI parallel environment).
    • mrfstools: empty directory to be deleted.
    • newbufrio: empty directory to be deleted.
    • newcmaio: empty directory to be deleted.
    • not used: miscellaneous decks (unused decks to be deleted?).
    • pcma: empty directory to be deleted.
    • support: miscellaneous routines. Some of them do Fourier transforms, some others do linear algebra.
    • svipc: contains only svipc.c .
• utilities: miscellaneous utilities.

    SUR

    • build: contains procedures.
• external: routines which can be called from another project.
• function: specific functions.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • module: all the types of module (variables declarations, type definition, active code).
      • yos ...F90: variable declaration + type definition modules.
      • su.... mod.F90 but not surf.... mod.F90: active code modules for setup.
      • surf.... mod.F90, v.... mod.F90: other active code modules.
    • offline: specific entries which can be used for SUR code validation. These routines are not called elsewhere.

    BLA

    • compiler.
    • include: not automatically generated interfaces, functions, and some other includes.
    • library: the only containing .F90 decks.
    • old2new.
    • scripts.

    SAT

    • bias.
    • emiss.
    • interface.
    • module.
    • mwave.
    • onedvar.
    • pre screen.
    • rtlimb.
    • rttov.
    • satim.
    • test. (Not described in detail; more information has to be provided by someone who knows the content of this project, but there is currently no specific documentation about this topic)

    UTI

    • add cloud fields: program to add 4 cloud variables (liquid water, ice, rainfall, snow) in ARPEGE files.
    • bator: BATOR software (reads observations data in a ASCII format file named OBSOUL and the blacklist, writes them on a ODB format file with some additional information).
    • combi: combination of perturbations in an ensemble forecast (PEARP).
    • controdb: control of the number of observations.
    • extrtovs: unbias TOVS.
    • fcq: does quality control and writes this quality control in ODB files.
• gobptout: PROGRIB? (converts ARPEGE files containing post-processed data into GRIB files).
    • include: all .h decks (functions, COMMON blocks, parameters).
    • mandalay: software MANDALAY.
    • module: all types of modules.
    • namelist: namelists specific to the applications stored in UTI (for example OULAN, BATOR).
• oulan: OULAN software (the step just before BATOR: extracts observations from the BDM, samples the data in space and time, and writes the sampled data to an ASCII file called "OBSOUL").
    • pregpssol: Surface GPS processing.
    • prescat: Scatterometer data processing.
• progrid: PROGRID? (converts ARPEGE files containing post-processed data into GRIB files).
    • progrid cadre: cf. progrid?
    • sst nesdis: program to read the SST on the BDAP. This project has its own entries.

    MPA

    It contains first layer of directory

    • chem: chemistry.
    • conv: convection.
    • micro: microphysics.
    • turb: turbulence.

    Each directory contains the following subdirectories

    • externals: routines which can be called from another project.
    • include: all the "include" decks (functions, COMMON blocks, parameters).
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • internals: other non-module routines; they cannot be called from another project.
    • module: all types of modules.

    SURFEX

    • ASSIM: Surface assimilation routines (please note that programs soda.F90, oi_main.F90 and varassim.F90 are located under mse/programs).
    • OFFLIN: Surface offline routines (please note that programs pgd.F90, prep.F90 and offline.F90 are located under mse/programs).
    • SURFEX: Surface routines for physiography (PGD), initialisation (PREP) and physical processes including e.g. land (ISBA), sea, town (TEB) and lakes.
    • TOPD: TOPMODEL (TOPography based MODEL) for soil hydrology.
    • TRIP: River routing model TRIP

    MSE

    • dummy: empty versions of some routines.
    • externals: routines which can be called from another project.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • internals: other non-module routines; they cannot be called from another project.
    • module: all types of modules.
    • new: file conversion routines, e.g. fa2lfi, lfi2fa
    • programs: SURFEX programs

    References and documentation

    +

    Harmonie Source Code

    Introduction

This wiki page summarises the ARPEGE/IFS source code made available in the HARMONIE system. It is based on documents made available by YESSAD K. (METEO-FRANCE/CNRM/GMAP/ALGO). The relevant document for cycle 40 is available here (or directly here).

    HARMONIE Source Library Structure

The main source of the HARMONIE system originates from IFS/ARPEGE and consists of a number of "project" sources. These are:

    • aeolus: Aeolus source code, a package for pre-processing satellite lidar wind data. Inactive for us.
    • aladin: specific routines only relevant to LAM (limited area models, in particular ALADIN and AROME).
    • algor: application routines, e.g. to read LFI or Arpege files, interface routines for the distributed memory environment, and some linear algebra routines, such as the Lanczos algorithm and minimizers.
    • arpifs: global model routines (ARPEGE, IFS), and routines common to global and LAM models. This is the core of the ARPEGE/IFS software.
    • biper: Biperiodization routines for the LAM
    • blacklist: package for blacklisting
    • coupling: lateral coupling and spectral nudging for LAM models
    • etrans: spectral transforms for plane geometry, used for LAM
    • ifsaux: some application routines, for example reading or writing on “LFI” or ARPEGE files, interface routines for distributed memory environment
    • mpa: upper air meso-NH/AROME physics (also used in ARPEGE/ALADIN)
    • mse: surface processes in meso-NH/AROME (interface for SURFEX)
    • odb: ODB (Observational Data Base software), needed by ARPEGE/ALADIN for their analysis or their assimilation cycle
    • satrad: satellite data handling package, needed to run the model analysis/assimilation
    • surf: ECMWF surface scheme
    • surfex: surface processes in meso-NH/AROME - the externalized surface scheme SURFEX
    • trans: spectral transforms for spherical geometry, used for ARPEGE/IFS
    • utilities: utility packages, e.g. operational FA to GRIB conversion (PROGRID), OULAN, BATOR, and programs to operate on ODB and radiance bias correction

    Dependencies and hierarchy between each project

    Note: these project names are no longer valid – need to update

    • ARP+TFL+XRD+XLA+MPA+MSE+SURFEX: for ARPEGE forecasts with METEO-FRANCE physics.
    • ARP+ALD+TFL+TAL+XRD+XLA+BIP+MPA+MSE+SURFEX: for ALADIN or AROME forecasts.
    • ARP+TFL+XRD+XLA+SUR: for IFS forecasts with ECMWF physics.
    • ARP+TFL+XRD+XLA+MPA+MSE+SURFEX+BLA+ODB+SAT+AEO: for ARPEGE assimilations with METEO-FRANCE physics.
    • ARP+ALD+TFL+TAL+XRD+XLA+BIP+MPA+MSE+SURFEX+BLA+ODB+SAT+AEO: for ALADIN or AROME assimilations.
    • ARP+TFL+XRD+XLA+SUR+BLA+ODB+SAT+OBT+SCR+AEO: for IFS assimilations with ECMWF physics.

    Libraries under each project

    Note: this information may need to be updated for CY40

    ARPIFS

    • adiab
      • Adiabatic dynamics
      • Adiabatic diagnostics and intermediate quantities calculation, for example the geopotential height (routines GP... or GNH...).
      • Eulerian advections
      • Semi-Lagrangian advection and interpolators (routines LA...)
      • Semi-implicit scheme and linear terms calculation (routines SI..., SP..SI..)
      • Horizontal diffusion (routines SP..HOR..)
    • ald inc
      • function: functions used only in ALADIN
      • namelist: namelists read by ALADIN.
    • c9xx: specific configurations 901 to 999 routines (mainly configuration 923). Routines INCLI.. are used in configuration 923. Routines INTER... are interpolators used in configurations 923, 931, 932.
    • canari: routines used in the CANARI optimal interpolation. Their names generally start with CA.
    • canari common: empty directory to be deleted.
    • climate: some specific ARPEGE-CLIMAT routines.
    • common: often contains includes
    • control: control routines. Contains in particular STEPO and CNT... routines.
    • dfi: routines used in the DFI (digital filter initialisation) algorithm
    • dia: diagnostics other than FULL-POS. One finds some setup SU... routines specific to some diagnostics and some WR... routines doing file writing.
    • function: functions (in includes). The qa....h functions are used in CANARI, the fc....h functions are used in a large panel of topics.
    • interface: not automatically generated interfaces (currently empty).
    • kalman: Kalman filter.
    • module: all the types of module (variables declarations, type definition, active code).
    • mwave: micro-wave observations (SSM/I) treatment.
    • namelist: all namelists.
    • nmi: routines used in the NMI (normal mode initialisation) algorithm.
    • obs error: treatment of the observation errors in the assimilation.
    • obs preproc: observation pre-processing (some of them are called in the screening).
    • ocean: oceanic coupling, for climatic applications.
    • onedvar: 1D-VAR assimilation scheme used at ECMWF.
    • parallel: parallel environment, communications between processors.
    • parameter: empty directory to be deleted.
    • phys dmn: physics parameterizations used at METEO-FRANCE, as well as HIRLAM and ALARO physics.
    • phys ec: ECMWF physics. Some of these routines (FMR radiation scheme, Lopez convection scheme) are now also used in the METEO-FRANCE physics.
    • pointer: empty directory to be deleted.
    • pp obs: several applications
      • observation horizontal and vertical interpolator.
      • FULL-POS.
      • vertical interpolator common to FULL-POS and the observation interpolator; some of these routines may be used elsewhere.
    • setup: setup routines not linked with a very specific domain. More specific setup routines are spread among some other subdirectories.
    • sinvect: singular vectors calculation (configuration 601).
    • support: empty directory to be deleted.
    • transform: hat routines for spectral transforms.
    • utility: miscellaneous utilities, linear algebra routines, array deallocation routines.
    • var: routines involved in the 3DVAR and 4DVAR assimilation, some minimizers (N1CG1, CONGRAD), some specific 3DVAR and 4DVAR setup routines.
    • wave: empty directory to be deleted.

    ALADIN

    • adiab: adiabatic dynamics.
    • blending: blending scheme (currently only contains the procedure blend.ksh).
    • c9xx: specific configurations E901 to E999 routines (mainly configuration E923). Routines EINCLI.. are used in configuration E923. Routines EINTER... are interpolators used in configurations E923, E931, E932.
    • control: control routines.
    • coupling: lateral coupling by external lateral boundary conditions.
    • dia: diagnostics other than FULL-POS.
    • inidata: setup routines specific to file reading (initial conditions, LBC).
    • module: active code modules only used in ALADIN.
    • obs preproc: observation pre-processing (some of them are called in the screening).
    • parallel: parallel environment, communications between processors.
    • pp obs: several applications:
      • observation horizontal and vertical interpolator.
      • FULL-POS.
      • vertical interpolator common to FULL-POS and the observation interpolator; some of these routines may be used elsewhere.
    • programs: probably designed to contain procedures, but currently contains, among others, some blending routines which would probably fit better in the subdirectory "blending".
    • setup: setup routines not linked with a very specific domain. More specific setup routines are spread among some other subdirectories.
    • sinvect: singular vectors calculation (configuration E601).
    • transform: hat routines for spectral transforms.
    • utility: miscellaneous utilities, array deallocation routines.
    • var: routines involved in the 3DVAR and 4DVAR assimilation, some specific 3DVAR and 4DVAR setup routines.

    TFL

    • build: contains procedures.
    • external: routines which can be called from another project.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • module: all the types of module (variables declarations, type definition, active code).
      • tpm ...F90: variable declaration + type definition modules.
      • lt.... mod.F90: active code modules for Legendre transforms.
      • ft.... mod.F90: active code modules for Fourier transforms.
      • tr.... mod.F90: active code modules for transpositions.
      • su.... mod.F90: active code modules for setup.
    • programs: specific entries which can be used for TFL code validation. These routines are not called elsewhere.

    TAL

    • external: routines which can be called from another project.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • module: all the types of module (variables declarations, type definition, active code).
      • tpmald ...F90: variable declaration + type definition modules.
      • elt.... mod.F90: active code modules for N-S Fourier transforms.
      • eft.... mod.F90: active code modules for E-W Fourier transforms.
      • sue.... mod.F90: active code modules for setup.
    • programs: specific entries which can be used for TAL code validation. These routines are not called elsewhere.

    XRD

    • arpege: empty directory to be deleted.
    • bufr io: BUFR format files reading and writing.
    • cma: CMA format files reading and writing.
    • ddh: DDH diagnostics.
    • fa: ARPEGE (FA) files reading and writing.
    • grib io: ECMWF GRIB format files reading and writing.
    • grib mf: METEO-FRANCE GRIB format files reading and writing.
    • ioassign: empty directory to be deleted.
    • lanczos: linear algebra routines for Lanczos algorithm.
    • lfi: LFI format files reading and writing.
    • minim: linear algebra routines for minimizations. Contains the M1QN3 (quasi-Newton) minimizer.
    • misc: miscellaneous decks.
    • module: all the types of module (variables declarations, type definition, active code). There are a lot of mpl...F90 modules for the parallel environment (interface to the MPI parallel environment).
    • mrfstools: empty directory to be deleted.
    • newbufrio: empty directory to be deleted.
    • newcmaio: empty directory to be deleted.
    • not used: miscellaneous decks (unused decks to be deleted?).
    • pcma: empty directory to be deleted.
    • support: miscellaneous routines. Some of them do Fourier transforms, some others do linear algebra.
    • svipc: contains only svipc.c.
    • utilities: miscellaneous utilities.

    SUR

    • build: contains procedures.
    • external: routines which can be called from another project.
    • function: specific functions.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • module: all the types of module (variables declarations, type definition, active code).
      • yos ...F90: variable declaration + type definition modules.
      • su.... mod.F90 but not surf.... mod.F90: active code modules for setup.
      • surf.... mod.F90, v.... mod.F90: other active code modules.
    • offline: specific entries which can be used for SUR code validation. These routines are not called elsewhere.

    BLA

    • compiler.
    • include: not automatically generated interfaces, functions, and some other includes.
    • library: the only directory containing .F90 decks.
    • old2new.
    • scripts.

    SAT

    • bias.
    • emiss.
    • interface.
    • module.
    • mwave.
    • onedvar.
    • pre screen.
    • rtlimb.
    • rttov.
    • satim.
    • test. (Not described in detail; more information has to be provided by someone who knows the content of this project, but there is currently no specific documentation about this topic)

    UTI

    • add cloud fields: program to add 4 cloud variables (liquid water, ice, rainfall, snow) to ARPEGE files.
    • bator: BATOR software (reads observation data from an ASCII file named OBSOUL together with the blacklist, and writes them to an ODB format file with some additional information).
    • combi: combination of perturbations in an ensemble forecast (PEARP).
    • controdb: control of the number of observations.
    • extrtovs: unbias TOVS.
    • fcq: does quality control and writes this quality control in ODB files.
    • gobptout: PROGRIB? (converts ARPEGE files containing post-processed data into GRIB files).
    • include: all .h decks (functions, COMMON blocks, parameters).
    • mandalay: software MANDALAY.
    • module: all types of modules.
    • namelist: namelists specific to the applications stored in UTI (for example OULAN, BATOR).
    • oulan: OULAN software (the step just before BATOR: extracts observations from the BDM, samples data in space and time, and writes the sampled data to an ASCII file called "OBSOUL").
    • pregpssol: Surface GPS processing.
    • prescat: Scatterometer data processing.
    • progrid: PROGRID? (converts ARPEGE files containing post-processed data into GRIB files).
    • progrid cadre: cf. progrid?
    • sst nesdis: program to read the SST on the BDAP. This project has its own entries.

    MPA

    It contains a first layer of directories:

    • chem: chemistry.
    • conv: convection.
    • micro: microphysics.
    • turb: turbulence.

    Each directory contains the following subdirectories:

    • externals: routines which can be called from another project.
    • include: all the "include" decks (functions, COMMON blocks, parameters).
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • internals: other non-module routines; they cannot be called from another project.
    • module: all types of modules.

    SURFEX

    • ASSIM: Surface assimilation routines (please note that programs soda.F90, oi_main.F90 and varassim.F90 are located under mse/programs).
    • OFFLIN: Surface offline routines (please note that programs pgd.F90, prep.F90 and offline.F90 are located under mse/programs).
    • SURFEX: Surface routines for physiography (PGD), initialisation (PREP) and physical processes including e.g. land (ISBA), sea, town (TEB) and lakes.
    • TOPD: TOPMODEL (TOPography based MODEL) for soil hydrology.
    • TRIP: River routing model TRIP.

    MSE

    • dummy: empty versions of some routines.
    • externals: routines which can be called from another project.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • internals: other non-module routines; they cannot be called from another project.
    • module: all types of modules.
    • new: file conversion routines, e.g. fa2lfi, lfi2fa.
    • programs: SURFEX programs.

    References and documentation

    diff --git a/dev/Overview/da_graph/index.html b/dev/Overview/da_graph/index.html index f75c2412ba..2317051b28 100644 --- a/dev/Overview/da_graph/index.html +++ b/dev/Overview/da_graph/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -
    +
    diff --git a/dev/PostProcessing/Diagnostics/index.html b/dev/PostProcessing/Diagnostics/index.html index 789eefd275..60e3ebe2f8 100644 --- a/dev/PostProcessing/Diagnostics/index.html +++ b/dev/PostProcessing/Diagnostics/index.html @@ -11,4 +11,4 @@ YEZDIAG_NL(1)%CNAME='YOURVAL', YEZDIAG_NL(1)%LADV=.F.,

    If you add more fields (e.g. you set NGFL_EZDIAG=4), I think you will also need to set the GRIB parameter (the default is 999, which you can leave for the first one):

    YEZDIAG_NL(2)%IGRBCODE=998,
     YEZDIAG_NL(3)%IGRBCODE=997,
    YEZDIAG_NL(4)%IGRBCODE=996,

    Note that the two first places are already defined in harmonie_namelist.pm.

  • In order to have your variable converted from FA to grib, add the new variable in util/gl/inc/trans_tab.h

  • diff --git a/dev/PostProcessing/FileConversions/index.html b/dev/PostProcessing/FileConversions/index.html index 217fc1f679..5fa82899be 100644 --- a/dev/PostProcessing/FileConversions/index.html +++ b/dev/PostProcessing/FileConversions/index.html @@ -26,4 +26,4 @@ fstart(16) = $fstart, fstart(162) = $fstart, fstart(163) = $fstart, -/In the namelist:

    WMO GRIB editions and references

    Currently (Aug 2019) there are several editions of GRIB in use or in an experimental phase.

    diff --git a/dev/PostProcessing/Fullpos/index.html b/dev/PostProcessing/Fullpos/index.html index 34bd59b55a..c17b6c787d 100644 --- a/dev/PostProcessing/Fullpos/index.html +++ b/dev/PostProcessing/Fullpos/index.html @@ -22,4 +22,4 @@ 132c132 < @namfpdyh_lev = (1,2,3,4,5,6,7,8,9,10,11,12,13) ; --- -> @namfpdyh_lev = (1,2,3,4,5,6,7,8,9,10,11,12) ;

    Expert users

    In the FULL-POS namelist NAMFPC (variables explained in src/arp/module/yomfpc.F90), the variables are placed into different categories:

    The default FA-names for parameters in different categories can be found in src/arp/setup/suafn1.F90 L687.

    It's worth mentioning some of the variables postprocessed by FULL-POS

    Problems

    Problems may be encountered with FULL-POS when running on large domains. Here are some things to look out for:

    diff --git a/dev/PostProcessing/Interpolation/index.html b/dev/PostProcessing/Interpolation/index.html index 3f635baab4..69258d3b1a 100644 --- a/dev/PostProcessing/Interpolation/index.html +++ b/dev/PostProcessing/Interpolation/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Interpolations with gl

    Introduction

    In the following we describe the geometrical routines in gl. gl can handle the following projections

    • lat/lon
    • Rotated lat/lon
    • Lambert
    • Polar stereographic
    • Rotated Mercator

    Interpolation

    All interpolations are handled within the module util/gl/mod/module_interpol.f90. The module contains

    • clear_interpol to clear the interpolation setup
    • setup_interpol where the positions of the output gridpoints in the input grid are calculated
    • setup_weights where we calculate the interpolation weights. Interpolation can be nearest gridpoint or bilinear. The interpolation can be masked with a field that tells which gridpoints from the input fields can be used.

    The setup routines are only called once.

    • interpolate runs the interpolation
    • resample works like the interpolation if the input grid is coarser than the output grid. If it is the other way around, it takes the average of the input gridpoints belonging to each output gridpoint.

    Interpolation can be done between different projections as well as to geographical points. The most general example of the usage of the interpolation can be found in util/gl/grb/any2any.F90.

    For practical usage see the section about postprocessing.

    Rotations

    All rotations are handled within the module util/gl/mod/module_rotations.f90. The module contains

    • clear_rotation to clear the rotation setup
    • prepare_rotation prepares rotations from the input geometry to the output geometry via north/south components.
    • rotate_winds runs the actual rotation.

    Staggering

    The staggering of an input file is based on knowledge about the model and is in util/gl/mod/module_griblist.f90. The restaggering is done in util/gl/grb/restag.f90 as a simple average between gridpoints. The staggering of the output geometry is defined by OUTGEO@ARKAWA, where A and C are available options.

    diff --git a/dev/PostProcessing/gl/index.html b/dev/PostProcessing/gl/index.html index 7586fc967e..7ae77dbf8c 100644 --- a/dev/PostProcessing/gl/index.html +++ b/dev/PostProcessing/gl/index.html @@ -233,4 +233,4 @@ -NLON $NLON -NLAT $NLAT \ -LATC $LATC -LONC $LONC \ -LAT0 $LAT0 -LON0 $LON0 \ --GSIZE $GSIZE

    To get the geographical position of the lower left corner use

    domain_prop -f -LOW_LEFT FAFILE  

    To print out the important projection parameters in a file use:

    domain_prop -f -4JB FAFILE

    Get time information from a file

    domain_prop -f -DATE FAFILE

    fldextr and obsextr

    Read about the verification extraction programs here

    diff --git a/dev/PostProcessing/xtool/index.html b/dev/PostProcessing/xtool/index.html index 91ede76880..7875c5dbc3 100644 --- a/dev/PostProcessing/xtool/index.html +++ b/dev/PostProcessing/xtool/index.html @@ -67,4 +67,4 @@ outkey%time = 0000 outkey%endstep = 8 outkey%startstep = 7 -/

    This is used by scr/convertFA to deaccumulate fields to NetCDF for climate simulations.

    SAL

    Structure Amplitude Location (SAL) is an object-based quality measure for the verification of QPFs (Wernli et al., 2008). SAL contains three independent components that focus on the Structure, Amplitude and Location of the precipitation field in a specified domain.

    diff --git a/dev/SuiteManagement/ECFLOW/index.html b/dev/SuiteManagement/ECFLOW/index.html index 59e4554ca3..304a159898 100644 --- a/dev/SuiteManagement/ECFLOW/index.html +++ b/dev/SuiteManagement/ECFLOW/index.html @@ -18,4 +18,4 @@ source ~/.bash_profile module unload ecflow module load ecflow/5.7.0 -$@

    The ecFlow server version may change over time.

    Add another user to your ecFlow viewer

    Sometimes it's handy to be able to follow, and control, your colleagues' experiments. To be able to do this, follow these steps:

    Changing the port

    By default, the port is set by

    export ECF_PORT=$((1500+usernumber))

    in mSMS.job (40h1.1), Start_ecFlow.sh (up to #b6d58dd), or Main (currently).

    For the VMs at ECMWF it is set to 3141 in Env_system. If you want to change this number (for example, if that port is in use already), you will also need to add a -p flag when calling ecflow_start.sh as follows:

    ecflow_start.sh -p $ECF_PORT -d $JOBOUTDIR

    Otherwise, ecflow_start.sh tries to open the default port.
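
    Putting the two pieces together, a minimal sketch could look as follows (assuming usernumber is your numeric user id; taking it from id -u is an assumption, not something this page prescribes):

    export ECF_PORT=$((1500+$(id -u)))          # per-user port; id -u assumed to give usernumber
    ecflow_start.sh -p $ECF_PORT -d $JOBOUTDIR  # start the server on that port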

    Note: if you already have an ecFlow server running at your new port number before launching an experiment, this won't be an issue.

    More info

    diff --git a/dev/System/Build_local_docs/index.html b/dev/System/Build_local_docs/index.html index 4ca675166a..2586c8eca3 100644 --- a/dev/System/Build_local_docs/index.html +++ b/dev/System/Build_local_docs/index.html @@ -17,4 +17,4 @@ │ - ✘ ENV["GITHUB_REF"] matches devbranch="pre-CY46h1" │ - ✘ ENV["GITHUB_ACTOR"] exists │ - ✘ ENV["DOCUMENTER_KEY"] or ENV["GITHUB_TOKEN"] exists -└ Deploying: ✘

    The HTML pages will be put in docs/build. Open index.html in a browser:

    firefox docs/build/index.html

    A local build will not deploy the HTML pages to github.com/Hirlam/HarmonieSystemDocumentation.git.

    Also see .github/workflows/documentation.yml

    diff --git a/dev/System/DrHook/index.html b/dev/System/DrHook/index.html index ebad785e6f..60223d490c 100644 --- a/dev/System/DrHook/index.html +++ b/dev/System/DrHook/index.html @@ -39,4 +39,4 @@ !-- The following now does NOT initialize MPL nor MPI for you IF (LHOOK) CALL DR_HOOK('SOME_UTILGL_TOOL',0,ZHOOK_HANDLE) ... -IF (LHOOK) CALL DR_HOOK('SOME_UTILGL_TOOL',1,ZHOOK_HANDLE)

    Overheads

    DR_HOOK=1 has practically no overhead on a scalar machine. Profiling with DR_HOOK_OPT=prof causes some 5% overhead.
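
    As a minimal sketch, both switches are environment variables exported before running the binary (how the binary is then launched is site specific and omitted here):

    export DR_HOOK=1          # enable Dr.Hook
    export DR_HOOK_OPT=prof   # additionally produce wall-clock profiles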

    On a vector machine the overheads are so large that Dr.Hook unfortunately should not be used there.

    diff --git a/dev/System/ECMWF/ECMWF_teleport/index.html b/dev/System/ECMWF/ECMWF_teleport/index.html index 33c9a84602..be64076892 100644 --- a/dev/System/ECMWF/ECMWF_teleport/index.html +++ b/dev/System/ECMWF/ECMWF_teleport/index.html @@ -20,4 +20,4 @@ User dui IdentityFile ~/.tsh/keys/jump.ecmwf.int/eoin.whelan@met.ie ProxyCommand bash -c "tsh login; ssh -W %h:%p %r@jump.ecmwf.int" -[ewhelan@reaserve ~]$

    Open ecFlow ports

    ssh hpc-login -C -N -L 3141:ecflow-gen-dui-001:3141
    diff --git a/dev/System/ECMWF/RunningHarmonieOnAtos/index.html b/dev/System/ECMWF/RunningHarmonieOnAtos/index.html index 7c41c9d269..3e5ebee682 100644 --- a/dev/System/ECMWF/RunningHarmonieOnAtos/index.html +++ b/dev/System/ECMWF/RunningHarmonieOnAtos/index.html @@ -18,4 +18,4 @@ git commit --author "Name <name@host>" -m "Commit message" git push --set-upstream origin <feature/branch_name>

    Specifying --set-upstream origin <feature/branch_name> to git push is only necessary the first time you push your branch to the remote. When ready, you can go to GitHub and make a pull request to the Harmonie repository from your fork.

    Start your experiment

    Launch the experiment by giving the start time, DTG, and end time, DTGEND:

    ./Harmonie start DTG=YYYYMMDDHH DTGEND=YYYYMMDDHH
     # e.g., ./Harmonie start DTG=2022122400 DTGEND=2022122406

    If successful, Harmonie will identify your experiment name, start building your binaries and run your forecast. If not, you need to examine the ecFlow log file $HM_DATA/ECF.log. $HM_DATA is defined in your Env_system file. At ECMWF $HM_DATA=$SCRATCH/hm_home/$EXP, where $EXP is your experiment name. Read more about where things happen further down.
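
    For example, with the Atos defaults just mentioned, a quick look at the log could be (a sketch; my_exp is a placeholder experiment name):

    tail $SCRATCH/hm_home/my_exp/ECF.log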

    Continue your experiment

    If your experiment has successfully completed and you would like to continue for another period, you should write

    ./Harmonie prod DTGEND=YYYYMMDDHH

    By using prod you tell the system that you are continuing the experiment and using the first guess from the previous cycle. The start date is taken from the file progress.log created in your $HOME/hm_home/my_exp directory. If you had used start, the initial data would have been interpolated from the boundaries, in other words a cold start.
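
    To check which date prod will continue from, you can inspect that file (a sketch; the exact contents may vary between versions):

    cat $HOME/hm_home/my_exp/progress.log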

    Start/Restart of ecflow_ui

    To start the graphical window for ECFLOW

    ./Harmonie mon

    The graphical window runs independently of the experiment and can be closed and restarted again with the same command. With the graphical interface you can control and view logfiles of each task.

    Making local changes

    Very soon you will find that you need to make changes in a script or in the source code. Once you have identified which file to edit, you put it into the current $HOME/hm_home/my_exp directory, with exactly the same subdirectory structure as in the reference, e.g., if you want to modify a namelist setting:

    ./Harmonie co nam/harmonie_namelists.pm   # retrieve default namelist harmonie_namelists.pm
    vi nam/harmonie_namelists.pm              # modify the namelist

    Next time you run your experiment the changed file will be used. You can also make changes in a running experiment. Make the change you wish and rerun the InitRun task from the viewer. The InitRun task copies all files from your local experiment directory to your working directory $HM_DATA. Once your InitRun task is complete you can rerun the task you are interested in. If you wish to recompile something you will also have to rerun the Build tasks.

    Issues

    Harmonie exp stop at ECMWF(Atos) due $PERM mounting problem https://github.com/Hirlam/Harmonie/issues/628

    Account

    In order to change the billing account, open Env_submit and find the definition of scalar_job. Then add a line like

    'ACCOUNT' => $submit_type.' --account=account_name' to the definition of the dictionary.
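
    A minimal sketch for locating the dictionary to edit (assuming Env_submit sits in your experiment directory, which is an assumption of this example):

    grep -n "scalar_job" $HOME/hm_home/my_exp/Env_submit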

    Directory structure

    $SCRATCH

    In $SCRATCH/hm_home/$EXP you will find

    • bin: Binaries
    • lib: Source code synced from $HM_LIB and compiled code
    • lib/src: Object files and source code (if you build with makeup, set by MAKEUP_BUILD_DIR)
    • lib/util: Utilities such as makeup, gl_grib_api or oulan
    • climate: Climate files
    • YYYYMMDD_HH: Working directory for the current cycle. If an experiment fails it is useful to check the IFS log file, NODE.001_01, in the working directory of the current cycle (see the sketch after this list). The failed job will be in a directory called something like Failed_this_job.
    • archive: Archived files. A YYYY/MM/DD/HH structure for per-cycle data. ICMSHHARM+NNNN and ICMSHHARM+NNNN.sfx are atmospheric and SURFEX forecast output files.
    • extract: Verification input data. This is also stored on the permanent disk $HPCPERM/HARMONIE/archive/$EXP/parchive/archive/extract
    • ECF.log: Log of job submission
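
    A sketch of inspecting a failed cycle, as referenced in the YYYYMMDD_HH entry above (the date and experiment name are hypothetical):

    cd $SCRATCH/hm_home/my_exp/20221224_00   # working directory of the failed cycle
    tail NODE.001_01                         # IFS log file
    ls -d Failed_*                           # directory holding the failed job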

    ECFS

    $PERM

    • HARMONIE/$EXP: ecFlow log and job files
    • hm_lib/$EXP/lib: Scripts, config files, ecf and suite definitions, source code (not compiled, set by $HM_LIB). The reference with the experiment's changes on top.

    $HPCPERM

    In $HPCPERM/hm_home/$EXP

    • parchive/archive/extract/: Verification input data

    $HOME on ecflow-gen-${user}-001

    • ecflow_server/: ecFlow checkpoint and log files

    Cleanup of old experiments

    Danger

    These commands may not work properly in all versions. Do not run the removal before you're sure it's OK.

    Once you have completed your experiment you may wish to remove code, scripts and data from the disks. Harmonie provides some simple tools to do this. First check the content of the different disks by

    Harmonie CleanUp -ALL

    Once you have convinced yourself that this is OK you can proceed with the removal.

    Harmonie CleanUp -ALL -go 

    If you would like to exclude the data stored on e.g. ECFS (at ECMWF), or in more general terms stored under HM_EXP (as defined in Env_system), you run

    Harmonie CleanUp -d

    to list the directories intended for cleaning. Again, convince yourself that this is OK and proceed with the cleaning by

    Harmonie CleanUp -d -go

    You can always remove the data from ECFS directly by running e.g.

    erm -R ec:/YOUR_USER/harmonie/EXPERIMENT_NAME 

    or

    erm -R ectmp:/YOUR_USER/harmonie/EXPERIMENT_NAME 

    Debugging Harmonie with ARM DDT

    Follow instructions here. Use Run DDT client on your Personal Computer or End User Device

    diff --git a/dev/System/GitDeveloperDocumentation/index.html b/dev/System/GitDeveloperDocumentation/index.html index 8a51b150e0..dd71b827a9 100644 --- a/dev/System/GitDeveloperDocumentation/index.html +++ b/dev/System/GitDeveloperDocumentation/index.html @@ -33,4 +33,4 @@ remote:
  • Follow this link

  • Once the pull request has been approved by the System-core team it will be merged into the dev-CY46h1 branch

  • Further information is available here

    Moving my branches from hirlam.org

    1. Add your hirlam.org fork as a remote (HLUSER is your hirlam.org username)

      cd $HOME/git/github/USER/Harmonie
       git remote add hirlamorgfork https://git.hirlam.org/users/HLUSER/Harmonie
       git fetch hirlamorgfork
    2. For each branch BRANCHNAME you want to move to github

      git checkout -t hirlamorgfork/BRANCHNAME
      git push origin BRANCHNAME

    learn git branching is an excellent interactive tool to understand git.

    Coding Standards

    See Coding standards for Arpège, IFS and Aladin and Arpege/IFS Fortran coding standard (requires ECMWF account)

    diff --git a/dev/System/HarmonieTestbed/index.html b/dev/System/HarmonieTestbed/index.html index 3e592560e5..62184652ff 100644 --- a/dev/System/HarmonieTestbed/index.html +++ b/dev/System/HarmonieTestbed/index.html @@ -122,4 +122,4 @@ [ Status: OK] For more details please check /scratch/hlam/hm_home/testbed_ECMWF.atos.gnu_12414/testbed_comp_12414.log_details -

    All the logs from a testbed experiment are posted to the mail address MAIL_TESTBED set in ecf_config_exp.h. If a github token GH_TOKEN is set in scr/Testbed_comp the results will also be posted on the Testbed output discussions on github using the GraphQL API. See the github settings to create a token; tick at least the repo box. Save your token in $HOME/.ssh/gh_testbed.token or in $HOME/env/gh_testbed.token and chmod it to 600, and it will be used. The test returns three different status signals
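
    A minimal sketch for storing the token (the token string is a placeholder):

    mkdir -p $HOME/.ssh
    echo "ghp_XXXXXXXX" > $HOME/.ssh/gh_testbed.token   # placeholder token value
    chmod 600 $HOME/.ssh/gh_testbed.token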

    In addition to the summary information, detailed information about the nature of the differences can be found in the archive.

    When to use the testbed

    It is recommended to use the testbed when adding new options or making other changes in the configurations. If your new option is not activated, the result compared with the reference experiment should be the same; if not, you have to start debugging. When changing things for one configuration it's easy to break other ones. In such cases the testbed is a very good tool to make sure you haven't destroyed anything.

    diff --git a/dev/System/Local/QuickStartLocal/index.html b/dev/System/Local/QuickStartLocal/index.html index 21c0031e88..c70663c505 100644 --- a/dev/System/Local/QuickStartLocal/index.html +++ b/dev/System/Local/QuickStartLocal/index.html @@ -12,4 +12,4 @@ PATH_TO_HARMONIE/config-sh/Harmonie prod DTGEND=YYYYMMDDHH LL=12

    By using prod you tell the system that you are continuing the experiment and using the first guess from the previous cycle. The start date is taken from the file progress.log created in your $HOME/hm_home/my_exp directory. If you had used start, the initial data would have been interpolated from the boundaries, in other words a cold start.

    Start/Restart of ecflow_ui

    To start the graphical window for ecFlow on ECMWF type

    cd $HOME/hm_home/my_exp
     PATH_TO_HARMONIE/config-sh/Harmonie mon

    The graphical window, mXCdp, runs independently of the mSMS job and can be closed and restarted again with the same command. With the graphical interface you can control and view logfiles of each task.

    Making local changes

    Very soon you will find that you need to make changes in a script or in the source code. Once you have identified which file to edit, you put it into the current $HOME/hm_home/my_exp directory, with exactly the same subdirectory structure as in the reference, e.g., if you want to modify a namelist setting:

    cd $HOME/hm_home/my_exp
     PATH_TO_HARMONIE/config-sh/Harmonie co nam/harmonie_namelists.pm         # retrieve default namelist harmonie_namelists.pm
    vi nam/harmonie_namelists.pm                        # modify the namelist

    Next time you run your experiment the changed file will be used. You can also make changes in a running experiment. Make the change you wish and rerun the InitRun task in the mXCdp window. The InitRun task copies all files from your local experiment directory to your working directory $HM_DATA. Once your InitRun task is complete you can rerun the task you are interested in. If you wish to recompile something you will also have to rerun the Build tasks. Read more about how to control and rerun tasks in mini-SMS from mXCdp.

    Directory structure

    On most platforms HARMONIE compiles and produces all its output data under $HM_DATA (defined in ~/hm_home/my_exp/Env_system).

    • Binaries: $BINDIR (set in ecf/config_exp.h), default is $HM_DATA/bin
    • Libraries, object files & source code: $HM_DATA/lib/src if MAKEUP=yes, $HM_DATA/gmkpack_build if MAKEUP=no
    • Scripts: $HM_LIB/scr
    • Config files (Env_system & Env_submit): $HM_LIB, linked to files in $HM_LIB/config-sh
    • ecf scripts and main config: $HM_LIB/ecf
    • ecFlow suite definitions: $HM_LIB/suites
    • Utilities such as gmkpack, gl & monitor: $HM_DATA/lib/util
    • Climate files: $HM_DATA/climate
    • Working directory for the current cycle: $HM_DATA/YYYYMMDD_HH
    • Archived files: $HM_DATA/archive
    • Archived cycle output: $HM_DATA/archive/YYYY/MM/DD/HH
    • Archived log files: $HM_DATA/archive/log/HM_TaskFamily_YYYYMMDDHH.html where TaskFamily=MakeCycleInput,Date,Postprocessing
    • Task log files: $JOBOUTDIR (set in Env_system), usually $HM_DATA/sms_logfiles
    • Verification data (vfld/vobs/logmonitor): $HM_DATA/archive/extract
    • Verification (monitor) results: $HM_DATA/archive/extract/WebgraF
    • "Fail" directory: $HM_DATA/YYYYMMDD_HH/Failed_Family_Task (look at ifs.stat, NODE.001_01, fort.4)

    Archive contents

    $HM_DATA/archive/YYYY/MM/DD/HH is used to store "archived" output from HARMONIE cycles. The level of archiving depends on ARSTRATEGY in ecf/config_exp.h. The default setting is medium, which will keep the following cycle data:

    Cleanup of old experiments

    Once you have completed your experiment you may wish to remove code, scripts and data from the disks. Harmonie provides some simple tools to do this. First check the content of the different disks by

     Harmonie CleanUp -ALL

    Once you have convinced yourself that this is OK you can proceed with the removal.

     Harmonie CleanUp -ALL -go 

    If you would like to exclude the data stored under HM_DATA (as defined in Env_system) you run

     Harmonie CleanUp -d

    to list the directories intended for cleaning. Again, convince yourself that this is OK and proceed with the cleaning by

     Harmonie CleanUp -d -go

    NOTE that these commands may not work properly in all versions. Do not run the removal before you're sure it's OK.

    diff --git a/dev/System/MFaccess/index.html b/dev/System/MFaccess/index.html index ae7d5dc4c6..02b1f5f9bc 100644 --- a/dev/System/MFaccess/index.html +++ b/dev/System/MFaccess/index.html @@ -94,4 +94,4 @@ [whelane@merou ~]$

    Access to the (read-only) MF arpifs git repository

    MF use ssh keys to allow access to their read-only git repository. If approved by the HIRLAM System PL, you should request access to the repository by sending a request e-mail to Eric Escaliere, cc'ed to Daniel Santos and Claude Fischer, with your ssh public key attached.

    Once you have been given access you can create a local clone by issuing the following commands:

    cd $HOME
     mkdir arpifs_releases
     cd arpifs_releases
    git clone ssh://reader054@git.cnrm-game-meteo.fr/git/arpifs.git

    Happy gitting!

    diff --git a/dev/System/ReleaseProcess/index.html b/dev/System/ReleaseProcess/index.html index 5184271c99..ce574fe1b5 100644 --- a/dev/System/ReleaseProcess/index.html +++ b/dev/System/ReleaseProcess/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Harmonie release process

    This page describes the release process for tagging new Harmonie versions.

    Harmonie repository organization

    In the past we used the concept of trunk (svn) or develop (git) for the development of Harmonie-Arome codes. Since CY46 we have decided to use dev-CYXXhX as the development branch, to be clearer about the Harmonie version under development.

    Harmonie-AROME naming convention

    The development of a Harmonie-Arome version starts from a common T (Toulouse) cycle of the ACCORD consortium, taken from the IAL repository.

    • The naming convention uses the number of the T cycle used as base.
    • The letter h is used to indicate that it is, or will be, a Harmonie-Arome CSC that differs from the T base code version.
    • The first number after the h refers to the version of the T cycle used as base (e.g. CY46T1 is used as base for dev-CY46h1).

    Tagging

    In Hirlam, various tags are made prior to official releases to provide user communities with a 'frozen' code set, even though the code has not necessarily been fully validated. These codes are often labeled as alpha, beta, rc.

    • Alpha release (e.g. harmonie-46h1.alpha.1): a snapshot of the dev branch which is not yet mature, technically or meteorologically
    • Beta release (e.g. harmonie-46h1.beta.1): a snapshot of the dev branch which is deemed technically mature for evaluation and meteorological validation; there could still be more features to add
    • Target releases (e.g. harmonie-43h2.2.target.2 and harmonie-43h2.2.target.3): pre-release tagging for final meteorological evaluation
    • Release candidate (e.g. harmonie-43h2.2.rc1): pre-release tagging for final evaluation
    • Official release (e.g. harmonie-43h2.2): mature for operational use
      • The second number refers to the number of the Harmonie-Arome release that has been technically and meteorologically quality assured
      • A third number could appear in the name for a minor update, technical release necessities or other aspects (e.g. harmonie-43h2.2.1)
      • Also some bugfix branches could be active, using bf in the naming (e.g. harmonie-43h2.2_bf)
    diff --git a/dev/System/StandaloneOdb/index.html index 6db6d37f3c..e8367dd7c0 100644

    cp -r /home/ms/ie/dui/odbMacroTest .
    cd odbMacroTest
    metview4 -b odbmap.mv4 conv201312.odb "obsvalue" "andate=20131225 and antime=120000 and varno=39" legon png
    xv odbmap.1.png

    diff --git a/dev/System/TheHarmonieScript/index.html index db9d5e9707..7c249162c0 100644

    # unless the / is preceded by ~ (which will be removed).
    # Hence, to remove e.g. all analyses from 1995, use 1995/an,
    # which translates to 1995[0-9][0-9]*_*/an*
    # (to be precise: use: CleanUp("REMOVE:1995/an", "-go");

    diff --git a/dev/System/UpdateNamelists/index.html index 2656819d4a..85d9439910 100644

    Create namelist hash 4dvar.pm
    Create updated empty namelist hash empty_4dvar.pm for 4dvar

    We have now created two Perl modules for the new namelists: one with empty namelist entries, 4dvar_empty.pm, and one with all namelists in the right format, 4dvar.pm. To get one of your namelists back (sorted) you can write:

    ./gen_namlist.pl -n 4dvar_empty.pm -n 4dvar.pm namscreen_dat_4d

    To get the module integrated into the system it has to be merged with the conventions in harmonie_namelists.pm, but as a start the full namelists can be used. Copy the new empty*.pm to empty.pm to get the updated list of empty namelists.
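    As a one-line sketch of that last step (the file name follows the 4dvar example above; use the name of the module you actually generated):

    # Adopt the regenerated empty-namelist module as the default empty.pm
    cp 4dvar_empty.pm empty.pm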

    Create the new namelist

    Add the new namelists to the script scr/Get_namelist. In this case we would add a new case for 4dvar:

    4dvartraj) 
        NAMELIST_CONFIG="$DEFAULT minimization dynamics ${DYNAMICS} ${PHYSICS} ${PHYSICS}_minimization ${SURFACE} ${EXTRA_FORECAST_OPTIONS} varbc minim4d"
        ;;
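    A hedged usage sketch follows; the calling convention is defined in scr/Get_namelist itself, and passing the case label as the only argument is an assumption for illustration:

    # Hypothetical invocation: generate the namelist for the new 4dvartraj case
    scr/Get_namelist 4dvartraj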
    diff --git a/dev/Verification/AllobsVerification/index.html index 2031ca4daa..6e1d1d7c10 100644

    allobs Verification

    Introduction

    It is possible to use Screening (NCONF=002) to calculate observation-minus-forecast (O-F) statistics using forecasts of any length as the model first-guess. The ...

    Screening adjustments

    The screening of observations is switched off by setting L_SCREEN_CALL in the NAMCT0 namelist:

    &NAMCT0
       L_SCREEN_CALL=.FALSE.,
    /

    Inputs

    The inputs for the allobs data extraction are the same as for a regular DA Screening task: observations (ECMA) and a first-guess (the short-forecast files ICMSHHARM+hhhh and ICMSHHARM+hhhh.sfx).

    Note

    The forecasts being verified need both the model state (ICMSHHARM+hhhh) and the "full" SURFEX file (ICMSHHARM+hhhh.sfx) to be available for the Scrextr task. You may need to adjust the VERITIMES and SWRITUPTIMES settings in your ecf/config_exp.h file.
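    For example (a sketch with illustrative values only; the exact syntax is documented in ecf/config_exp.h itself):

    # Hypothetical: make sure output times cover the forecast lengths verified
    VERITIMES="00 03 06 09 12"       # example values
    SWRITUPTIMES="00 03 06 09 12"    # example values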

    The following settings are important:

    In ecf/config_exp.h:

    • SCREXTR : Use Screening (NCONF=002) to produce O-F data
    • SCREXTR_TASKS : Number of parallel tasks for the O-F extraction
    • FGREFEXP : Experiment name for the first-guess. If set to undef, the experiment's own forecasts are used
    • OBREFEXP : Experiment name for the ODBs. If set to undef, the experiment's own ODBs are used
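    A hedged sketch of how these might look in ecf/config_exp.h (the values are illustrations, not recommendations):

    SCREXTR=yes          # produce O-F data via Screening (NCONF=002)
    SCREXTR_TASKS=4      # hypothetical number of parallel extraction tasks
    FGREFEXP=undef       # use this experiment's own forecasts as first-guess
    OBREFEXP=undef       # use this experiment's own ODBs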

    Running

    This extraction can be executed as part of a running experiment (with SCREXTR=yes) or using a standalone suite (PLAYFILE=allobsver).
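    As a sketch of the standalone case, following the pattern of the Harmonie invocations shown in the Obsmon section below (the exact argument list is an assumption):

    # Hypothetical: run the allobs extraction as a standalone suite
    ${HM_REV}/config-sh/Harmonie start DTG=YYYYMMDDHH DTGEND=YYYYMMDDHH PLAYFILE=allobsver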

    Output

    The output from the Scrextr task is a CCMA ODB with O-F statistics. This ODB is archived in $HM_DATA/archive/extract/obsver/odb_ver_${FGDTG}_${FCLENSTR}/, where FGDTG is the forecast cycle DTG and FCLENSTR is the forecast length verified. The ODB data is then converted to ODB-2 and SQLite files for use in harp and other downstream applications, using the odbcon tools.
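    For instance (a sketch with made-up values for FGDTG and FCLENSTR):

    # Hypothetical: inspect the archived ODB for the 2024071000 cycle, 24 h forecast
    ls $HM_DATA/archive/extract/obsver/odb_ver_2024071000_024/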

    diff --git a/dev/Verification/CommonVerification/index.html index db520612e5..435b8096bb 100644
    diff --git a/dev/Verification/Extract4verification/index.html index cb4eed28d8..10104ee13b 100644

    ...
    pressure(nlev_temp) val(1:nvar_temp)
    stid_2 lat lon hgt
    ...

    The accumulation time allows us to easily include, for example, different precipitation accumulation intervals.

    diff --git a/dev/Verification/HARP/index.html index 9500701c65..7e5a646585 100644

    harp

    harp is a set of R packages for manipulation, analysis, visualization and verification of data from regular grids. The most up to date information and tutorials can be found on the website for the 2024 training course

    diff --git a/dev/Verification/Obsmon/index.html index 0b2a536b55..8bd67ea2d7 100644

    OBSMON

    In 2014 a new version of the observational monitoring system entered the trunk. The first official release containing obsmon was cy38h1.2.

    The obsmon package consists of two components. The first is a Fortran-based code that is run, for all the active observation types (defined in scr/include.ass), at the post-processing stage of an experiment. It generates statistics from the ODB and stores the data in three SQLite tables (ECMA/CCMA/ECMA_SFC(CANARI)). In addition, the SQLite tables are concatenated into tables in the /ts directory at the end of the run.

    The second component is written in R using the Shiny web application framework. It allows the interactive visualization of the data contained in the SQLite tables produced by the first component of the package. This can be done either offline or via a server daemon (e.g. shiny.hirlam.org).

    For disambiguation, we will hereinafter use the terms "backend" and "frontend" to refer to the first and second components of obsmon, respectively.

    How to turn on backend obsmon?

    Obsmon is enabled by default in ecf/config_exp.h via OBSMONITOR=obstat.

    Note

    If you don't have any log-files from the monitored experiment, you should remove plotlog from the OBSMONITOR= string in ecf/config_exp.h.
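    As a sketch, assuming OBSMONITOR holds a space-separated list of monitoring tasks in ecf/config_exp.h:

    # With log-file monitoring enabled:
    OBSMONITOR="obstat plotlog"
    # Without log-files from the monitored experiment:
    OBSMONITOR=obstat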

    Note

    Make sure that the -DODBMONITOR pre-processor flag is active during compilation of util/monitor. This should only be an issue on untested platforms and is by default enabled on ECMWF.

    How to create statistics and SQLite tables offline/stand-alone:

    If you are running a normal harmonie experiment with the OBSMONITOR=obstat active, the following step is not relevant.

    Two new actions are implemented in the Harmonie script. Instead of start you can write obsmon, and instead of prod you can write obsmonprod. This will use the correct definition file and only do post-processing. If you have your ODB files in another experiment you can add the variable OBSMON_EXP_ARCHIVE_ROOT to point to the archive directory of the experiment you are monitoring. This approach is used in the operational MetCoOp runs. If you set OBSMON_EXP=label the runs will be stored in $EXTRARCH/label/. This way you can use the same experiment to monitor all other experiments. The experiments do not need to belong to you as long as you have read permission for them.

    1. as start:
     ${HM_REV}/config-sh/Harmonie obsmon DTG=YYYYMMDDHH DTGEND=YYYYMMDDHH OBSMON_EXP_ARCHIVE_ROOT=PATH-TO-ARCHIVE-DIRECTORY-TO-MONITOR OBSMON_EXP=MY-LABEL
    2. as prod:
     ${HM_REV}/config-sh/Harmonie obsmonprod DTGEND=YYYYMMDDHH OBSMON_EXP_ARCHIVE_ROOT=PATH-TO-ARCHIVE-DIRECTORY-TO-MONITOR OBSMON_EXP=MY-LABEL

    If you want to monitor an experiment stored on ECFS, you should specify OBSMON_EXP_ARCHIVE_ROOT with the full address (ectmp:/$USER/..... or ec:/$USER/...), e.g.

    OBSMON_EXP_ARCHIVE_ROOT=ectmp:/$USER/harmonie/MY-EXP OBSMON_EXP=MY-LABEL

    You can also monitor other users' experiments as long as you have read access to the data.
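    Putting the pieces above together (the user id and experiment names here are hypothetical):

    # Sketch: monitor a colleague's ECFS-archived experiment under your own label
    ${HM_REV}/config-sh/Harmonie obsmon DTG=YYYYMMDDHH DTGEND=YYYYMMDDHH \
      OBSMON_EXP_ARCHIVE_ROOT=ec:/abcd/harmonie/THEIR-EXP OBSMON_EXP=THEIR-LABEL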

    How to visualize the SQLite tables using frontend obsmon:

    Download the code from its git repo at github:

    git clone git@github.com:Hirlam/obsmon.git 

    Instructions on how to install, configure and run the code can be found in the file docs/obsmon_documentation.pdf that is shipped with the code.

    How to extend backend obsmon with new observation types

    Step 1: Extract statistics from ODB

    In the scripts you must enable monitoring of your observation type. Each observation type is monitored if active in:

    msms/harmonie.tdf

    The script that calls the obsmon binary is:

    scr/obsmon_stat

    This script sets the correct namelist based on how you define your observation below.

    After the information is extracted, the different SQLite bases are gathered into one big SQLite file in the script:

    scr/obsmon_link_stat

    The observation types that the above script gathers are defined in obtypes in this script:

    util/monitor/scr/monitor.inc

    Then let us introduce the new observation in the obsmon binary. The source code is in

    harmonie/util/monitor

    There are two modules controlling the extraction from ODB:

    mod/module_obstypes.f90
    mod/module_obsmon.F90

    The first module defines and initializes the observation type you want to monitor; the second calls the initialization defined in the first file. The important steps are to introduce namelist variables and a meaningful definition in the initialization of the observation type.

    The real extraction from ODB is done in

    cmastat/odb_extract.f90

    At the moment two different SQL files are used, one for conventional observations and one for satellites. Radar, for example, is handled as TEMP/AIRCRAFT.

    Step 2: Visualize the new observation in shiny (frontend obsmon)

    The logic of which observation types to display is defined in:

    src/observation_definitions.R

    When a new plot is added, the plotting is defined in the files under:

    src/plots
    diff --git a/dev/Verification/Verification/index.html index f8395bde5c..6dc9639064 100644
    diff --git a/dev/Visualization/EPyGrAM/index.html index 0a74d9924d..c8800c6124 100644

    domain_maker.py

    Enjoy!

    diff --git a/dev/assets/README/index.html index a6d19266e9..32eb297db5 100644

    da_graph.svg is created in .github/workflows/documentation.yml. It can be recreated locally using dot, which is part of graphviz:

    sudo apt install graphviz
    dot -Tsvg da_graph.dot -o da_graph.svg
    diff --git a/dev/index.html index 21a12d1241..f6a5a13a46 100644

    Harmonie System Documentation

    Welcome to the Harmonie system documentation

    Github

    Contributing

    To update a page:

    • Click the "Edit on Github" button at the top right of the page
    • Edit the markdown file on github.com
    • Commit (this creates a new branch in your fork) and start a pull request

    When adding new pages, also add them to docs/pages.jl so they appear in the navigation bar.

    To add a reference:

    • Update docs/references.bib using <Lastname><Year> as the citation key.
    • Cite paper in markdown using [<Lastname><Year>](@cite)

    Instructions on how to build the system documentation locally are here.

    diff --git a/dev/objects.inv b/dev/objects.inv index c72ba3bc18c54cda5a48286910846f8089b656db..58d5ffac7e5471aff12c68415d96f1c6ddc7c7e5 100644
    GIT binary patch
    delta 16484
    delta 16465

    diff --git a/dev/references/index.html index 79876c87a0..1060ffa8b6 100644

    diff --git a/dev/search_index.js b/dev/search_index.js index ef3abcfe1a..9d2bb1baca 100644 --- a/dev/search_index.js +++ b/dev/search_index.js @@ -1,3 +1,3 @@ var documenterSearchIndex = {"docs": -[{"location":"ForecastModel/SingleColumnModel/Forcing/#musc-forcing","page":"Forcing","title":"MUSC Forcing","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"From Eric Bazile: The fields SXXXFORC0001 –> SXXXFORC00NN in the initial file for MUSC are the atmospheric forcing without any rules for variables or advection etc ...","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"XXX = vertical levels\nNN = number of forcing fields","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"So if you put for NN=1 the temperature and QV in 2, and the geostrophic wind ug (3) and Vg (4) and you want to force MUSC for 48h with a nudging for T and Q and a geostrophic wind you should add in the MUSC namelist ","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"NAMCT0\n LSFORC=T\n LSFROCS= FALSE ; default ONLY for surface forcing without SURFEX\n/ \n&NAMGFL\n NGFL_FORC=4 ; number of atmospheric forcing fields\n/\n&NAMLSFORC\n LGEOST_UV_FRC=.TRUE., ; geostrophic wind forcing\n LMUSCLFA=.TRUE.,\n NGEOST_U_DEB=3, ; Ug is in position 3 in GFL_FORC\n NGEOST_U_NUM=1, ; ONLY 1 Ug available \n NGEOST_V_DEB=4, ; Vg is in position 4 in GFL_FORC\n NGEOST_V_NUM=1, ; ONLY one Vg available\n LT_NUDG=.TRUE., : Nudging for T\n LQV_NUDG=.TRUE., ; Nudging for Qv\n RELAX_TAUT=43200. ; Relaxation time for Nudging for T\n RELAX_TAUQ=43200. ; same for Q\n NT_NUDG_NUM=1 ; Number of nudging profile for T \n NT_NUDG_DEB=1 ; Profile 1 used for the nudging of T\n NQV_NUDG_NUM=1 ; Number of nudging profile for Q\n NQV_NUDG_DEB=2 ; Profile 2 used for nudging Qv\n/","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"Here you can run MUSC for 1 day or 100 years with the same nudging profile and geostrophic wind !","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"So it is fully flexible BUT the user should know how the initial profile was created and which fields are in FORC00NN etc ....","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"After you can have several nudging profile (for several time) instead of one profile used for all the simulation. You just need to put the number of profile For ex you have 5 profiles for T for the nudging at 0, 6, 12 18 24. and if you put the T profile 0 in 1, etc ... the modified namelist","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"&NAMGFL\n NGFL_FORC=8 ; number of atmospheric forcing fields\n/\n&NAMLSFORC\n LGEOST_UV_FRC=.TRUE., ; geostrophic wind forcing\n LMUSCLFA=.TRUE.,\n NGEOST_U_DEB=7, ; Ug is in position 3 in GFL_FORC\n NGEOST_U_NUM=1, ; ONLY 1 Ug available \n NGEOST_V_DEB=8, ; Vg is in position 4 in GFL_FORC\n NGEOST_V_NUM=1, ; ONLY one Vg available\n LT_NUDG=.TRUE., : Nudging for T\n LQV_NUDG=.TRUE., ; Nudging for Qv\n RELAX_TAUT=43200. ; Relaxation time for Nudging for T\n RELAX_TAUQ=43200. 
; same for Q\n NT_NUDG_NUM=5 ; Number of nudging profile for T \n NT_NUDG_DEB=1 ; Profile 1 used for the nudging of T\n NQV_NUDG_NUM=1 ; Number of nudging profile for Q\n NQV_NUDG_DEB=6 ; Profile 2 used for nudging Qv\n NL_T_NUDG_TIME(1) = 0\n NL_T_NUDG_TIME(2) = 21600\nNL_T_NUDG_TIME(3) = 43200\nNL_T_NUDG_TIME(4) = 64800\nNL_T_NUDG_TIME(5) = 86400\n/","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"and now you can not run MUSC more than 1 day ... if the time between the forcing profile is the same you can use *_FREQ instead of TIME ...","category":"page"},{"location":"Overview/Content/#Harmonie-Content","page":"Content","title":"Harmonie Content","text":"","category":"section"},{"location":"Overview/Content/#Overview","page":"Content","title":"Overview","text":"","category":"section"},{"location":"Overview/Content/","page":"Content","title":"Content","text":"Harmonie is HIRLAM's adaptation of the LAM version of the IFS/ARPEGE project. The common code shared with the ALADIN program, Meteo France and ECMWF only contains the source code. Harmonie adds the build environment, scripts, support for a scheduler, and a number of diagnostics tools for file conversion and postprocessing. In summary a git clone of harmonie from github contains the following main directories","category":"page"},{"location":"Overview/Content/","page":"Content","title":"Content","text":"config-sh : Configuration and job submission files for different platforms.\nconst : A selected number of constant files for bias correction, assimilation and different internal schemes. A large number of data for climate generation and the RTTOV software is kept outside of the repository. See [wiki:HarmonieSystemDocumentation#Downloaddata].\necf : Directory for the main configuration file config_exp.h and the containers for the scheduler ECFLOW.\nsuites Scripts and suit definition files for ECFLOW, the scheduler for HARMONIE. \nnam : Namelists for different configurations.\nscr : Scripts to run the different tasks.\nsrc : The IFS/ARPEGE source code.\nutil : A number of utilities and support libraries.","category":"page"},{"location":"Overview/Content/#util","page":"Content","title":"util","text":"","category":"section"},{"location":"Overview/Content/","page":"Content","title":"Content","text":"The util directory contains the following main directories","category":"page"},{"location":"Overview/Content/","page":"Content","title":"Content","text":"auxlibs : Contains gribex, bufr, rgb and some dummy routines\nbinutils : https://www.gnu.org/software/binutils/\nchecknorms : Script for code norm checking\ngl_grib_api : Boundary file generator and file converter\nmakeup : HIRLAM style compilation tool\nmusc : MUSC scripts\nobsmon : Code to produce obsmon sqlite files\noffline : SURFEX offline code\noulan : Converts conventional BUFR data to OBSOUL format read by bator.\nRadarDAbyFA : Field alignment code","category":"page"},{"location":"Observations/Iasi/#IASI-radiances-(pre-)-processing","page":"IASI","title":"IASI radiances (pre-) processing","text":"","category":"section"},{"location":"Observations/Iasi/#Introduction","page":"IASI","title":"Introduction","text":"","category":"section"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"Typical IASI radiance data reception consists of a subset of 366 channels out of the full set of 8461. These cover the infrared absorption spectrum from 3.8 to 15.4 micrometers. 
In the context of NWP, the most useful IASI channels include (i) the temperature-sounding channels in the approximate channel index range 100-450, (ii) the humidity-sounding channels at 2800-3500 and 5000-5500 indices, and (iii) surface-sensing window channels at 500-1000. Most of the NWP impact from IASI is thought to come from group (i) and especially from the upper-tropospheric and lower-stratospheric channels in the range 200-300.","category":"page"},{"location":"Observations/Iasi/#Including-IASI-radiances-in-a-HARMONIE-run","page":"IASI","title":"Including IASI radiances in a HARMONIE run","text":"","category":"section"},{"location":"Observations/Iasi/#scr/include.ass","page":"IASI","title":"scr/include.ass","text":"","category":"section"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"scr/include.ass should be edited to \"switch on\" the use of AMSUA (AMSU-A), AMSUB (AMSU-B/MHS):","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"export IASI_OBS=1 # IASI\nexport ATOVS_SOURCE=mars # local: EUMETCast;\nexport IASI_SOURCE=ears # mars:MARS | else: file in $OBDIR\nexport IASI_RT_COEF=lblrtm # genln2|kcarta|lblrtm\n[[ $IASI_OBS -eq 1 ]] && types_BASE=\"$types_BASE iasi\"","category":"page"},{"location":"Observations/Iasi/#Loading-the-IASI-radiances","page":"IASI","title":"Loading the IASI radiances","text":"","category":"section"},{"location":"Observations/Iasi/#Data-extracted-from-MARS","page":"IASI","title":"Data extracted from MARS","text":"","category":"section"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":" elif [ \"$base\" = iasi ] ; then\n # IASI\n if [ \"$IASI_OBS\" -eq 1 ]; then\n echo \"iasi iasi BUFR iasi \">>batormap\n ln -sf \"${HM_LIB}\"/const/bator_param/param_bator.cfg.iasi param.cfg\n if [ \"$IASI_SOURCE\" = mars ] ; then\n ln -sf \"$WRK\"/splitObs/iasi ./BUFR.iasi\n else\n ln -sf $OBSDIR/iasi$DTG ./BUFR.iasi\n fi\n fi","category":"page"},{"location":"Observations/Iasi/#Locally-received-data","page":"IASI","title":"Locally received data","text":"","category":"section"},{"location":"Observations/Iasi/#Controlling-the-detection-of-cloud","page":"IASI","title":"Controlling the detection of cloud","text":"","category":"section"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"IASI radiances are strongly influenced by cloud. Because of inaccurate forward modelling, large background errors in cloud fields, and non-linear effects, success in the use of IASI requires careful screening and removal of cloud-affected data. In the HARMONIE data assimilation system, the screening for cloud follows the method of McNally and Watts (2003). The power of this method lies in the use of a large number of individual channels such that much of the disturbing instrument noise can be smoothed out and the cloud radiative effect is therefore more easily detected.","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"Even if the active use of IASI is limited to relatively small number of channels (such as the 55-channel subset in MetCoOp in early 2021), it is advisable to include more than 100 channels in the cloud detection channel list. Furthermore, it is important that all these channels are subjected to VarBC. To achieve the latter, one needs to make sure that blacklisting for the cloud detection channels uses the fail(EXPERIMENTAL) syntax rather than fail(CONSTANT). 
The following excerpt from src/blacklists/hirlam_blacklist.b.data_selection_after_20140601 illustrates the concept:","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":" if (SENSOR = iasi) then\n\n ! remove channels that are not used in either cloud detection\n ! or minimization\n if PRESS notin (38, 49, 51, 55, 57, 61, 63, 83, 85, 87,\n 104, 109, 111, 116, 122, 128, 135, 141, 146, 148,\n 154, 159, 161, 167, 173, 179, 180, 185, 187, 193,\n 199, 205, 207, 210, 212, 214, 217, 219, 222, 224,\n 226, 230, 232, 236, 239, 242, 243, 246, 249, 252,\n 254, 256, 258, 260, 262, 265, 267, 269, 275, 278,\n 280, 282, 284, 286, 288, 290, 292, 294, 296, 299,\n 306, 308, 310, 312, 314, 316, 318, 320, 323, 325,\n 327, 329, 331, 333, 335, 341, 345, 347, 350, 352,\n 354, 356, 358, 360, 362, 364, 366, 369, 371, 373,\n 375, 377, 379, 381, 383, 386, 389, 398, 401, 404,\n 407, 410, 414, 416, 426, 428, 432, 434, 439, 445,\n 457, 515, 546, 552, 559, 566, 571, 573, 646, 662,\n 668, 756, 867, 921, 1027, 1133, 1191, 1194, 1271, 1805,\n 1884, 1946, 1991, 2094, 2239, 2701, 2819, 2910, 2919, 2991,\n 2993, 3002, 3008, 3014, 3098, 3207, 3228, 3281, 3309, 3322,\n 3438, 3442, 3484, 3491, 3499, 3506, 3575, 3582, 3658, 4032)\n then fail(CONSTANT); endif;\n\n if PRESS notin (38, 51, 63, 85, 104, 109, 167, 173, 180, 185,\n 193, 199, 205, 207, 212, 224, 230, 236, 239, 242,\n 243, 249, 296, 333, 337, 345, 352, 386, 389, 432,\n 2701, 2819, 2910, 2919, 2991, 2993, 3002, 3008, 3014, 3098,\n 3207, 3228, 3281, 3309, 3322, 3438, 3442, 3484, 3491, 3499,\n 3506, 3575, 3582, 3658, 4032)\n then fail(EXPERIMENTAL);\n endif;\n\n endif; ! SENSOR = IASI","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"Here we provide two lists of IASI channels. The first list includes all those channels that are either used in the cloud detection, or are intended for active assimilation (or both). The second list is a subset of the first and includes just those intended for active assimilation. Only those channels included in the latter list will have a significant weight during the assimilation process.","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"The control for the cloud detection happens via namelist file at nam/IASI_CLDDET.NL. The format of the namelist file is illustrated below:","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"&NAMCLDDET\nN__Band_Size(1)=145\nN__Bands(1:145,1) =\n 38, 49, 51, 55, 57, 61, 63, 83, 85, 87,\n 104, 109, 111, 116, 122, 128, 135, 141, 146, 148,\n 154, 159, 161, 167, 173, 179, 180, 185, 187, 193,\n 199, 205, 207, 210, 212, 214, 217, 219, 222, 224,\n 226, 230, 232, 236, 239, 242, 243, 246, 249, 252,\n 254, 256, 258, 260, 262, 265, 267, 269, 275, 278,\n 280, 282, 284, 286, 288, 290, 292, 294, 296, 299,\n 306, 308, 310, 312, 314, 316, 318, 320, 323, 325,\n 327, 329, 331, 333, 335, 341, 345, 347, 350, 352,\n 354, 356, 358, 360, 362, 364, 366, 369, 371, 373,\n 375, 377, 379, 381, 383, 386, 389, 398, 401, 404,\n 407, 410, 414, 416, 426, 428, 432, 434, 439, 445,\n 457, 515, 546, 552, 559, 566, 571, 573, 646, 662,\n 668, 756, 867, 921, 1027, 1133, 1191, 1194, 1271, 1805,\n1884, 1946, 1991, 2094, 2239\n/","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"Here we specify a list of 145 channels to be included in \"band 1\" of the cloud detection, i.e., in the main cloud detection channel band. 
The setup of the cloud detection involves not just the channel list but several additional tuning parameters that can be modified to make the screening more or less conservative. The default settings are specified in src/arpifs/obs_preproc/cloud_detect_setup.F90. A comprehensive description of the cloud detection scheme, including explanations of the various tuning parameter values, is given at the NWPSAF web site https://nwp-saf.eumetsat.int/site/software/aerosol-and-cloud-detection/documentation/.","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"Log file of the Screening task will indicate whether the formatting of the namelist file is appropriate:","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":" READING CLOUD DETECTION FILE FOR IASI\n IASI CLOUD DETECTION FILE READ OK","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"In case of an error, the following is printed instead:","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":" READING CLOUD DETECTION FILE FOR IASI\n PROBLEM READING IASI CLOUD DETECTION FILE: Using Default Values","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"The third possibility is that the namelist file does not appear in the working directory, in which case the printout statement is this:","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":" READING CLOUD DETECTION FILE FOR IASI\n NO IASI CLOUD DETECTION FILE : Using Default Values","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"Please note that the use of the \"Default Values\" is generally not a desired outcome. This is because many of the cloud detection channels in the default list (see src/arpifs/obs_preproc/cloud_detect_setup.F90) are sensitive to higher stratosphere and therefore may be severely affected by the relatively low model top of limited-area HARMONIE systems.","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"References:","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"McNally, AP, and PD Watts, 2003: A cloud detection algorithm for high-spectral-resolution infrared sounders. Quarterly Journal of the Royal Meteorological Society, 129, 3411-3423, doi:10.1256/qj.02.208.","category":"page"},{"location":"Verification/Verification/#Monitor","page":"Verification","title":"Monitor","text":"","category":"section"},{"location":"Verification/Verification/","page":"Verification","title":"Verification","text":"monitor documentation","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#configure-your-experiment","page":"Experiment","title":"Experiment configuration","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Introduction","page":"Experiment","title":"Introduction","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"There are several levels on configuration available in HARMONIE. The highest level of configuration is done in ecf/config_exp.h. It includes the environment variables, which are used to control the experimentation. 
In the following we describe the meaning of the different variables, in the order they appear in ecf/config_exp.h.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Host specific paths and environment variables for your system are defined in Env_system. ","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Build-options","page":"Experiment","title":"Build options","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Build and bin paths ****\n# Definitions about Build, should fit with hm_rev\nBUILD=${BUILD-yes} # Turn on or off the compilation and binary build (yes|no)\nBUILD_WITH=${BUILD_WITH-makeup} # Which build system to use (makeup|cmake)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"BUILD is a switch for compiling HARMONIE code (yes|no) and BUILD_WITH controls which build system to use when compiling HARMONIE-AROME.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"BINDIR=${BINDIR-$HM_DATA/bin} # Binary directory","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"BINDIR is the location where your HARMONIE binaries will be installed. You can use this to point to binaries outside of your experiment. A few other options for non-default configurations exist as well:","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"COMPILE_ENKF=${COMPILE_ENKF-\"no\"} # Compile LETKF code (yes|no)\nCOMPILE_DABYFA=${COMPILE_DABYFA-\"no\"} # Compile FA/VC code (yes|no)\nSURFEX_OFFLINE_BINARIES=\"no\" # Switch to compile and use offline SURFEX binaries","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#General-settings","page":"Experiment","title":"General settings","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Misc, defined first because it's used later ****\n\nCNMEXP=HARM # Four character experiment identifier\nWRK=$HM_DATA/$CYCLEDIR # Work directory","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"CNMEXP: experiment identifier used by MASTERODB\nWRK is the work directory. The suggested path on ECMWF.atos is $SCRATCH/hm_home/${EXP}/$CYCLEDIR","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Archive-settings-(ECMWF)","page":"Experiment","title":"Archive settings (ECMWF)","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Since $SCRATCH is cleaned regularly on ECMWF some files are transferred to ECFS for more permanent storage by the scripts scr/Archive_host1 and scr/Archive_logs. 
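Archived files can later be copied back from ECFS with the ecp tool; the example below is a hypothetical sketch (user path, experiment name and date made up) for one archived history file:","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# hypothetical retrieval of an archived history file from ECFS\necp ec:/$USER/harmonie/myexp/2016/06/01/00/ICMSHHARM+0006 .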
","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Paths to archive ****\n# We need to define ARCHIVE early since it might be used further down\n\nARCHIVE_ROOT=$HM_DATA/archive # Archive root directory\nECFSLOC=ectmp # Archiving site at ECMWF-ECFS: \"ec\" or ECFS-TMP \"ectmp\"\nECFSGROUP=accord # Group in which to chgrp the ECMWF archive, \"default\" or \"accord\"\nEXTRARCH=$ARCHIVE_ROOT/extract # Archive for fld/obs-extractions","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"ARCHIVE_ROOT is the path to forecast file archive. Note that at ECMWF this directory is not a permanent storage\nEXTRARCH is the path to field extraction archive. Note that at ECMWF this directory is not a permanent storage\nECFSLOC Archiving site at ECMWF-ECFS (ectmp|ec) Note that files archived on ectmp will be lost after 90 days. If you wish your files to stay longer you should set ECFSLOC=ec. \nECFSGROUP Group in which to chgrp the ECMWF archive, (accord|default)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Running-Mode","page":"Experiment","title":"Running Mode","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Running mode ****\nRUNNING_MODE=research # Research or operational mode (research|operational)\n # operational implies that the suite will continue even if e.g.\n # observations are missing or assimilation fails\n\nSIMULATION_TYPE=nwp # Type of simulation (nwp|climate)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"RUNNING_MODE can be research or operational. Operational is more forgiving in the error handling and e.g. the assimilation will be skipped if Bator doesn't find any observations. Exceptions handled by the operational mode are written to $HM_DATA/severe_warnings.txt\nSIMULATION_TYPE Switch between nwp and climate type of simulation. The climate simulations are still in an experimental stage. ","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Model-domain-settings","page":"Experiment","title":"Model domain settings","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Horizontal domain settings. Further information is available here","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"\n# **** Model geometry ****\nDOMAIN=DKCOEXP # See definitions in scr/Harmonie_domains.pm\nTOPO_SOURCE=gmted2010 # Input source for orography. Available are (gmted2010|gtopo30)\nGRID_TYPE=LINEAR # Type of grid (LINEAR|QUADRATIC|CUBIC)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"DOMAIN defines your domain according to the settings in scr/Harmonie_domains.pm (DKCOEXP). The spectral truncation for your domain is determined from NLON and NLAT by scr/Harmonie_domains.pm. Further information on model domains are available here\nTOPO_SOURCE: Defines input source for model orography (gmted2010|gtopo30). 
Further information is available here: hi-res topography\nGRID_TYPE: This variable is used to define the spectral truncation used (LINEAR|QUADRATIC|CUBIC). GRID_TYPE is used in scr/Climate and scr/Forecast","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Vertical-levels","page":"Experiment","title":"Vertical levels","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Set the number of vertical levels to use. Further information is available here","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"VLEV=65 # Vertical level definition name\n # HIRLAM_60, MF_60,HIRLAM_40, or\n # BOUNDARIES = same number of levs as on boundary file.\n # See the other choices from scr/Vertical_levels.pl","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"VLEV is the name of the vertical levels defined in scr/Vertical_levels.pl (65). Further information is available here. If you intend to run upper air assimilation you must select the same domain and level definition for which you have derived structure functions. Read more Structure Functions","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Forecast-model","page":"Experiment","title":"Forecast model","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Higher level forecast model settings.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** High level forecast options ****\nNAMELIST_BASE=\"harmonie\" # Input for namelist generation (harmonie|alaro1)\n # harmonie : The default HARMONIE namelist base nam/harmonie_namelists.pm\n # alaro1 : For ALARO-1 baseline with only a few configurations available\n # nam/alaro1_namelists.pm\nDYNAMICS=\"nh\" # Hydrostatic or non-hydrostatic dynamics (h|nh)\nVERT_DISC=vfd # Discretization in the vertical (vfd,vfe)\n # Note that vfe does not yet work in non-hydrostatic mode\nPHYSICS=\"arome\" # Main model physics flag (arome|alaro)\nSURFACE=\"surfex\" # Surface flag (old_surface|surfex)\nDFI=\"none\" # Digital filter initialization (idfi|fdfi|none)\n # idfi : Incremental dfi\n # fdfi : Full dfi\n # none : No initialization (AROME case)\nLSPBDC=no # Spectral boundary conditions option off(no) | on(yes)\nLGRADSP=yes # Apply Wedi/Hortal vorticity dealiasing\nLUNBC=yes # Apply upper nested boundary condition","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"NAMELIST_BASE: Two different namelist sets are available (harmonie|alaro1).\nDYNAMICS: Hydrostatic or non-hydrostatic dynamics (h|nh)\nVERT_DISC: Vertical discretization (vfd,vfe)\nPHYSICS: HARMONIE uses either AROME or ALARO for its forecast model physics (arome|alaro)\nSURFACE: Surface physics flag to use either the SURFEX or the ALADIN surface scheme (surfex|old_surface)\nDFI: Digital filter initialization switch (idfi|fdfi|none). idfi - incremental dfi, fdfi - full dfi, none - no initialization. 
See Digital filter for more information\nLSPBDC: Specify whether the boundary conditions are spectral or not (yes|no)\nLGRADSP: Switch to apply vorticity dealiasing (yes|no)\nLUNBC: Switch to apply upper boundary conditions (yes|no)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Physics","page":"Experiment","title":"Physics","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Physics options.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# Highlighted physics switches\nCISBA=\"3-L\" # Type of ISBA scheme in SURFEX. Options: \"3-L\" and \"2-L\".\nCROUGH=\"NONE\" # SSO scheme used in SURFEX \"NONE\"|\"'Z01D'\"|\"'BE04'\"\nSURFEX_SEA_ICE=\"none\" # Treatment of sea ice in surfex (none|sice)\nMASS_FLUX_SCHEME=edmfm # Version of EDMF scheme (edkf|edmfm)\n # Only applicable if PHYSICS=arome\n # edkf is the AROME-MF version\n # edmfm is the KNMI implementation of Eddy Diffusivity Mass Flux scheme for Meso-scale\nHARATU=\"yes\" # Switch for HARATU turbulence scheme (no|yes)\nALARO_VERSION=0 # Alaro version (1|0)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"CISBA: If SURFACE is set to surfex this selects the type of ISBA scheme to use in SURFEX (3-L|2-L). See nam/surfex_namelists.pm Namelists\nCROUGH: If SURFACE is set to surfex this selects the sub-grid scale orography scheme used in SURFEX (NONE|Z01D|BE04). See nam/surfex_namelists.pm Namelist\nSURFEX_SEA_ICE: Treatment of sea ice in surfex (none|sice). See nam/surfex_namelists.pm\nMASS_FLUX_SCHEME: If PHYSICS is set to arome choose the mass flux scheme to be used by AROME; edkf to use the AROME-MF scheme or edmfm to use the KNMI developed scheme\nHARATU: Switch to use the HARATU turbulence scheme\nALARO_VERSION: If PHYSICS is set to alaro select the version of ALARO to use (0|1)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Assimilation","page":"Experiment","title":"Assimilation","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Data assimilation settings. More assimilation related settings, in particular what observations to assimilate, can be found in scr/include.ass","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Assimilation ****\nANAATMO=3DVAR # Atmospheric analysis (3DVAR|4DVAR|blending|none)\nANASURF=CANARI_OI_MAIN # Surface analysis (CANARI|CANARI_OI_MAIN|CANARI_EKF_SURFEX|none)\n # CANARI : Old style CANARI\n # CANARI_OI_MAIN : CANARI + SURFEX OI\n # CANARI_EKF_SURFEX : CANARI + SURFEX EKF ( experimental )\n # none : No surface assimilation\nANASURF_MODE=\"before\" # When ANASURF should be done\n # before : Before ANAATMO\n # after : After ANAATMO\n # both : Before and after ANAATMO (Only for ANAATMO=4DVAR)\nINCV=\"1,1,1,1\" # Active EKF control variables. 1=WG2 2=WG1 3=TG2 4=TG1\nINCO=\"1,1,0\" # Active EKF observation types (Element 1=T2m, element 2=RH2m and element 3=Soil moisture) \n\nSST=BOUNDARY # Which SST fields to be used in surface analysis\n # BOUNDARY : SST interpolated from the boundary file. 
ECMWF boundaries utilize a special method.\n # HIRLAM and HARMONIE boundaries apply T0M which should be SST over sea.\nLSMIXBC=no # Spectral mixing of LBC0 file before assimilation\n[ \"$ANAATMO\" = 3DVAR ] && LSMIXBC=yes\nJB_INTERPOL=no # Interpolation of structure functions from a pre-defined domain to your domain\n","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"ANAATMO: Atmospheric analysis (3DVAR|4DVAR|blending|none)\nANASURF: Surface analysis (CANARI|CANARI_OI_MAIN|CANARI_EKF_SURFEX|none). See nam/surfex_namelists.pm\nANASURF_MODE: When the surface analysis should be done (before|after|both)\nINCV: Active EKF control variables. 1=WG2 2=WG1 3=TG2 4=TG1 (0|1)\nINCO: Active EKF observation types (Element 1=T2m, element 2=RH2m and element 3=Soil moisture) (0|1)\nSST: Which sea surface temperature field to use in the surface analysis\nLSMIXBC Spectral mixing of LBC0 file before assimilation (no|yes)\nJB_INTERPOL Interpolation of structure functions from a pre-defined domain to your domain (no|yes). Note that this has to be used with some caution.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Observations","page":"Experiment","title":"Observations","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Observations ****\nOBDIR=$HM_DATA/observations # Observation file directory\nRADARDIR=$HM_DATA/radardata # Radar observation file directory\nSINGLEOBS=no # Run single obs experiment with observation created by scr/Create_single_obs (no|yes)\n\nUSE_MSG=no # Use MSG data for adjustment of initial profiles, EXPERIMENTAL! (no|yes)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"OBDIR: Defines the directory that your (BUFR) observation files (obYYYYMMDDHH) are to be read from\nRADARDIR: Defines the directory that your (OPERA HDF5) radar observation files are to be read from. BALTRAD OPERA HDF5, MF BUFR and LOCAL files are treated in scr/Prepradar\nSINGLEOBS Run single obs experiment with a synthetic observation created by scr/Create_single_obs (no|yes)\nUSE_MSG: Use MSG data for adjustment of initial profiles, EXPERIMENTAL! 
(no|yes), expects MSGcloudYYYYMMDDHH.grib in $OBDIR","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#4DVAR-settings","page":"Experiment","title":"4DVAR settings","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"4DVAR settings (experimental)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** 4DVAR ****\nNOUTERLOOP=1 # 4DVAR outer loops, needs to be 1 at present\nILRES=2,2 # Resolution (in parts of full) of outer loops\nTSTEP4D=360,360 # Timestep length (seconds) of outer loops TL+AD\nTL_TEST=yes # Only active for playfile tlad_tests\nAD_TEST=yes # Only active for playfile tlad_tests","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"NOUTERLOOP: Number of outer loops, needs to be 1 at present\nILRES: Resolution (in parts of full) of outer loops\nTSTEP4D: Timestep length (seconds) of outer loops TL+AD\nTL_TEST: Only active for playfile tlad_tests (yes|no)\nAD_TEST: Only active for playfile tlad_tests (yes|no)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#digital-filter","page":"Experiment","title":"Digital filter settings ","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Digital filter initialization settings if DFI is not equal to \"none\"","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** DFI setting ****\nTAUS=5400 # cut-off frequency in seconds\nTSPAN=5400 # 7200s or 5400s","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"TAUS cut-off frequency in seconds \nTSPAN length of DFI run in seconds","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Boundaries-and-initial-conditions","page":"Experiment","title":"Boundaries and initial conditions","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Settings for generation of lateral boundary conditions for HARMONIE. 
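The date macros used in the boundary file directory setting BDDIR (shown further down) are expanded at run time; the sketch below shows a hypothetical expansion for 2016-06-01 00 UTC with BDLIB=ECMWF:","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"BDDIR=$HM_DATA/${BDLIB}/archive/@YYYY@/@MM@/@DD@/@HH@\n# expands to (hypothetical date 2016-06-01 00 UTC):\n# $HM_DATA/ECMWF/archive/2016/06/01/00","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"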
Further information is available here","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Lateral boundary conditions ****\nHOST_MODEL=\"ifs\" # Host model (ifs|hir|ald|ala|aro)\n # ifs : ecmwf data\n # hir : hirlam data\n # ald : Output from aladin physics\n # ala : Output from alaro physics\n # aro : Output from arome physics\n\nHOST_SURFEX=\"no\" # yes if the host model is run with SURFEX\nSURFEX_INPUT_FORMAT=lfi # Input format for host model run with surfex (lfi|fa)\n\nNBDMAX=12 # Number of parallel interpolation tasks\nBDLIB=ECMWF # Boundary experiment, set:\n # ECMWF to use MARS data\n # RCRa to use RCRa data from ECFS\n # Other HARMONIE/HIRLAM experiment\n\nBDDIR=$HM_DATA/${BDLIB}/archive/@YYYY@/@MM@/@DD@/@HH@ # Boundary file directory,\n # For more information, read in scr/Boundary_strategy.pl\n\nSST_SOURCES=$HOST_MODEL # List of external SST sources like $HOST_MODEL|HIROMB|NEMO|ROMS|ECE\n # See util/gl/ala/merge_ocean.F90 for more details\nSST_IS_LSM=\"auto\" # Switch for using SST as LSM (lsm|sst|auto)\n\nINT_BDFILE=$WRK/ELSCF${CNMEXP}ALBC@NNN@ # Interpolated boundary file name and location\n\nBDSTRATEGY=simulate_operational # Which boundary strategy to follow\n # as defined in scr/Boundary_strategy.pl\n #\n # available : Search for available files in BDDIR, try to keep forecast consistency\n # This is meant to be used operationally\n # simulate_operational : Mimic the behaviour of the operational runs using ECMWF LBC,\n # i.e. 6 hour old boundaries\n # same_forecast : Use all boundaries from the same forecast, start from analysis\n # analysis_only : Use only analyses as boundaries\n # era : As for analysis_only but using ERA interim data\n # latest : Use the latest possible boundary with the shortest forecast length\n # RCR_operational : Mimic the behaviour of the RCR runs, i.e.\n # 12h old boundaries at 00 and 12 and\n # 06h old boundaries at 06 and 18\n # enda : use ECMWF ENDA data for running ensemble data assimilation\n # or generation of background statistic.\n # Note that only LL up to 9h is supported\n # with this you should set your ENSMSEL members\n # eps_ec : ECMWF EPS members (on reduced gaussian grid)\n # : Only meaningful with ENSMSEL non-empty, i.e., ENSSIZE > 0\n\nBDINT=1 # Boundary interval in hours\n\nSURFEX_PREP=\"yes\" # Use offline surfex prep facility (Alt. gl + Fullpos + prep )","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"HOST_MODEL defines the host model that provides the lateral boundary conditions for your experiment\nhir for HIRLAM.\nald for ALADIN \nala for ALARO\naro for AROME\nifs for ECMWF-IFS. \nHOST_SURFEX Set to yes if the host model runs with SURFEX. (no|yes)\nSURFEX_INPUT_FORMAT Input format for host model run with surfex (lfi|fa)\nBDLIB is the experiment to be used as boundaries. Possible values: ECMWF for IFS from MARS (default), RCRa for HIRLAM-RCR from ECFS, or another HARMONIE experiment. \nBDDIR is the boundary file directory. The possible date information in the path must be given by using UPPER CASE letters (@YYYY@=year,@MM@=month,@DD@=day,@HH@=hour,@FFF@=forecast length). \nBDSTRATEGY Which boundary strategy to follow, i.e. how to find the right boundaries with the right age and location. 
Read more\nBDINT is the boundary interval in hours.\nBDCLIM is the path to the climate files corresponding to the boundary files, when nesting HARMONIE in HARMONIE.\nINT_BDFILE is the name and location of the interpolated boundary files. These files are removed every cycle, but if you wish to save them you can specify a more permanent location here. By setting INT_BDFILE=$ARCHIVE the interpolated files will be stored in your archive directory.\nNBDMAX Number of parallel boundary interpolation tasks in mSMS. The current default value is 12.\nSST_SOURCES defines the host model used for SST & SIC\nSST_IS_LSM In the interpolation of SST/SIC from the host to the HARMONIE grid, use SST (with missing values above land) to derive the LSM, or use the actual provided LSM. The default, auto, makes a smart guess based on the host model.\nSURFEX_PREP Use the SURFEX tool PREP instead of gl+FULLPOS to prepare SURFEX initial conditions. This is now the default. The gl+FULLPOS version is still working but will not be maintained in the future (no|yes)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Read more about the boundary file preparation here.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Ensemble-mode-settings","page":"Experiment","title":"Ensemble mode settings","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# *** Ensemble mode general settings. ***\n# *** For member specific settings use msms/harmonie.pm ***\nENSMSEL= # Ensemble member selection, comma separated list, and/or range(s):\n # m1,m2,m3-m4,m5-m6:step mb-me == mb-me:1 == mb,mb+1,mb+2,...,me\n # 0=control. ENSMFIRST, ENSMLAST, ENSSIZE derived automatically from ENSMSEL.\nENSINIPERT= # Ensemble perturbation method (bnd). Not yet implemented: etkf, hmsv.\nENSCTL= # Which member is my control member? Needed for ENSINIPERT=bnd. See harmonie.pm.\nENSBDMBR= # Which host member is used for my boundaries? Use harmonie.pm to set.\nENSMFAIL=0 # Failure tolerance for all members.\nENSMDAFAIL=0 # Failure tolerance for members doing own DA. Not implemented.\nSLAFK=1.0 # best set in harmonie.pm\nSLAFLAG=0 # --- \" ---\nSLAFDIFF=0 # --- \" ---\n\n# *** This part is for EDA with observations perturbation\nPERTATMO=none # ECMA : In-line observation perturbation using the default IFS way.\n \t\t\t# CCMA : Perturbation of the active observations only (CCMA content)\n\t \t\t# before the Minimization, using the PERTCMA executable.\n \t\t\t# none : no perturbation of upper-air observations\n\nPERTSURF=none # ECMA : perturb also the surface observation before Canari (recommended\n \t\t\t# : for EDA to have full perturbation of the initial state).\n # model : perturb surface fields in grid-point space (recursive filter)\n\t\t\t # none : no perturbation for surface observations.\n\nFESTAT=no # Extract differences and do Jb calculations (no|yes)\n","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"ENSMSEL Ensemble member selection, comma separated list, and/or range(s):\n # m1,m2,m3-m4,m5-m6:step mb-me == mb-me:1 == mb,mb+1,mb+2,...,me\n # 0=control. ENSMFIRST, ENSMLAST, ENSSIZE derived automatically from ENSMSEL.\nENSINIPERT Ensemble perturbation method (bnd). Not yet implemented: etkf, hmsv, slaf.\nENSMFAIL Failure tolerance for all members. Not yet implemented.\nENSMDAFAIL Failure tolerance for members doing own DA. Not yet implemented.\nENSCTL Which member is my control member? Needed for ENSINIPERT=bnd. See harmonie.pm.\nENSBDMBR Which host member is used for my boundaries? Use harmonie.pm to set.\nSLAFK Perturbation coefficients for SLAF, experimental\nSLAFLAG Time lag for boundaries in SLAF, experimental","category":"page"},
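{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"As an illustration of the ENSMSEL syntax above (the member values are hypothetical), single members, ranges and strided ranges can be combined:","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"ENSMSEL=0,1,3-5 # selects members 0,1,3,4,5 (ENSSIZE=5)\nENSMSEL=0-6:2 # selects members 0,2,4,6 (ENSSIZE=4)","category":"page"},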
{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"For member dependent settings see msms/harmonie.pm.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"PERTATMO Observation perturbation with three options \nECMA : In-line observation perturbation using the default IFS way.\nCCMA : Perturbation of the active observations only (CCMA content) before the Minimization, using the PERTCMA executable.\nnone : no perturbation of upper-air observations\nPERTSURF Perturbation of surface observations before Canari (ECMA|model|none). ECMA is recommended for EDA to have full perturbation of the initial state.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"FESTAT Extract differences and do Jb calculations (no|yes). Read more about the procedure here.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Aerosol-choices","page":"Experiment","title":"Aerosol choices","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"USEAERO influences the import of near-real-time aerosol via boundaries and the use of aerosol in the forecast model. It selects the use of n.r.t., climatology or no aerosol. When USEAERO=climaero, CAERO selects data for monthly climate file generation from existing sources (4 species of Tegen or CAMS AOD@550nm or 11 species of CAMS vertically integrated mass). CAMS aerosol mass climatology [camscms] and MOCAGE n.r.t. [mocanrt] are currently not available within the HARMONIE forecast system. ","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Aerosol choices [USEAERO implies aerosol usage in physics via forecast_model_settings!] ****\nUSEAERO=camsnrt # Aerosol usage: camsnrt | climaero | noaero | [mocanrt]\nCAERO=tegenaod # Aerosol climatology generation: tegenaod | [camscms not yet available] ","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Climate-file-settings","page":"Experiment","title":"Climate file settings","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Climate file generation settings. 
Further information is available here","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Climate files ****\nCREATE_CLIMATE=${CREATE_CLIMATE-yes} # Run climate generation (yes|no)\nCLIMDIR=$HM_DATA/climate/$DOMAIN # Climate files directory\nBDCLIM=$HM_DATA/${BDLIB}/climate # Boundary climate files (ald2ald,ald2aro)\n # This should point to intermediate aladin\n # climate file in case of hir2aro,ifs2aro processes.\nECOCLIMAP_PARAM_BINDIR=$HM_DATA/climate # Binary cover param files directory\n\n# Physiography input for SURFEX\nECOCLIMAP_VERSION=SG # Version of ECOCLIMAP for surfex\n # Available versions are 1.1-1.5,2.0-2.2,2.2.1,2.5_plus and SG\n # FLake requires 2.5_plus or SG\nXSCALE_H_TREE=1.0 # Scale the tree height with this factor\n# Activate inclusion of fake trees for open land VEGTYPEs. The vector positions represent:\n# 1 NVT_BOGR, 2 NVT_GRAS, 3 NVT_TROG, 4 NVT_C3W, 5 NVT_C3S, 6 NVT_C4, 7 NVT_FLGR\nLFAKETREE=.F.,.F.,.F.,.F.,.F.,.F.,.F.\nLDB_VERSION=3.0 # Lake database version.\nSOIL_TEXTURE_VERSION=SOILGRID # Soil texture input data FAO|HWSD_v2|SOILGRID|SOILGRID_v2\n\n# Path to pre-generated domains, in use if USE_REF_CLIMDIR=yes set in Env_system\n# Saves time for quick experiments\nREF_CLIMDIR=ec:/hlam/harmonie_climdir/release-43h2.1.1/$DOMAIN/$GRID_TYPE/$ECOCLIMAP_VERSION\n","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"CREATE_CLIMATE: Run climate generation (yes|no). If you already have a full set of climate files generated in CLIMDIR you can set this flag to no for a faster run.\nCLIMDIR: path to the generated climate files for your specific domain. The input data for the climate generation is defined by HM_CLDATA defined in Env_system -> config-sh/config.YOURHOST\nBDCLIM: path to intermediate climate files\nECOCLIMAP_PARAM_BINDIR: Directory where the binary version of the ECOCLIMAP 1st generation parameter files will be stored.\nECOCLIMAP_VERSION is the version of ECOCLIMAP to be used with SURFEX. Available versions are 1.1-1.5,2.0-2.2,2.2.1,2.5_plus,SG. See surfex_namelists.pm Namelist\nXSCALE_H_TREE: A factor that scales the original tree height that comes from the database.\nLFAKETREE: Only relevant for ECOCLIMAP_VERSION=SG. It activates the inclusion of fake trees for open land VEGTYPEs to increase the roughness length. The vector positions represent 1 NVT_BOGR, 2 NVT_GRAS, 3 NVT_TROG, 4 NVT_C3W, 5 NVT_C3S, 6 NVT_C4, 7 NVT_FLGR.\nLDB_VERSION: Specifies the version of the Global Lake Database used for FLake.\nSOIL_TEXTURE_VERSION Soil texture input data (FAO|HWSD_v2|SOILGRID|SOILGRID_v2). See surfex_namelists.pm for more info.\nREF_CLIMDIR: Specifies the location of possible pre-generated domains. 
Is used if USE_REF_CLIMDIR=yes is set in Env_system.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Archiving-settings","page":"Experiment","title":"Archiving settings","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Archiving settings ****\nARCHIVE_ECMWF=yes # Archive to $ECFSLOC at ECMWF (yes|no)\n# Archiving selection syntax, settings done below\n#\n# [fc|an|pp]_[fa|gr|nc] : Output from\n# an : All steps from upper air and surface analysis\n# fc : Forecast model state files from upper air and surfex\n# pp : Output from FULLPOS and SURFEX_LSELECT=yes (ICMSHSELE+NNNN.sfx)\n# in any of the formats if applicable\n# fa : FA files\n# gr : GRIB[1|2] files\n# nc : NetCDF files\n# sqlite|odb|VARBC|bdstrategy : odb and sqlite files stored in odb_stuff.tar\n# fldver|ddh|vobs|vfld : fldver/ddh/vobs/vfld files\n# climate : Climate files from PGD and E923\n# Some macros\n# odb_stuff=odb:VARBC:bdstrategy:sqlite\n# verif=vobs:vfld\n# fg : Required files to run the next cycle","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Forecast-output","page":"Experiment","title":"Forecast output","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Cycles to run, and their forecast length ****\n\nTFLAG=\"h\" # Time flag for model output. (h|min)\n # h = hour based output\n # min = minute based output\n\n\n# The unit of HWRITUPTIMES, FULLFATIMES, ..., SFXFWFTIMES should be:\n# - hours if TFLAG=\"h\"\n# - minutes if TFLAG=\"min\"\n\n# Writeup times of history, surfex and fullpos files\n# Comma separated list, and/or range(s) like:\n# t1,t2,t3-t4,t5-t6:step tb-te == tb-te:1 == tb,tb+1,tb+2,...,te\n\nif [ -z \"$ENSMSEL\" ]; then\n # Standard deterministic run\n HH_LIST=\"00-21:3\" # Which cycles to run, replaces FCINT\n LL_LIST=\"12,3\" # Forecast lengths for the cycles [h], replaces LL, LLMAIN\n # The LL_LIST list is wrapped around if necessary, to fit HH_LIST\n HWRITUPTIMES=\"00-21:3,24-60:6\" # History file output times\n FULLFAFTIMES=$HWRITUPTIMES # History FA file IO server gather times\n PWRITUPTIMES=\"00-60:3\" # Postprocessing times\n PFFULLWFTIMES=-1 # Postprocessing FA file IO server gathering times\n VERITIMES=\"00-60:1\" # Verification output times, may change PWRITUPTIMES\n SFXSELTIMES=$HWRITUPTIMES # Surfex select file output times\n # Only meaningful if SURFEX_LSELECT=yes\n SFXSWFTIMES=-1 # SURFEX select FA file IO server gathering times\n SWRITUPTIMES=\"00-06:3\" # Surfex model state output times\n SFXWFTIMES=$SWRITUPTIMES # SURFEX history FA file IO server gathering times\n if [ \"$SIMULATION_TYPE\" == climate ]; then #Specific settings for climate simulations\n HWRITUPTIMES=\"00-760:6\" # History file output times\n FULLFAFTIMES=\"00-760:24\" # History FA file IO server gather times\n PWRITUPTIMES=$HWRITUPTIMES # Postprocessing times\n VERITIMES=$HWRITUPTIMES # Verification output times, may change PWRITUPTIMES\n SFXSELTIMES=$HWRITUPTIMES # Surfex select file output times - Only meaningful if SURFEX_LSELECT=yes\n SWRITUPTIMES=\"00-760:12\" # Surfex model state output times\n SFXWFTIMES=$SWRITUPTIMES # SURFEX history FA file IO server gathering times\n fi\n\n ARSTRATEGY=\"climate:fg:verif:odb_stuff: \\\n [an|fc]_fa:pp_grb\" # Files to archive on ECFS, see above for syntax\n\nelse\n # EPS settings\n HH_LIST=\"00-21:3\" # Which cycles to run, 
replaces FCINT\n LL_LIST=\"36,3,3,3\" # Forecast lengths for the cycles [h], replaces LL, LLMAIN\n HWRITUPTIMES=\"00-06:3\" # History file output times\n FULLFAFTIMES=$HWRITUPTIMES # History FA file IO server gather times\n PWRITUPTIMES=\"00-48:1\" # Postprocessing times\n PFFULLWFTIMES=-1 # Postprocessing FA file IO server gathering times\n VERITIMES=\"00-60:3\" # Verification output times, may change PWRITUPTIMES\n SFXSELTIMES=$HWRITUPTIMES # Surfex select file output times\n # Only meaningful if SURFEX_LSELECT=yes\n SFXSWFTIMES=-1 # SURFEX select FA file IO server gathering times\n SWRITUPTIMES=\"00-06:3\" # Surfex model state output times\n SFXWFTIMES=$SWRITUPTIMES # SURFEX history FA file IO server gathering times\n\n ARSTRATEGY=\"climate:fg:verif:odb_stuff: \\\n an_fa:pp_grb\" # Files to archive on ECFS, see above for syntax\n\nfi\n","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"The writeup times of model output can be defined as a space separated list or as a fixed frequency for model history files, surfex files and postprocessed files respectively. The unit of the steps of WRITUPTIMES, SWRITUPTIMES, PWRITUPTIMES and OUTINT should be in hours or minutes depending on TFLAG. A regular output interval can be switched on by setting OUTINT>0. Consequently, OUTINT will override the WRITUPTIMES lists!","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"TFLAG: Time flag for model output. Hourly or minute-based output (h|min)\nHWRITUPTIMES: Output list for history files. Default is 00-21:3,24-60:6 which will output files every 3 hours for 00-21 and every 6 hours for 24-60.\nVERITIMES: Output list for verification files. Default is 00-60:1 which will produce files every hour for 00-60\nSWRITUPTIMES Output list for surfex files. Default is 00-06:3 which will output a SURFEX file every 3 hours for 00-06.\nPWRITUPTIMES Output list for fullpos (post-processed) files. Default is 00-21:3,24-60:6 which will output files every 3 hours for 00-21 and every 6 hours for 24-60.","category":"page"},
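{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"As an illustration of this list syntax, the default history output list expands as sketched below:","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"HWRITUPTIMES=\"00-21:3,24-60:6\"\n# -> 00,03,06,09,12,15,18,21,24,30,36,42,48,54,60","category":"page"},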
{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"SURFEX_LSELECT=\"yes\" # Only write selected fields in surfex output files. (yes|no)\n # Check nam/surfex_selected_output.pm for details.\n # Not tested with lfi files.\nINT_SINI_FILE=$WRK/SURFXINI.fa # Surfex initial file name and location\n\n# **** Postprocessing/output ****\nIO_SERVER=yes # Use IO server (yes|no). Set the number of cores to be used\n # in your Env_submit\nIO_SERVER_BD=yes # Use IO server for reading of boundary data\nPOSTP=\"inline\" # Postprocessing by Fullpos (inline|offline|none).\n # See Setup_postp.pl for selection of fields.\n # inline: this is run inside of the forecast\n # offline: this is run in parallel to the forecast in a separate task\n\nFREQ_RESET_TEMP=3 # Reset frequency of max/min temperature values in hours, controls NRAZTS\nFREQ_RESET_GUST=1 # Reset frequency of max/min gust values in hours, controls NXGSTPERIOD\n # Set to -1 to get the same frequency _AND_ reset behaviour as for min/max temperature\n # See yomxfu.F90 for further information.\n","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"SURFEX_LSELECT: Switch to write a selection of fields in SURFEX output files (yes|no). See surfex_selected_output.pm for more info. Namelist\nINT_SINI_FILE: name and location of the initial SURFEX file\nARCHIVE_ECMWF: archive files to ECFSLOC at ECMWF (yes|no)\nIO_SERVER: Use IO server (yes|no). If set to \"yes\" changes may be required in Env_submit -> config-sh/submit.YOURHOST\nPOSTP: Postprocessing by Fullpos (inline|offline|none).\nFREQ_RESET_[TEMP|GUST]: Reset frequency of max/min values in hours, controls NRAZTS. Default is every 3/1 hours","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** GRIB ****\nCONVERTFA=yes # Conversion of FA file to GRIB/nc (yes|no)\nARCHIVE_FORMAT=GRIB1 # Format of archive files (GRIB1|GRIB2|nc). nc format yet only available in climate mode\nNCNAMES=nwp # Naming of NetCDF files follows (climate|nwp) convention.\nRCR_POSTP=no # Produce a subset of fields from the history file for RCR monitoring\n # Only applicable if ARCHIVE_FORMAT=GRIB\nMAKEGRIB_LISTENERS=1 # Number of parallel listeners for Makegrib\n # Only applicable if ARCHIVE_FORMAT=GRIB\n","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"More options on fullpos postprocessing can be found in scr/Select_posp.pl","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"CONVERTFA: Conversion of FA files to GRIB or NetCDF (yes|no)\nARCHIVE_FORMAT: Format of archive files (GRIB1|GRIB2|nc). NetCDF format is currently only available in climate mode\nRCR_POSTP: Produce a subset of fields from the history file for RCR monitoring (yes|no). This is only applicable if ARCHIVE_FORMAT=GRIB1|GRIB2\nMAKEGRIB_LISTENERS: Number of parallel listeners for Makegrib. Only applicable if ARCHIVE_FORMAT=GRIB1|GRIB2","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"More options on file conversion can be found in scr/Makegrib","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Verification-and-monitoring","page":"Experiment","title":"Verification and monitoring","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Verification extraction ****\nOBSEXTR=yes # Extract observations from BUFR (yes|no)\nFLDEXTR=yes # Extract model data for verification from model files (yes|no)\nFLDEXTR_TASKS=1 # Number of parallel tasks for field extraction\nVFLDEXP=$EXP # Experiment name on vfld files\nSCREXTR=no # Use Screening (NCONF=002) to produce O-F data\nSCREXTR_TASKS=1 # Number of parallel tasks for O-F extraction\nFGREFEXP=${FGREFEXP-undef} # reference experiment name for FirstGuess\nOBREFEXP=${OBREFEXP-undef} # reference experiment name for ODBs","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"OBSEXTR: Extract observations for verification from BUFR (yes|no)\nFLDEXTR: Extract model data for verification from model files (yes|no)\nFLDEXTR_TASKS: Number of parallel tasks for field extraction\nVFLDEXP: Change the vfld file name to this string\nSCREXTR: Switch on extraction of O-F data for verification using Screening (NCONF=002)\nSCREXTR_TASKS: Number of parallel tasks for O-F extraction\nFGREFEXP=${FGREFEXP-undef}: reference experiment name for FirstGuess (useful with PLAYFILE=allobsver)\nOBREFEXP=${OBREFEXP-undef}: reference experiment name for ODBs 
(useful with PLAYFILE=allobsver)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Read more about the verification package here","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Field-verification","page":"Experiment","title":"Field verification","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# *** Field verification ***\nFLDVER=no # Main switch for field verification (yes|no)\nFLDVER_HOURS=\"06 12 18 24 30 36 42 48\" # Hours for field verification","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"FLDVER Main switch for field verification (yes|no). The field verification extracts some selected variables for calculation of bias, rmse, stdv and averages on the model grid.\nFLDVER_HOURS Hours for field verification","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"More options on field verification can be found in scr/Fldver and scr/AccuFldver","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Observation-monitoring-and-general-diagnostics","page":"Experiment","title":"Observation monitoring and general diagnostics","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# *** Observation monitoring ***\nOBSMONITOR=obstat # Create Observation statistics plots\n # Format: OBSMONITOR=Option1:Option2:...:OptionN\n # obstat: Daily usage maps and departures\n # no: Nothing at all\n #\n # obstat is only active if ANAATMO != none\nOBSMON_SYNC=no # Sync obsmon sqlite tables from HOST1 (if set) to HOST0 (yes|no)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"OBSMONITOR Selection for observation statistics plots","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"obstat Observations usage. 
Read more here.\nno No monitoring","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Note that this is only active if ANAATMO != none","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Mail-addresses","page":"Experiment","title":"Mail addresses","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# Recipient(s) to send mails to (you@work,you@home)\nMAIL_ON_ABORT= # when a task aborts\nMAIL_TESTBED= # testbed results summary","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"MAIL_ON_ABORT e-mail address to send a mail to if a task fails in ecFlow\nMAIL_TESTBED e-mail address to send a mail to with a summary of the testbed results","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Testbed","page":"Experiment","title":"Testbed","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"export TESTBED_LIST=\"AROME AROME_1D AROME_3DVAR \\\n AROME_BD_ARO AROME_BD_ARO_IO_SERV \\\n HarmonEPS HarmonEPS_IFSENS \\\n AROME_CLIMSIM\"","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"TESTBED_LIST contains the configurations that will be run in the testbed","category":"page"},{"location":"DataAssimilation/MTEN/#Moist-Total-Energy-Norm-(MTEN)-diagnostic","page":"MTEN","title":"Moist Total Energy Norm (MTEN) diagnostic","text":"","category":"section"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"MTEN shows the sensitivity of the forecast model to different observations withdrawn from the full analysis system. There are two ways of computing the MTEN diagnostic. A special branch was created in CY40 (see below) where the MTEN diagnostic can be requested. This approach uses the Harmonie ensemble system to perform a series of independent observation-denial runs. This means that the following settings are used in msms/harmonie.pm","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":" 'ENSBDMBR' => [ 0 ],\n 'ENSCTL' => [ '000', '001', '002', '003', '004', '005', '006', '007' ],\n 'AIRCRAFT_OBS' => [ 0, 1, 1, 1, 1, 1, 1, 1],\n 'BUOY_OBS' => [ 1, 0, 1, 1, 1, 1, 1, 1],\n 'AMSUA_OBS' => [ 1, 1, 0, 1, 1, 1, 1, 1],\n 'AMSUB_OBS' => [ 1, 1, 1, 0, 1, 1, 1, 1],\n 'POL_OBS' => [ 1, 1, 1, 1, 0, 1, 1, 1],\n 'HRW_OBS' => [ 1, 1, 1, 1, 1, 0, 1, 1],\n 'TEMP_OBS' => [ 1, 1, 1, 1, 1, 1, 0, 1],\n 'IASI_OBS' => [ 1, 1, 1, 1, 1, 1, 1, 0],","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"In this particular example, we are interested in the impact of aircraft, buoy, AMSU-A, AMSU-B/MHS, polar wind, high-resolution geowind, radiosonde, and IASI observations. 
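In this matrix each ensemble member switches off exactly one observation type (member 000 denies aircraft data, member 001 buoy data, and so on), so comparing each member with the reference run isolates the impact of the denied observation type. 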
This setting is activated in ecf/config_exp.h with the following choice:","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"export REFEXP DOMTEN\nexport SYNOP_OBS=1 # All synop\nexport AIRCRAFT_OBS=1 # AMDAR, AIREP, ACARS\nexport BUOY_OBS=1 # Buoy\nexport POL_OBS=1 # Satob polar winds\nexport GEO_OBS=0 # Satob geo winds\nexport HRW_OBS=1 # Satob HRWind\nexport TEMP_OBS=1 # TEMP, TEMPSHIP\nexport PILOT_OBS=1 # Pilot, Europrofiler\nexport SEVIRI_OBS=0 # Seviri radiances\nexport AMSUA_OBS=1 # AMSU-A\nexport AMSUB_OBS=1 # AMSU-B, MHS\nexport IASI_OBS=1 # IASI\nexport PAOB_OBS=0 # PAOB not defined everywhere\nexport SCATT_OBS=0 # Scatterometer data not defined everywhere\nexport LIMB_OBS=0 # LIMB observations, GPS Radio Occultations\nexport RADAR_OBS=0 # Radar\nexport GNSS_OBS=0 # GNSS","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"Here REFEXP is the reference experiment (see below), and DOMTEN (yes|no) activates the MTEN choice when fetching the first-guess and the VarBC files for the MTEN computation, as follows in scr/Fetch_assim_data:","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"if [ ${DOMTEN} = \"yes\" ]; then\n HM_REFEXP=/sbt/harmonie/$REFEXP\n adir=${ECFSLOC}:${HM_REFEXP}/$YY/$MM/$DD/$HH\nelse\n adir=$( ArchDir $HM_EXP $YY $MM $DD $HH )\nfi","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"and in scr/FirstGuess (note that this occurs twice in the script):","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"if [ ${DOMTEN} = \"yes\" ]; then\n HM_REFEXP=/sbt/harmonie/$REFEXP\n adir=${ECFSLOC}:${HM_REFEXP}/$FGYY/$FGMM/$FGDD/$FGHH\nelse\n adir=$( ArchDir $HM_EXP $FGYY $FGMM $FGDD $FGHH )\nfi","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"The MTEN can also be computed using a deterministic system. In this case, you need to take care of the first-guess and the VarBC files, which should come from the reference experiment. You need to carefully set the choice of the observations to be tested in scr/include.ass. 
In this case, you need to adapt the above Fetch_assim_data and FirstGuess scripts accordingly.","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"The MTEN diagnostic, similarly to DFS, is case sensitive, so it is better to make the computations with times and dates sufficiently far apart (by 5 days or more).","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"The MTEN can be computed as in the example below:","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":" for EXP in EXP1 EXP2; do\n for RANGE in 06 12 18 24 30 36 42 48;\n do\n\n YY=`echo $DTG | cut -c 1-4`\n mm=`echo $DTG | cut -c 5-6`\n dd=`echo $DTG | cut -c 7-8`\n hh=`echo $DTG | cut -c 9-10`\n # -- Get the FA files\n # ===================\n ecp ec:/$USER/harmonie/$REFEXP/$YY/$mm/$dd/$hh/ICMSHHARM+00$RANGE ./FAREF$RANGE\n ecp ec:/$USER/harmonie/${EXP}/$YY/$mm/$dd/$hh/ICMSHHARM+00$RANGE ./${EXP}$RANGE\n $MTEN_BIN/MTEN ./FAREF$RANGE ./${EXP}$RANGE\n\n done\n done\n","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"See (Storto and Randriamampianina, 2010) for more details.","category":"page"},{"location":"DataAssimilation/Surface/CANARI_EKF_SURFEX/#canari_ekf_surfex","page":"CANARI EKF SURFEX","title":"Surface variables assimilated / read in EKF_MAIN","text":"","category":"section"},{"location":"DataAssimilation/Surface/CANARI_EKF_SURFEX/","page":"CANARI EKF SURFEX","title":"CANARI EKF SURFEX","text":"From cycle 37 EKF is implemented in research/development mode. The following tiles and variables are modified:","category":"page"},{"location":"DataAssimilation/Surface/CANARI_EKF_SURFEX/#NATURE","page":"CANARI EKF SURFEX","title":"NATURE","text":"","category":"section"},{"location":"DataAssimilation/Surface/CANARI_EKF_SURFEX/#WG2/WG1/TG2/TG1","page":"CANARI EKF SURFEX","title":"WG2/WG1/TG2/TG1","text":"","category":"section"},{"location":"DataAssimilation/Surface/CANARI_EKF_SURFEX/","page":"CANARI EKF SURFEX","title":"CANARI EKF SURFEX","text":"The uppermost two levels of soil moisture and temperature in ISBA are assimilated; with CANARI/CANARI_OI_MAIN this is done by an OI method, with CANARI_EKF_SURFEX by an Extended Kalman Filter (EKF).","category":"page"},{"location":"DataAssimilation/Surface/CANARI_EKF_SURFEX/","page":"CANARI EKF SURFEX","title":"CANARI EKF SURFEX","text":"For 2012 it is planned to re-write OI_MAIN/EKF_MAIN to be the same binary in order to be able to apply the work done for OI_MAIN in EKF_MAIN and thus reduce the maintenance costs.","category":"page"},{"location":"DataAssimilation/StructureFunctions/#structure-functions","page":"Structure functions","title":"Derivation of Structure Functions","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/#General","page":"Structure functions","title":"General","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"For each new model domain, in order to carry out upper air data assimilation (3DVAR or 4DVAR) one needs to generate background error covariances (generally referred to as structure functions). The recommended procedure is to use a two-step approach. 
In step one you generate background error statistics by downscaling. This is needed because you do not yet have statistics for your domain setup with this forecast model version and physics options, so you cannot run data assimilation (unless you use statistics from an old system, possibly derived from a slightly different domain and with a different model version, which is not recommended). In step 2 you then use the statistics derived in step 1 to generate the final background error statistics files by applying ensemble data assimilation within the HARMONIE-AROME modelling system.","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"In step 1 structure functions are generated from difference fields between ensemble members of HARMONIE-AROME forecasts. These are obtained from downscaling of ECMWF EDA ensemble forecasts. To alleviate spin-up issues, these phase 1 downscaled HARMONIE-AROME forecasts are run up to 6 hours, and differences are generated from these. Using the ECMWF LBC data, 6h HARMONIE ensemble forecasts are initiated from ECMWF 6h forecasts daily at 00 UTC and 12 UTC, with ECMWF forecasts as initial and lateral boundary conditions. To obtain stable statistics, it is recommended to run 4 ensemble members for two chosen one-month episodes. The episodes should sample different seasons. Therefore it is recommended to run for one winter month and one summer month, for example June 2016 and January 2017. These periods are chosen so as to benefit from the latest upgrade to ECMWF's EDA system. Thereby we sample both seasonal and daily (00 UTC and 12 UTC) variations. After running the ensembles, the archived results (6h forecasts) are processed to generate structure functions by running a program called 'festat'. Festat will be run automatically within the SMS system when DTGEND is approached by an experiment, and the statistics will be based on difference files generated by the intermediate program femars (software that generates binary GRIB files of forecast differences after each cycle) and stored on ecfs in ec:/$uid/harmonie/$exp/femars. This means that if you start by running a one-month experiment for January, the structure functions generated when you reach DTGEND will be for January. When you use the same experiment name and also launch an experiment for July, the background error statistics obtained when you reach DTGEND will be based on both the January and July difference files (since both of those are now found in ec:/$uid/harmonie/$exp/femars). These combined winter/summer background error statistics files from phase one are the final product of step 1 and are the intermediate background error statistics files to plug into the HARMONIE-AROME data assimilation of step 2. It should be mentioned that there is a possibility for the more advanced user to run festat off-line with any combination of January-July forecast difference files from ec:/$uid/harmonie/$exp/femars. That is described in more detail further below and is something you might want to do with forecast difference files generated from step 2 to produce monthly background error statistics files by combining them in different ways.","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"In step 2 we again run two one-month ensemble experiments for the same January and July months, again utilizing ECMWF EDA forecasts as lateral boundary conditions. 
Again you use 4 ensemble members. The important difference compared to step 1 is that you now carry out ensemble data assimilation also within the HARMONIE-AROME framework. You use the background error statistics from phase 1 and do the EDA within a data assimilation cycle. This has the important advantage that you significantly reduce the spin-up caused by the HARMONIE-AROME model adjusting to the ECMWF EDA initial states. Because of this we can in step 2 derive the statistics from +3h forecast differences (rather than the +6h used in step 1). ","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"Note that there are methods to circumvent step 1 and to technically run 3/4DVAR using structure functions derived from another HARMONIE model domain. Such existing methods include aspects such as horizontal truncation or extrapolation of horizontal spectra and possibly vertical interpolation between vertical level geometries. Since the recommended procedure is to use the two-step approach described above, these alternative methods are not described in detail. Furthermore it should be noted that there are background error covariance related tuning coefficients REDNMC and REDZONE. The settings of these values are not covered here. If you have a new domain you will use the default value 0.6 for REDNMC and 100 for REDZONE, which are considered appropriate values for the derivation of structure functions. If you re-derive your statistics for an existing domain you will use the REDNMC and REDZONE values as assigned in scr/include.ass. ","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"There are various existing tools for investigating your newly derived structure functions, and at the end of this page there is some documentation of these tools and how to use them. ","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"The procedure for generating structure functions from an ensemble of forecasts is described below for an AROME setup with 2.5 km horizontal resolution and 65 vertical levels. The experiment is run for a one-month winter period followed by a one-month summer period on the ECMWF computing system. Forecast differences are derived twice a day (00 UTC and 12 UTC forecasts) from combinations of the four ensemble members. Besides the scientific recommendation to cover many different weather situations there is also a mathematical constraint that the number of forecast difference files provided to festat needs to be larger than the number of vertical levels used in the forecast model integration. 
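As an illustrative count (assuming differences are formed from all member pairs): with 4 members there are 6 member pairs per analysis time, and with differences taken twice a day a 30-day month yields around 360 difference files, comfortably above the 65 vertical levels used here. 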
The other sections deal with how to diagnose the structure functions, recent and ongoing work, and future development plans.","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"With a view to future enhancements regarding the handling and diagnosis of B statistics, it is recommended to save all generated forecast difference files as well as stabal.cv, stabal.cvt and stabal.bal and the generated .xy and .y files (.cvt, .xy and .y for diagnostic purposes):","category":"page"},{"location":"DataAssimilation/StructureFunctions/#Generating-background-error-statistics-(using-43h2.2)","page":"Structure functions","title":"Generating background error statistics (using 43h2.2)","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"The following instructions are valid for trunk and any 43h2.2 tags that have been created. These instructions will only work at ECMWF. If you have a new domain (or are not sure), you should follow that route in step 1 below. New domain creation is described in ModelDomain, which links to the useful Domain Creation Tool ","category":"page"},{"location":"DataAssimilation/StructureFunctions/#STEP-1-Downscaling","page":"Structure functions","title":"STEP 1 Downscaling","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"Create a new experiment on ECMWF:\nIn case you have an existing domain setup do:\nmkdir -p $HOME/hm_home/jbdownexp\ncd $HOME/hm_home/jbdownexp\n~hlam/Harmonie setup -c JBDSC -r ~hlam/harmonie_release/git/tags/harmonie-43h2.2.1 -d DOMAIN # where DOMAIN is the name of your domain\nIn case you are creating structure functions for a new domain (or you are not sure):\nmkdir -p $HOME/hm_home/jbdownexp\ncd $HOME/hm_home/jbdownexp\n~hlam/Harmonie setup -c JBDSC -r ~hlam/harmonie_release/git/tags/harmonie-43h2.2.1\n~hlam/Harmonie co scr/Harmonie_domains.pm\nThen edit scr/Harmonie_domains.pm and add your new domain definition.\nThe ensemble that will be used to generate the structure functions needs to be defined in suites/harmonie.pm. 
An edited ensemble configuration file should define a four-member ensemble that only varies the boundary member input (ENSBDMBR) as follows:\n%env = (\n# 'ANAATMO' => { 0 => '3DVAR' },\n# 'HWRITUPTIMES' => { 0 => '00-21:3,24-60:6' },\n# 'SWRITUPTIMES' => { 0 => '00-06:3' },\n# 'HH_LIST' => { 0 => '00-21:3' },\n# 'LL_LIST' => { 0 => '36,3' },\n# 'LSMIXBC' => { 0 => 'no' },\n# 'ANASURF' => { 0 => 'CANARI_OI_MAIN' },\n 'ENSCTL' => [ '001', '002', '003', '004'],\n# 'OBSMONITOR' => [ 'obstat'],\n# SLAFLAG: Forecast length to pick your perturbation end point from\n# SLAFDIFF: Hours difference to pick your perturbation start point from\n# SLAFLAG=24, SLAFDIFF=6 will use +24 - +18\n# SLAFDIFF=SLAFLAG will retain the original SLAF construction\n# SLAFK should be tuned so that all members have the same perturbation size\n 'ENSBDMBR' => [ 1,2,3,4],\n# 'SLAFLAG' => [ 0, 6, 6, 12, 12, 18, 18, 24, 24, 30, 30],\n# 'SLAFDIFF' => [ 0, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6],\n# 'SLAFK' => ['0.0','1.75','-1.75','1.5','-1.5','1.2','-1.2','1.0','-1.0','0.9','-0.9'],\n# When using ECMWF ENS the members should be defined\n# # 'ENSBDMBR' => [ 0, 1..10],\n\n### Normally NO NEED to change the settings below\nRun for two one-month (30 day) periods:\ncd $HOME/hm_home/jbdownexp\n~hlam/Harmonie start DTG=2016060100 DTGEND=2016070100\n#\n#~hlam/Harmonie start DTG=2017010100 DTGEND=2017013100\nGenerate the statistics using festat standalone:\nPlace yourself at $TEMP on ECMWF\nCopy Festat.standalone to $TEMP on ECMWF\nEdit the script to reflect your user and experiment details (in particular copy the femars data ec:/$uid/harmonie/jbdownexp/femars/ to your femars directory on $TEMP)\nSubmit with\nqsub ./Festat.standalone\nYou will get a log file festat.log on $TEMP and results in the directory festat_wrk. When the program has finished do:\ncd festat_wrk\nemkdir ec:/$uid/jbdata\ngzip stab_your_exp.cv\ngzip stab_your_exp.bal\necp stab_your_exp.cv.gz ec:/$uid/jbdata/. (with your own filename and directory)\necp stab_your_exp.bal.gz ec:/$uid/jbdata/. (with your own filename and directory)\n(also create a tar file with all *.xy, *.y, *.cv, *.bal and *.cvt files and put it on ecfs for future diagnostic purposes) ","category":"page"},{"location":"DataAssimilation/StructureFunctions/#STEP-2-Generating-background-error-statistics-with-EDA-cycling-(instructions-under-testing)","page":"Structure functions","title":"STEP 2 Generating background error statistics with EDA cycling (instructions under testing)","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"Create a new experiment on ECMWF:\nIn case you have an existing domain setup do:\nmkdir -p $HOME/hm_home/jbedaexp\ncd $HOME/hm_home/jbedaexp\n~hlam/Harmonie setup -c AROME_JBEDA -r ~hlam/harmonie_release/git/tags/harmonie-43h2.2.1 -d DOMAIN # where DOMAIN is the name of your domain\nIn case you are creating structure functions for a new domain (or you are not sure):\nmkdir -p $HOME/hm_home/jbedaexp\ncd $HOME/hm_home/jbedaexp\n~hlam/Harmonie setup -c AROME_JBEDA -r ~hlam/harmonie_release/git/tags/harmonie-43h2.2.1\n~hlam/Harmonie co scr/Harmonie_domains.pm\nThen edit scr/Harmonie_domains.pm and add your new domain definition.\nThe ensemble that will be used to generate the structure functions needs to be defined in suites/harmonie.pm. 
An edited ensemble configuration file should define a four-member ensemble that only varies the boundary member input (ENSBDMBR) as follows:\n%env = (\n# 'ANAATMO' => { 0 => '3DVAR' },\n# 'HWRITUPTIMES' => { 0 => '00-21:3,24-60:6' },\n# 'SWRITUPTIMES' => { 0 => '00-06:3' },\n# 'HH_LIST' => { 0 => '00-21:3' },\n# 'LL_LIST' => { 0 => '36,3' },\n# 'LSMIXBC' => { 0 => 'no' },\n# 'ANASURF' => { 0 => 'CANARI_OI_MAIN' },\n 'ENSCTL' => [ '001', '002', '003', '004'],\n# 'OBSMONITOR' => [ 'obstat'], \n# SLAFLAG: Forecast length to pick your perturbation end point from\n# SLAFDIFF: Hours difference to pick your perturbation start point from\n# SLAFLAG=24, SLAFDIFF=6 will use +24 - +18\n# SLAFDIFF=SLAFLAG will retain the original SLAF construction\n# SLAFK should be tuned so that all members have the same perturbation size\n 'ENSBDMBR' => [ 1,2,3,4],\n# 'SLAFLAG' => [ 0, 6, 6, 12, 12, 18, 18, 24, 24, 30, 30],\n# 'SLAFDIFF' => [ 0, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6],\n# 'SLAFK' => ['0.0','1.75','-1.75','1.5','-1.5','1.2','-1.2','1.0','-1.0','0.9','-0.9'],\n# When using ECMWF ENS the members should be defined\n# # 'ENSBDMBR' => [ 0, 1..10],\n\n### Normally NO NEED to change the settings below\nLink to your newly generated Jb statistics from STEP 1:\nEdit $HOME/hm_home/jbedaexp/scr/include.ass as follows (example for DOMAIN=METCOOP25D): in the section for your relevant domain, point to the structure functions stored in STEP 1 as follows:\n elif [ \"$DOMAIN\" = YOUR DOMAIN]; then\n JBDIR=${JBDIR-\"ec:/hirlam/harmonie_jbdata\"}\n JBDIR=ec:/$uid/jbdata\n f_JBCV=stabfiltn_your_exp.cv_jbconv.cv (without .gz)\n f_JBBAL=stabfiltn_your_exp.bal_jbconv.bal (without .gz)\nRun for two one-month (30 day) periods:\ncd $HOME/hm_home/jbedaexp\n~hlam/Harmonie start DTG=2016060100 DTGEND=2016070100\n#\n#~hlam/Harmonie start DTG=2017010100 DTGEND=2017013100\nGenerate the statistics using festat standalone: \nPlace yourself at $TEMP on ECMWF\nCopy Festat.standalone to $TEMP at ECMWF\nEdit the script to reflect your user and experiment details (in particular copy the femars data ec:/$uid/harmonie/jbedaexp/femars/ to the femars directory on $TEMP)\nMake sure you have removed any old femars_wrk directory and only have forecast differences from your EDA experiment in your femars directory. Preferably also name the files differently than in the STEP 1 downscaling. ","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":" Submit with \n\n ```bash\n qsub ./Festat.standalone\n ```\n \n You will get a log-file `festat.log` on `$TEMP` and results in directory `festat_wrk`.\n When the program has finished do:\n \n ```bash\n cd festat_wrk\n emkdir ec:/smx/jbdata (with smx replaced with your own user id) \n gzip stab_your_eda_exp.cv\n gzip stab_your_eda_exp.bal\n ecp stab_your_eda_exp.cv.gz ec:/smx/jbdata/. (with your own filename and directory)\n ecp stab_your_eda_exp.bal.gz ec:/smx/jbdata/. 
(with your own filename and directory)\n ```\n\n Also create a tar file with all `*.xy`, `*.y`, `*.cv`, `*.bal` and `*.cvt` files and put it on ecfs for future diagnostic purposes. These new files are your final background error statistics, to be diagnosed (perhaps compared with the STEP 1 ones) and inserted into your data assimilation by modifying `include.ass` (as in bullet 3 above) to point to your new files.","category":"page"},{"location":"DataAssimilation/StructureFunctions/#Diagnosis-of-background-error-statistics","page":"Structure functions","title":"Diagnosis of background error statistics","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"Diagnosis of background error statistics is a rather complicated task. To get an idea of what the correlations and covariances should look like, take a look at the article: Berre, L., 2000: Estimation of synoptic and mesoscale forecast error covariances in a limited-area model. Mon. Wea. Rev., 128, 644-667. Software for investigating and graphically illustrating different aspects of the background error statistics has been developed, and statistics generated for different domains have been investigated, using the AccordDaTools package. With this software you can also compare your newly generated background error statistics with those generated for other HARMONIE domains. This will give you an idea of whether your statistics seem reasonable. To diagnose the newly derived background error statistics follow these instructions:\nGet the code and scripts:\nDownload and install AccordDaTools following the instructions in the README\nDon't forget to add the package tools directory to your PATH: \nexport PATH=/path/to/da_tools:$PATH\nRun the Jb diagnostics script:\nFor example, for a new domain using a horizontal grid spacing of 2500 m and (HARMONIE) 65 vertical levels:\njbdiagnose -b jb_data/stab_IRELAND25_064_480.bal -c jb_data/stab_IRELAND25_064_480.cv -g 2500 -l harmL65 -e jbdiag_IRELAND25_064\nThe output will be written to jbdiag_IRELAND25_064","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"The AccordDaTools package also provides two tools for plotting the data produced by jbdiagnose, plotjbbal and plotjbdiag. plotjbbal plots Jb balances for different parameters. plotjbdiag produces spectral density (spdens) and vertical correlation (vercor) diagnostic plots for your structure functions. For example:\nplotjbbal:\nplotjbbal -t stdv -p QQ -r jbdiag_ -e IRELAND25_064\nplotjbdiag:\nplotjbdiag -l 50 -t vercor -p QQ -r jbdiag_ -e IRELAND25_064","category":"page"},{"location":"DataAssimilation/StructureFunctions/#Run-3DVAR/4DVAR-with-the-new-background-error-statistics","page":"Structure functions","title":"Run 3DVAR/4DVAR with the new background error statistics","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"Create hm_home/jb_da. 
Then cd $HOME/hm_home/jb_da.\nCreate the experiment by typing\n~hlam/Harmonie setup -r ~hlam/harmonie_release/git/tags/harmonie-43h2.2.1\nIn scr/include.ass set JBDIR=ec:/$uid/jbdata (uid being your user id; in this example ec:/smx/jbdata), set f_JBCV to the name of your .cv file in ec:/$uid/jbdata (without .gz) and set f_JBBAL to the name of your .bal file in ec:/$uid/jbdata (without .gz) (in this example f_JBCV=stab_METCOOPD_65_20200601_360.cv and f_JBBAL=stab_METCOOPD_65_20200601_360.bal). Add these three lines instead of the three lines in include.ass that follow right after the elif statement: elif [ \"$DOMAIN\" = METCOOP25D]; then. If the domain is other than METCOOP25D, look for the corresponding name of that domain. \nFrom $HOME/hm_home/jb_da launch the experiment by typing\n~hlam/Harmonie start DTG=2021010100 DTGEND=2021010103\nThe resulting analysis file will be found under $TEMP/hm_home/jb_da/archive/2021/01/01/03, it will be called MXMIN1999+0000, and it is also on ec:/$uid/harmonie/2021/01/01/03. To diagnose the 3D-VAR analysis increments of the jb_da experiment, copy the files MXMIN1999+0000 (analysis) and ICMSHHARM+0003 (fg) to $SCRATCH. The first guess (background) file can be found on $TEMP/hm_home/jb_da/archive/2021/01/01/00 and ec:/$uid/harmonie/jb_da/2021/01/01/00. Convert from FA file format to GRIB with the gl software ($SCRATCH/hm_home/jb_da/bin/gl) by typing ./gl -p MXMIN1999+0000 and ./gl -p ICMSHANAL+0000. Then plot the difference between the files with your favourite software. Plot horizontal and vertical cross-sections of temperature and other variables using your favourite software (EPyGrAM for example).\nNow you have managed to insert the newly generated background error statistics into the assimilation system, to carry out a full-scale data assimilation run and to plot the analysis increments. The next natural step to further diagnose the background error statistics is to carry out a single observation impact experiment utilizing your newly generated background error statistics. Note the variables REDNMC and REDZONE in include.ass. REDNMC is the scaling factor for the background error statistics (default value 0.6/0.9 for METCOOP25D/NEW_DOMAIN). REDZONE describes how far from the lateral boundaries (in km) the observations need to be located to be assimilated (default value 150/100 for METCOOP25D/NEW_DOMAIN).","category":"page"},{"location":"DataAssimilation/StructureFunctions/#In-line-Interpolation-and-Extrapolation-of-Jb-statistics","page":"Structure functions","title":"In-line Interpolation and Extrapolation of Jb-statistics","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"In case you do not have existing background error statistics derived for your domain, there is a built-in technical possibility to use Jb files from another domain derived with the same number of vertical levels. From these host Jb files, background error statistics are then interpolated or extrapolated to the current domain configuration. The assumption (which is in general questionable) is that the statistics derived on the host domain are also valid for the current domain. If the longest side of the host domain is shorter than the longest side of the current domain, an extrapolation of background error covariance spectra is needed. Such extrapolation should be avoided over a wide range of wavenumbers. 
Therefore it is recommended that the longest side of the host Jb file is as long as or longer than the longest side of the current domain. The interpolation is invoked by setting JB_INTERPOL=yes and JB_REF_DOMAIN=$HOST_JB in ecf/config_exp.h, where $HOST_JB is for example METCOOP25B. These settings activate the running of the script jbconv.sh (in case no Jb files are present for the current domain), called from Fetch_assim_data. ","category":"page"},{"location":"DataAssimilation/StructureFunctions/#On-going-work-and-future-developments","page":"Structure functions","title":"On-going work & future developments","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"Recent and on-going work as well as plans for future developments:","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"Ongoing work regarding structure functions concerns investigations of the effects on B statistics and data assimilation of the upper-level relaxation towards ECMWF at the upper boundary condition through LUNBC=.true. Longer-term research is directed towards flow-dependent background error statistics and a close link between the data assimilation and the ensemble forecasting system. Plans for future work also include adapting to the cy46 Festat.standalone, which reads FA files rather than femars files. There is also a newly developed stand-alone tool for interpolation of Jb statistics, including between different vertical level geometries (not recommended), which is not yet publicly available and documented. Finally it should be mentioned that there are alternative methods to EDA for carrying out STEP 2 of the background error statistics derivation. Such alternatives are BRAND and BREND, and these have been tested and compared with EDA in various contexts, such as in reanalysis frameworks. The conclusion is that there are both pros and cons with BRAND as compared with EDA. The main conclusion is that both EDA and BRAND are hampered by the homogeneity and isotropy assumptions in the 3DVAR/4DVAR framework, so that differences are smaller than in hybrid DA frameworks. Therefore continued EDA/BRAND comparisons are carried out within hybrid ensemble/DA frameworks. Nevertheless we aim to include here instructions for optionally replacing the STEP 2 EDA in the procedure above with a STEP 2 BRAND. We also aim to introduce instructions for using extended complementary diagnosis tools for Jb statistics based on the fediacov tool and associated plotting scripts. Such tools do exist, but they are not yet publicly available and documented. ","category":"page"},{"location":"DataAssimilation/StructureFunctions/#References","page":"Structure functions","title":"References","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"festat_guidelines, Ryad El Khatib, Météo-France, 2014\nfestatforfa_guidelines, Ryad El Khatib, Météo-France, 2016","category":"page"},{"location":"PostProcessing/gl/#gl","page":"GL","title":"Post processing with gl","text":"","category":"section"},{"location":"PostProcessing/gl/#Introduction","page":"GL","title":"Introduction","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl (as in griblist) is a multi-purpose tool for file manipulation and conversion. 
It uses ECMWF's ecCodes library and can be compiled with or without support for HARMONIE FA/LFI or NetCDF files. The gl package also includes software for verification extraction, fldextr, and field comparison, xtool.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" USAGE: gl file [-n namelist_file] [-o output_file] -[lfgmicp(nc)sdtq] [-lbc CONF]\n\n gl [-f] file, list the content of a file, -f for FA/lfi files \n -c : Convert a FA/lfi file to grib ( -f implicit ) \n -p : Convert a FA file to grib output without extension zone\n (-c and -f implicit ) \n -nc : Convert a FA/lfi file to NetCDF ( -f implicit ) \n -musc : Convert a MUSC FA file ASCII ( -c implicit ) \n -lbc ARG : Convert a CONF file to HARMONIE input \n where CONF is ifs or hir as in ECMWF/HIRLAM data \n climate_aladin assumed available \n -d : Together with -lbc it gives a (bogus) NH boundary file \n climate_aladin assumed available \n -s : Work as silent as possible \n -g : Prints ksec/cadre/lfi info \n -m : Prints min,mean,max of the fields \n -i : Prints the namelist options (useless) \n -tp : Prints the GRIB parameter usage \n -t : Prints the FA/lfi/GRIB table (useful) \n -wa : Prints the atmosphere FA/NETCDF/GRIB table in wiki fmt \n -ws : Prints the surfex FA/NETCDF/GRIB table in wiki fmt \n -q : Cross check the FA/lfi/GRIB table (try) \n -pl X : Give polster_projlat in degrees \n\n gl file -n namelist_file : interpolates file according to \n namelist_file \n gl -n namelist_file : creates an empty domain according to \n specifications in namelist_file \n -igd : Set lignore_duplicates=T \n -igs : Set lignore_shortname=T. Use indicatorOfParameter \n instead of shortName for selection \n","category":"page"},{"location":"PostProcessing/gl/#ecCodes-definition-tables","page":"GL","title":"ecCodes definition tables","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Since ecCodes has replaced grib_api as the ECMWF primary software package to handle GRIB, we will hereafter only refer to ecCodes, but similar settings apply to grib_api as well. With the change to ecCodes we rely heavily on the shortName key for identification. To get the correct connection between the short names and the GRIB1/GRIB2 identifiers, specific tables have been defined for HARMONIE. These tables can be found in /util/gl/definitions. 
To use these tables you have to define the ECCODES_DEFINITION_PATH environment variable as ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"export ECCODES_DEFINITION_PATH=SOME_PATH/gl/definitions:PATH_TO_YOUR_ECCODES_INSTALLATION","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"If this is not set correctly the interpretation of the fields may be wrong.","category":"page"},{"location":"PostProcessing/gl/#GRIB/FA/LFI-file-listing","page":"GL","title":"GRIB/FA/LFI file listing","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Listing of GRIB/ASIMOF/FA/LFI files.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" gl [-l] [-f] [-m] [-g] FILE","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where FILE is in GRIB/ASIMOF/FA/LFI format","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Option Description\n-l input format is LFI\n-f input format is FA\n -l and -f are equivalent\n-g print GRIB/FA/LFI header\n-m print min/mean/max values","category":"page"},{"location":"PostProcessing/gl/#GRIB/FA/LFI-file-conversion","page":"GL","title":"GRIB/FA/LFI file conversion","text":"","category":"section"},{"location":"PostProcessing/gl/#Output-to-GRIB1","page":"GL","title":"Output to GRIB1","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl [-c] [-p] FILE [ -o OUTPUT_FILE] [ -n NAMELIST_FILE]","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" \n-c converts the full field (including extension zone) from FA to GRIB1\n-p converts field excluding the extension zone (\"p\" as in physical domain) from FA to GRIB1","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The FA/LFI to GRIB mapping is done in a table defined by util/gl/inc/trans_tab.h","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"To view the table:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl -t\ngl -tp","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"To check for duplicates in the table:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl -q","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The translation from FA/LFI to GRIB1 can be changed through a namelist like this one:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" &naminterp\n user_trans%full_name ='CLSTEMPERATURE',\n user_trans%t2v = 253,\n user_trans%pid = 123,\n user_trans%levtype = 'heightAboveGround',\n user_trans%level = 002,\n user_trans%tri = 000,\n /","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"or for the case where the level number is included in the FA name","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" &naminterp\n user_trans%full_name='SNNNEZDIAG01',\n user_trans%cpar='S'\n user_trans%ctyp='EZDIAG01',\n ...\n /","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Conversion can be refined to convert a selection of fields. 
Below is an example that will write out ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"T (shortname='t',pid=011), u (shortname='u',pid=033) and v (shortname='v',pid=034) on all (level=-1) model levels (levtype='hybrid')\nT (shortname='t',pid=011) at 2m (lll=2) above the ground (levtype='heightAboveGround') [T2m]\nTotal precipitation (shortname='tp',pid=061,levtype='heightAboveGround',level=000)","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" &naminterp\n readkey%shortname= 't', 'u', 'v', 't', 'tp', 'fg',\n readkey%levtype='hybrid','hybrid','hybrid','heightAboveGround','heightAboveGround','heightAboveGround',\n readkey%level= -1, -1, -1, 2, 0, 10,\n readkey%tri = 0, 0, 0, 0, 4, 2,\n /","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"shortname is the ecCodes shortname of the parameter \nlevtype is the ecCodes level type\nlevel is the GRIB level\ntri means timeRangeIndicator and is set to distinguish between instantaneous, accumulated and min/max values.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The first three are well known to most users. The time range indicator is used in HARMONIE to distinguish between instantaneous and accumulated fields. Read more about the options here. Note that for levtype hybrid, setting level=-1 means all levels. ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"We can also pick variables using their FA/lfi name:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" &naminterp\n readkey%faname = 'SPECSURFGEOP','SNNNTEMPERATURE',\n /","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where SNNNTEMPERATURE means that we pick all levels.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Fields can be excluded from the conversion by name","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" &naminterp\n exclkey%faname = 'SNNNTEMPERATURE'\n /","category":"page"},{"location":"PostProcessing/gl/#Output-to-GRIB2","page":"GL","title":"Output to GRIB2","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"To get GRIB2 files the format has to be set in the namelist as ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" &naminterp\n output_format = 'GRIB2'\n /","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The conversion from FA to GRIB2 is done in gl via the ecCodes tables. 
All translations are defined in util/gl/scr/harmonie_grib1_2_grib2.pm where we find all settings required to specify a parameter in GRIB1 and GRIB2.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"\n tmax => {\n editionNumber=> '2',\n comment=> 'Maximum temperature',\n discipline=> '0',\n indicatorOfParameter=> '15',\n paramId=> '253015',\n parameterCategory=> '0',\n parameterNumber=> '0',\n shortName=> 'tmax',\n table2Version=> '253',\n typeOfStatisticalProcessing=> '2',\n units=> 'K',\n },\n","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"To create ecCodes tables from this file run","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" cd gl/scr\n ./gen_tables.pl harmonie_grib1_2_grib2","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"and copy the grib1/grib2 directories to gl/definitions.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Note that there are no GRIB2 translations yet defined for the SURFEX fields!","category":"page"},{"location":"PostProcessing/gl/#Output-to-NetCDF","page":"GL","title":"Output to NetCDF","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl -nc [-p] FILE [ -o OUTPUT_FILE] [ -n NAMELIST_FILE]","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" \n-p converts field excluding the extension zone (\"p\" as in physical domain) from FA to NetCDF\n-o output file name\n-n namelist file to be used in conversion","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The FA/LFI to NetCDF mapping is done using tables defined by util/gl/inc/trans_tab.h and util/gl/inc/nc_tab.h","category":"page"},{"location":"PostProcessing/gl/#Namelist-options-for-NetCDF","page":"GL","title":"Namelist options for NetCDF","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The translation from FA/LFI to NetCDF can be changed through a namelist like this one:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" &naminterp\n user_nctrans%full_name ='SFX.SIC',\n user_nctrans%s_name = \"\", \n user_nctrans%l_name = \"Sea-Ice Area Percentage (Atmospheric Grid)\",\n user_nctrans%unit = \"%\" ,\n /","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The unit entry can be used to do a limited set of unit conversions; in the example above SIC will be converted from the original units (fraction) to a percentage.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Other specific naminterp options for converting to netcdf:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"variable description default\nlacc2flux T: Convert accumulated fields (tri=4) to fluxes by dividing by the length of the interval FF-F0. Names, units and tri are adapted. 
.FALSE.\nlmergelevs T: write all levels of a variable to the same file; F: each level in a separate file .FALSE.\nlclimate_fields T: don't add a time dimension and variable, useful for climate fields like land mask .FALSE.\nlvertices T: add vertices (corner points) to netcdf file, only possible for newly created files .FALSE.\nlhistory T: add history global attribute to netcdf file .FALSE.\nikindnc NetCDF version (3 or 4), 3: larger files, but faster; 4: compressed, but slow\nref_date Reference date, used to generate relative time axis 19500101\nref_hour Reference hour, used to generate relative time axis 0\nctimeis time refers to \"start\", \"middle\", or \"end\" of interval for non-instantaneous fields. If writing several variables to 1 file, that don't have the same timing (e.g. accumulated vs. instantaneous), then \"end\" is probably the only safe option! end\ncsep separator in derived netcdf file name _\ncdatefname format for date in derived netcdf file name, if not recognized as format, use whatever is passed YYYYMMDDHH\ncfiden used in derived netcdf file name to indicate origin (e.g. his, sfx, fp) \ncfreq used in derived netcdf file name and as \"frequency\" global attribute (e.g. 1hr, 3hr, day, mon) \nchm_rev HARMONIE version, used as \"model_id\" global attribute \ncdomain domain name, used in derived netcdf file name and as \"domain\" global attribute \ncexperiment experiment id, used in derived netcdf file name and as \"experiment_id\" global attribute \ncinstitute used as \"institute_id\" global attribute \nchostmod used as \"driving_model_id\" global attribute ","category":"page"},{"location":"PostProcessing/gl/#Setting-fstart-for-min-max-fields","page":"GL","title":"Setting fstart for min-max fields","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Min and max fields, with tri=2 (time range indicator), are valid for a certain period. By default the period is 3h, but this can be changed via the variables FREQ_RESET_TEMP and FREQ_RESET_GUST in ecf/config_exp.h, for example to 1 to store min/max temperature over an hour. By default gl doesn’t have info on this frequency and it is assumed that the fields are valid from the start of the run. Use the namelist option fstart to assign the appropriate starting value, e.g.:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"fstart(15) = 3","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"for max t2m (parameter code 15). The fstart value is then used in the time_bnds. This value needs to be updated in line with FREQ_RESET. In Makegrib_gribex and convertFA there are examples of how to do this. 
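As a worked example (values inferred from the description above, not taken from the scripts): if FREQ_RESET_TEMP=1 so that max temperature is reset every hour, then for an output file at +6h the current min/max interval starts at +5h, and the namelist should contain fstart(15) = 5 so that the time_bnds span +5 to +6h. 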
Note that this works in the same way for NetCDF and GRIB.","category":"page"},{"location":"PostProcessing/gl/#Derived-file-name","page":"GL","title":"Derived file name","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"If no output file name is supplied (-o flag) an output file name is derived from available info:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"nc_fld_name[_levinfo][_cfiden][_cdomain][_cexperiment][_cfreq][_timeinfo].nc","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"with:\n| | |\n| --- | --- |\n| nc_fld_name | name of netcdf variable as defined in nc_tab.h |\n| levinfo | indication of level info, with lev if all levels are written to the same file (lmergelevs), or a level number/height otherwise |\n| cfiden | identifier of input file (e.g. his, sfx), set via namelist |\n| cdomain | domain name, set via namelist |\n| cexperiment | experiment name, set via namelist |\n| cfreq | frequency, set via namelist |\n| timeinfo | indicator of file date/time, format controlled via the cdatefname namelist variable, not used if lclimate_fields |","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"If elements are not set, either via the namelist or by a default value, they are excluded from the name. The separator is a _ by default but can be changed via the csep namelist variable. ","category":"page"},{"location":"PostProcessing/gl/#Time-axis","page":"GL","title":"Time axis","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"A relative time axis (days since …) is created using ref_date and ref_hour as reference. In existing files the current time step is looked up, and if it already exists it is overwritten. If it doesn’t exist yet, it is appended to the end of the file. Note that if time steps are not converted in order the time axis will not be consecutive. For non-instantaneous fields a time bounds variable is added. The start of the interval is taken from fstart (from tri=2) or outkey%F0, or just from the input file (usually 0). The end of the interval is the current time step. Whether the time variable refers to the beginning, middle or end of the interval can be controlled with the ctimeis namelist variable. If instantaneous and non-instantaneous fields are written to the same file, it is best to use ctimeis=end. Start may also work, but this should be tested first. With the namelist variable cdatefname you can write output from multiple cycles to the same file. For example, by setting it to YYYYMM the derived file name will contain year and month info, but not day and hour, so all cycles from a month are written to the same file. Be careful with the first time step of a cycle when using cdatefname, as gl will overwrite the last time step of the previous cycle with that of the first step of the new cycle. You can decide to skip the first time step, or multiple steps, if cycles overlap by more than 1 step. ","category":"page"},{"location":"PostProcessing/gl/#Multilevel-fields-and-fields-on-heights-or-pressure-levels","page":"GL","title":"Multilevel fields and fields on heights or pressure levels","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"All levels of multilevel fields can be written to one file (lmergelevs=.TRUE.) or to separate files (default). 
This is possible for model levels, pressure levels and height levels.","category":"page"},{"location":"PostProcessing/gl/#All-levels-in-one-file","page":"GL","title":"All levels in one file","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"If all levels are written to the same file an additional dimension is needed: lev for model levels, height for height levels. For model level fields, named SNNN, the number of levels is derived from the input file (glistnlev). For height level fields, HNNNNN, currently the heights must be set via the hlevlist namelist variable. The heights in this list are used to expand the HNNNNN (to H00010, H00250 etc) and are also used as the coordinate variable. For pressure levels PNNNNN is used in the same way.","category":"page"},{"location":"PostProcessing/gl/#Single-level-fields-on-height","page":"GL","title":"Single level fields on height","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"For single level fields on a specific height such as 2m temperature and 10m wind a height variable is added with that height. This is done if level type = 105 and level ≠ 0. This may not be appropriate in all cases. Note that for some fields level is abused (e.g. level 760 for the sea tile), which gives a useless height. The same approach is used when outputting multilevel fields with lsplitlev=.TRUE. (default).","category":"page"},{"location":"PostProcessing/gl/#Don’t-mix-fields","page":"GL","title":"Don’t mix fields","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"There is a check that all fields on height levels in one file have the same height specification, because only 1 height variable can be specified at the moment. So t2m and w10m cannot be in the same file. Not sure if the check is foolproof. It may be possible to define multiple heights in the code, e.g. height, height2 etc., but this has not been implemented yet. ","category":"page"},{"location":"PostProcessing/gl/#NetCDF-3-or-NetCDF-4","page":"GL","title":"NetCDF 3 or NetCDF 4","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"With the ikindnc namelist option the netcdf format can be set. NetCDF 4 files are compressed by gl; however, this makes the conversion much slower. At the moment it seems better to let gl use the NetCDF 3 format and then convert the files to NetCDF 4 after creation of the file has finished. This can be done with the following command:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"nccopy -k 4 -d 1 -s $nc3_file $nc4_file","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" \n-d deflate level\n-s shuffling (can improve compression, speed and size)","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"In scr/convertFA this can be done by setting nc3to4=yes (default). At the end of the script the files are then converted from netcdf3 to netcdf4-classic with compression.","category":"page"},{"location":"PostProcessing/gl/#Direction-of-fluxes","page":"GL","title":"Direction of fluxes","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"A new element, positive, was added to the nctrans derived type in module_typez and to nc_tab.h; positive can be empty, 'd' or 'u'. 
If it is not empty a positive attribute is added to the variable in the NetCDF file. If it has value 'u', the values of the variable are multiplied by −1 to change the direction from towards the surface to away from the surface.","category":"page"},{"location":"PostProcessing/gl/#Fill-value","page":"GL","title":"Fill value","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"By default no missing value is added for atmospheric fields. For SURFEX fields either 999 (version ≤ 4) or 1e+20 (version ≥ 5) is used. It is possible to add a missing value via the namelist. To do so, in the namelist set the variable lcheck_misval to .TRUE. and set rmisval to the correct value.","category":"page"},{"location":"PostProcessing/gl/#Adding-new-netcdf-variables","page":"GL","title":"Adding new netcdf variables","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"If you get messages like:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"No NETCDF conversion for ....","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"then you need to add the field to util/gl/inc/nc_tab.h, which contains the translation from FA to netcdf names. The file util/gl/inc/trans_tab.h contains the conversion of FA names to GRIB codes. If the field you would like to add is absent there, it is probably best to add it in that file as well, as for example GRIB level types are used for functionality in the netcdf conversion as well. Remember to recompile.","category":"page"},{"location":"PostProcessing/gl/#postprocessing","page":"GL","title":"postprocessing","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl can be used to produce postprocessed parameters possibly not available directly from the model. ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Postprocessed parameters are defined in util/gl/grb/postprocess.f90 and util/gl/grb/postp_pressure_level.f90. Some of the more popular parameters are listed:\nPseudo satellite pictures\nTotal precipitation and snow\nWind (gust) speed and direction\nCloud base, cloud top, cloud mask and significant cloud top","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"For a comprehensive list please check the output information for each cycle. NOTE that not all parameters may be implemented in gl","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"To produce \"postprocessed\" MSLP, accumulated total precipitation and visibility use the following namelist, nam_FApp:\n&naminterp\n pppkey(1:3)%shortname='pres','tp','vis',\n pppkey(1:3)%levtype='heightAboveSea','heightAboveGround','heightAboveGround'\n pppkey(1:3)%level= 0, 0, 0,\n pppkey(1:3)%tri= 0, 4, 0,\n lwrite_pponly= .TRUE.,\n/\ngl -p ICMSHHARM+0003 -o output_pp.grib -n nam_FApp\nNote:\nSet lwrite_pponly to true to write only the postprocessed fields to the file\nSet lwrite_pponly to false to write all fields to the file, input fields as well as the postprocessed fields.","category":"page"},{"location":"PostProcessing/gl/#Vertical-interpolation","page":"GL","title":"Vertical interpolation","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl can be used to carry out vertical interpolation of parameters. 
Four types are available","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"HeightAboveSea, give height above sea in meters\nHeightAboveGround, give height above ground in meters\nHeightAboveGroundHighPrecision, give height above ground in centimeters\nisobaricInHpa, give the pressure level in hPa\nTo interpolate temperature to 1.40 m (level 140 in cm) use the following namelist, nam_hl:\n&naminterp\n pppkey(1:1)%shortname='t',\n pppkey(1:1)%levtype='heightAboveGroundHighPrecision',\n pppkey(1:1)%level= 140,\n pppkey(1:1)%tri= 0,\n vint_z_order=1,\n lwrite_pponly= .TRUE.,\n/\ngl -p ICMSHHARM+0003 -o output_hl.grib -n nam_hl\nNote:\nVertical interpolation to z levels is controlled by vint_z_order: 0 is nearest level, 1 is linear interpolation\nFor interpolation to pressure levels (levels 500, 850 and 925 hPa, type=100) use the following namelist, nam_pl:\n&naminterp\n pppkey(1:3)%shortname='t','t','t',\n pppkey(1:3)%levtype='isobaricInhPa','isobaricInhPa','isobaricInhPa',\n pppkey(1:3)%level= 500, 850, 925,\n pppkey(1:3)%tri= 0, 0, 0,\n vint_z_order=1,\n lwrite_pponly= .TRUE.,\n/\ngl -p ICMSHHARM+0003 -o output_pl.grib -n nam_pl","category":"page"},{"location":"PostProcessing/gl/#Horizontal-interpolation","page":"GL","title":"Horizontal interpolation","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Interpolation/resampling between different geometries such as regular lat lon, Lambert conformal, polar stereographic, rotated lat lon and rotated Mercator is possible with gl\nThe interpolation methods available are:\nnearest grid-point (order=-2)\nmost representative grid-point (order=-1)\nnearest grid-point (order=0)\nbi-linear (order=1)\nbi-quadratic (order=2, mask not respected)\nbi-cubic (order=3, mask not respected)\nExample of (an Irish) rotated lat lon domain, nam_FArotll:\n&naminterp\n outgeo%nlon=50,\n outgeo%nlat=50,\n outgeo%nlev=-1,\n outgeo%gridtype='rotated_ll',\n outgeo%west=-2.5,\n outgeo%south=-2.5,\n outgeo%dlon=0.1,\n outgeo%dlat=0.1,\n outgeo%polon=-6.7,\n outgeo%polat=-36.2,\n order= 1,\n/","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where DLON/DLAT are in degrees. The HIRLAM Domain Tool may be of use for viewing rotated lat lon domains.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl -p ICMSHHARM+0003 -n nam_FArotll -o output.grib","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Example of a Lambert domain\n&naminterp\n outgeo%nlon = 50 ,\n outgeo%nlat = 50,\n outgeo%nlev = -1,\n outgeo%gridtype = 'lambert',\n outgeo%west = 15.0\n outgeo%south = 50.0\n outgeo%dlon = 10000.\n outgeo%dlat = 10000.\n outgeo%projlat = 60.\n outgeo%projlat2 = 60.\n outgeo%projlon = 15.\n/\nwhere DLON/DLAT are in meters. The HIRLAM Domain Tool may be of use for viewing rotated lat lon domains.\nExample polar stereographic projection\n&naminterp\n outgeo%nlon = 50 ,\n outgeo%nlat = 50,\n outgeo%nlev = -1,\n outgeo%gridtype = 'polar_stereographic',\n outgeo%west = 15.0\n outgeo%south = 50.0\n outgeo%dlon = 10000.\n outgeo%dlat = 10000.\n outgeo%projlat = 60.\n outgeo%projlon = 15.\n/\nwhere DLON/DLAT are in meters. Note: the GRIB1 standard assumes that the projection plane is at 60 degrees north whereas HARMONIE assumes it is at 90 degrees north.\nExample rotated Mercator\n&naminterp\n outgeo%nlon = 50 ,\n outgeo%nlat = 50,\n outgeo%nlev = -1,\n outgeo%projection = 11,\n outgeo%west = 15.0\n outgeo%south = 50.0\n 
outgeo%dlon = 10000.\n outgeo%dlat = 10000.\n outgeo%projlat = 60.\n outgeo%projlon = 15.\n/\nwhere DLON/DLAT are in meters. Note: rotated Mercator is not supported in GRIB1.\nGeographical points are a special case of projection 0; use the namelist file nam_FAgp:\n&naminterp\n outgeo%nlon=3 ,\n outgeo%nlat=1,\n outgeo%nlev=-1,\n outgeo%gridtype='regular_ll',\n outgeo%arakawa= 'a',\n order = 0,\n readkey(1:3)%shortname='t','u','v',\n readkey(1:3)%levtype='heightAboveGround','heightAboveGround','heightAboveGround',\n readkey(1:3)%level= 2, 10, 10,\n readkey(1:3)%tri= 0, 0, 0,\n linterp_field = f,\n gplat = 57.375,57.35,57.60\n gplon = 13.55,13.55,14.63\n/\nThe result will be written to an ASCII file with the name gpYYYYMMDDHHLLL.\ngl -p ICMSHHARM+0003 -n nam_FAgp \ncat gp20140702_1200+003","category":"page"},{"location":"PostProcessing/gl/#Extract-(crop)-a-sub-domain","page":"GL","title":"Extract (crop) a sub-domain","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl can be used to \"cut out\" a sub-domain from an input file using the namelist namCut:","category":"page"},{"location":"PostProcessing/gl/#Crop-using-lower-left-and-upper-right-coordinates","page":"GL","title":"Crop using lower left and upper right coordinates","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"&naminterp\nistart = 150\njstart = 150\nistop = 350\njstop = 350\n/","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Use this command:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl input.grib -n namCut -o cutout.grib","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Another way of specifying your sub-domain is to define how many points to exclude at the end","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"&naminterp\nistart = 150\njstart = 150\nistop = -10\njstop = -10\n/","category":"page"},{"location":"PostProcessing/gl/#Crop-using-SW,NE-corner-and/or-number-of-points","page":"GL","title":"Crop using SW,NE corner and/or number of points","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Here, you specify any of the SouthWest, NorthEast corners and/or the number of gridpoints","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"&naminterp\noutgeo%gridtype = 'crop',\noutgeo%nlat = 200,\noutgeo%nlon = 300,\noutgeo%south = 50.155,\noutgeo%west = -12.88,\n/","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Or ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"&naminterp\noutgeo%gridtype = 'crop',\noutgeo%north = 58.277,\noutgeo%east = 12.3,\noutgeo%south = 50.155,\noutgeo%west = -12.88,\n/","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"If you specify outgeo%gridtype as 'crop', the SouthWest corner will be translated to lower left grid coordinates and Nlat,Nlon will translate to upper right coordinates. You may specify any of SW, NE, nlat/nlon. Priority is given to SW, NE. The behaviour is as follows:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"SW and NE have priority; these will anchor either corner. 
If a corner is not specified, Nlat/Nlon will extend from the other corner.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"If only one coordinate is specified, the other corner becomes the corner of the input domain. So: ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Specify only SW and you get a crop from there to the NE corner of the input domain.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Specify only NE and you get a crop from the SW corner of the input domain.\nSpecify SW and NE and you get a crop between these corners.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Specify SW and Nlat/Nlon and you get Nlat x Nlon from the SW corner.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Specify NE and Nlat/Nlon and you get Nlat x Nlon south and west of the NE corner.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Specify SW, NE and Nlat/Nlon and you get a crop between the SW/NE corners. Nlat/Nlon are ignored.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Specify only Nlat/Nlon and you get Nlat x Nlon from the SW corner of the input domain.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The crop must be within the original domain unless you set","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"ldemand_inside = .FALSE.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"in the namelist. In this case, the crop will be adjusted to lie within the original domain and the output will be smaller than Nlat x Nlon. In the case where the requested crop lies entirely outside the original domain, the program will abort.","category":"page"},{"location":"PostProcessing/gl/#Rotating-wind-components","page":"GL","title":"Rotating wind components","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"HARMONIE produces u and v wind components relative to the model grid. gl by default always outputs u and v relative to the output grid. So if no regridding is done and the output is still on the LCC grid, u and v will also still be relative to the LCC grid. But if the output is regridded to a regular lat-lon grid, then u and v will be rotated and will be relative to the regular lat-lon grid. Wind (from) direction (parameter 31), however, is always relative to a regular lat-lon grid. To rotate u and v to regular lat-lon while retaining the data on the LCC grid, set uvrelativetogrid=0 in the namelist. All u and v vectors that are processed will then be rotated to geographical E and N directions. ","category":"page"},{"location":"PostProcessing/gl/#Output-to-several-files","page":"GL","title":"Output to several files","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"It is possible to let gl read data once and do processing loops with these data. 
Let us look at an example namelist","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"&naminterp\n OUTPUT_FORMAT='MEMORY'\n/\n&naminterp\n INPUT_FORMAT='MEMORY'\n OUTPUT_FORMAT='GRIB'\n OUTFILE='test1.grib'\n/\n&naminterp\n INPUT_FORMAT='MEMORY'\n OUTPUT_FORMAT='GRIB'\n OUTFILE='test2.grib'\n READKEY%FANAME='SNNNTEMPERATURE'\n/\n&naminterp\n INPUT_FORMAT='MEMORY'\n OUTPUT_FORMAT='GRIB'\n READKEY%FANAME='CLSTEMPERATURE'\n outgeo%nlon = 50 ,\n outgeo%nlat = 50,\n outgeo%nlev = -1,\n outgeo%gridtype = 'polar_stereographic',\n outgeo%west = 15.0\n outgeo%south = 50.0\n outgeo%dlon = 10000.\n outgeo%dlat = 10000.\n outgeo%projlat = 60.\n outgeo%projlon = 15.\n OUTFILE='test3.grib'\n/","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"In the first loop we read data and store it in memory. In the second loop we read the data from memory and output it to the file test1.grib. Then we make two more loops: in the first we only output a subset, and in the last we also do an interpolation to a new grid. The data in memory is, however, still untouched.","category":"page"},{"location":"PostProcessing/gl/#Input-from-several-files","page":"GL","title":"Input from several files","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"It's also possible to read several files and write them into one. This is used to gather the various FA fields written from the IO-server. A typical namelist would look like","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"&naminterp\n maxfl=28,\n output_format='MEMORY',\n output_type = 'APPEND',\n input_format='FA',\n infile='forecast/io_serv.000001.d/ICMSHHARM+0003.gridall',\n/\n&naminterp\n output_format='MEMORY',\n output_type = 'APPEND',\n input_format='FA',\n infile='forecast/io_serv.000002.d/ICMSHHARM+0003.gridall',\n/\n...\n&naminterp\n input_format = 'MEMORY',\n output_format= 'GRIB'\n output_type = 'NEW',\n outfile = 'test.grib'\n/","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where maxfl tells how many files will be read.","category":"page"},{"location":"PostProcessing/gl/#domain_prop","page":"GL","title":"domain_prop","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop is used to extract various properties from a file. ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Climate: $MPPGL $BINDIR/domain_prop -DOMAIN_CHECK $LCLIMDIR/m$M1 -f || \\","category":"page"},{"location":"PostProcessing/gl/#Check-an-existing-domain-with-a-namelist-specification","page":"GL","title":"Check an existing domain with a namelist specification","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -DOMAIN_CHECK -f CLIMATE_FILE","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The geometry is read from fort.10 and the program aborts if the new and old geometries differ. 
See scr/Climate for an example.","category":"page"},{"location":"PostProcessing/gl/#Check-if-Q-is-in-gridpoint-or-spectral-representation","page":"GL","title":"Check if Q is in gridpoint or spectral representation","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -f -QCHECK FAFILE","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"returns 1 if Q is spectral and 0 if Q is in gridpoint space.","category":"page"},{"location":"PostProcessing/gl/#Check-if-a-specific-field-is-present","page":"GL","title":"Check if a specific field is present","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -f -CHECK_FIELD S001CLOUD_FRACTI","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"returns 1 if S001CLOUD_FRACTI is found, 0 otherwise","category":"page"},{"location":"PostProcessing/gl/#Check-the-number-of-levels-in-a-file","page":"GL","title":"Check the number of levels in a file","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -f -NLEV FAFILE ","category":"page"},{"location":"PostProcessing/gl/#Check-the-geographical-extension-of-the-domain","page":"GL","title":"Check the geographical extension of the domain","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -f -MAX_EXT FAFILE ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"This is used in several places to determine the domain to be extracted from MARS or to limit the observation sample. Another way is to provide the projection parameters of your domain as input","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -MAX_EXTR \\\n-NLON $NLON -NLAT $NLAT \\\n-LATC $LATC -LONC $LONC \\\n-LAT0 $LAT0 -LON0 $LON0 \\\n-GSIZE $GSIZE","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"To get the geographical position of the lower left corner use","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -f -LOW_LEFT FAFILE ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"To print out the important projection parameters in a file use:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -f -4JB FAFILE","category":"page"},{"location":"PostProcessing/gl/#Get-time-information-from-a-file","page":"GL","title":"Get time information from a file","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -f -DATE FAFILE","category":"page"},{"location":"PostProcessing/gl/#fldextr-and-obsextr","page":"GL","title":"fldextr and obsextr","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Read about the verification extraction programs here","category":"page"},{"location":"System/MFaccess/#Using-Météo-France-Servers","page":"MF Access","title":"Using Météo-France Servers","text":"","category":"section"},{"location":"System/MFaccess/#Introduction","page":"MF Access","title":"Introduction","text":"","category":"section"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"The procedure to get access to MF servers and their read-only git repository is outlined 
here","category":"page"},{"location":"System/MFaccess/#First-steps","page":"MF Access","title":"First steps","text":"","category":"section"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"Discuss your requirements for access to MF servers with the HIRLAM System project leader, Daniel Santos (dsantosm@aemet.es).\nDownload two forms \"Undertaking for the use of Météo-France computer resources\" and \"Demande d'authorisation de conexion au résau de Météo Franc\" from http://www.cnrm.meteo.fr/aladin/spip.php?article157. \nThe \"Undertaking for the use of Météo-France computer resources\" form is to be signed by you only\nThe \"Demande d'authorisation de conexion au résau de Météo France\" must be signed by you and your department head. It must also include an institute stamp. You should enter details in Contacts, Compte d'accesés aux machines du Centre de Cacul and at the bottom with authorization from you institute manager with institute stamp. - A scan of both forms with a brief introductory note should be sent to Eric Escaliere (eric.escaliere@meteo.fr) and cc'ed to Daniel Santos (dsantosm@aemet.es) and Claude Fischer (claude.fischer@meteo.fr).\nBe careful with the \"Machine du client\". I had to specify the name and IP address of my institute's Firewall server as this is what the outside world sees when I access external servers from my PC.\nMétéo-France will send (by post) your username (Identificateur) and password (Mot de passe) for log in.\nThe authentication process itself remains in two steps (first “parme”, then target), as before. \nA few specific examples follow (see MF's instructions for full details):\nbeaufix:","category":"page"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"ewhelan@realin23:gcc-8.3.1:.../~> which beaufix\nalias beaufix='telnet beaufix.meteo.fr'\n\t/usr/bin/telnet\newhelan@realin23:gcc-8.3.1:.../~> beaufix \nTrying 137.129.240.110...\nConnected to beaufix.meteo.fr.\nEscape character is '^]'.\nCheck Point FireWall-1 authenticated Telnet server running on mascarpone\nUser: whelane\npassword: your_parme_password\nUser whelane authenticated by FireWall-1 authentication\n\nConnected to 137.129.240.110\nRed Hat Enterprise Linux Server release 6.9 (Santiago)\nKernel 2.6.32-696.6.3.el6.x86_64 on an x86_64\nbeaufixlogin0 login: whelane\nPassword: your_ldap_password\nLast login: Tue Oct 13 10:15:53 from gw2.met.ie\n _ __ _ \n| | / _|(_) \n| |__ ___ __ _ _ _ | |_ _ __ __\n| '_ \\ / _ \\ / _` || | | || _|| |\\ \\/ /\n| |_) || __/| (_| || |_| || | | | > < \n|_.__/ \\___| \\__,_| \\__,_||_| |_|/_/\\_\\ \n\n[whelane@beaufixlogin0 ~]$ ","category":"page"},{"location":"System/MFaccess/#What-next?-**TO-BE-CONFIRMED**","page":"MF Access","title":"What next? TO BE CONFIRMED","text":"","category":"section"},{"location":"System/MFaccess/#Access-to-MF-servers-via-parme","page":"MF Access","title":"Access to MF servers via parme","text":"","category":"section"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"Once you are happy that you can access PARME from your PC you should once again contact Eric Escaliere (eric.escaliere@meteo.fr) and request login details for merou (Eric will send you a temporary password) and LDAP login details to front-id to enable access to COUGAR, YUKI, BEAUFIX and ID-FRONT\nAn automatic e-mail will be sent from expl-identites@meteo.fr with you LDAP repository password.\nfront-id requires certain criteria for your password. These are detailed in French below. 
When you have received LDAP login details for front-id:\newhelan@eddy:~> telnet parme.meteo.fr\nTrying 137.129.20.1...\nConnected to parme.meteo.fr.\nEscape character is '^]'.\nCheck Point FireWall-1 authenticated Telnet server running on parmesan\nUser: whelane\npassword: ********\nUser whelane authenticated by FireWall-1 authentication\nHost: front-id\n\nConnected to id-front\nRed Hat Enterprise Linux AS release 4 (Nahant Update 5)\nKernel 2.6.9-55.ELsmp on an x86_64\nlogin: whelane\nPassword: \nLast login: Mon Nov 4 05:14:22 from gw2.met.ie\nBienvenue EOIN WHELAN\nVous pouvez changer votre mot de passe\n-------------------------------------------------------------------------\n- Controle de validite sur les mots de passe avant de poster la demande -\n- Le OLD doit etre fourni. -\n- Au moins 8 car, au plus 20 car. -\n- Au moins 2 car. alpha et 2 car. non-alpha. -\n- Ne pas ressembler a UID NAME et OLD sur une syllabe de + de 2 car. -\n-------------------------------------------------------------------------\n-------------------------------------------------------------------------\nHello EOIN WHELAN\nYou may change your password\n-------------------------------------------------------------------------\n- Validity control before demand acceptation -\n- You must enter the old password first -\n- The new password must contain: -\n- At least 8 characters, 20 characters maximum -\n- At least 2 alphanumeric characters and 2 non-alphanumeric characters -\n- The passwd must contain a part of UID NAME -\n-------------------------------------------------------------------------\nChanging password for user 'whelane(56064)'.\nEnter login(LDAP) password: \nNew password: \nRe-enter new password: \nVotre mot de passe a ete change\nWhen you have received login details for merou from Eric:\newhelan@eddy:~> telnet parme.meteo.fr\nTrying 137.129.20.1...\nConnected to parme.meteo.fr.\nEscape character is '^]'.\nCheck Point FireWall-1 authenticated Telnet server running on parmesan\nUser: whelane\npassword: ********\nUser whelane authenticated by FireWall-1 authentication\nHost: merou\n\nConnected to merou\nRed Hat Enterprise Linux Server release 5.6 (Tikanga)\nKernel 2.6.18-238.el5 on an x86_64\nlogin: whelane\nPassword: \nLast login: Tue Nov 5 10:06:35 from gw2.met.ie\n[whelane@merou ~]$ passwd\nChanging password for user whelane.\nChanging password for whelane\n(current) UNIX password: \nNew UNIX password: \nRetype new UNIX password: \npasswd: all authentication tokens updated successfully.\n[whelane@merou ~]$ ","category":"page"},{"location":"System/MFaccess/#Access-to-(read-only)-MF-git-arpifs-git-repository","page":"MF Access","title":"Access to (read-only) MF git arpifs git repository","text":"","category":"section"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"MF use ssh keys to allow access to their read-only git repository. 
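If you do not already have a key pair, one can be generated in the standard way, e.g.:\nssh-keygen -t ed25519\nwhich writes the public key to ~/.ssh/id_ed25519.pub (the key type here is only an example; use whatever MF accept). 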
If approved by the HIRLAM System PL you should request access to the repository by sending a request e-mail to Eric Escaliere, cc'ed to Daniel Santos and Claude Fischer, with your ssh public key attached.","category":"page"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"Once you have been given access you can create a local clone by issuing the following commands:","category":"page"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"cd $HOME\nmkdir arpifs_releases\ncd arpifs_releases\ngit clone ssh://reader054@git.cnrm-game-meteo.fr/git/arpifs.git","category":"page"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"Happy gitting!","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Data-assimilation-algorithms","page":"Algorithms","title":"Data assimilation algorithms","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/#3D-Var","page":"Algorithms","title":"3D-Var","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"The default upper-air data assimilation algorithm is three-dimensional variational assimilation (3D-Var). To use 3D-Var no changes to ecf/config_exp.h should be required assuming structure function data files are available for your domain. Structure function input is defined in scr/include.ass.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Settings","page":"Algorithms","title":"Settings","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"List of 3D-Var settings that the user should be concerned about.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#4D-Var","page":"Algorithms","title":"4D-Var","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"With 43h2.2 four-dimensional variational assimilation (4D-Var) is available as a non-default option. In order to set up an experiment to use 4D-Var one should issue the following commands:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"mkdir -p /path/to/home/hm_home/MY_EXP\ncd /path/to/home/hm_home/MY_EXP\n/path/to/Harmonie/config-sh/Harmonie setup -r /path/to/Harmonie -c AROME_4DVAR","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"To use 4D-Var no changes to ecf/config_exp.h should be required assuming suitable structure function data files are available for your domain (see also ILRES in the settings section). Structure function input is defined in scr/include.ass.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Settings-2","page":"Algorithms","title":"Settings","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"List of 4D-Var settings that the user should be concerned about.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"With the following settings the working of the 4D-Var can be changed. Default values are given","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"NOUTERLOOP=2 # number of 4DVAR outer loops\nILRES=6,3 # Resolution (in parts of full) of outer loops as compared to the forecast resolution. 
The domain NLATxNLON should have the property that for the settings of ILRES both NLON/ILRES and NLAT/ILRES are of the form 5^c 3^d 2^e, where c, d and e are integers >= 0 and e >= 1.\nTSTEP4D=300,150 # Timestep length (seconds) of outer loops TL+AD\nTSTEPTRAJ=300,300 # How often the model state is saved for linearization\nNITER4D=10,15 # Maximum number of inner loop iterations in 4D-Var outer loops\nNSIMU4D=15,20 # Maximum number of cost function simulations in 4D-Var outer loops\nCH_RES_SPEC=yes # yes => change of resolution of the increment spectrally; no => by FULLPOS\nFORCE1=no # yes => tendency increment; no => analysis increment in loop 1\nFORCE2=no # yes => tendency increment; no => analysis increment in loop 2","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Schematic-work-flow-of-4D-Var","page":"Algorithms","title":"Schematic work flow of 4D-Var","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"(Image: )","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"After the screening 4DVscreen for the default 3h observation window (2h before and 1h after the start of the main forecast), \n4DVprolog prepares the initial conditions at the appropriate resolution (ILRES settings) for the forecasts used in minimization. The input here is the background BGHR (ICMSHHARM+0001 fields of the previous cycle supplemented with some surface fields). \nSubsequently 4DVminim initially produces the low-resolution analysis at the beginning of the observation window (an_lowres_$loop), which is then (CH_RES_SPEC=yes in config_exp.h) transformed to the field at forecast resolution (an_hr_begwin). \nThis field is complemented in Blendhr with necessary surface fields and the resulting field (anb_hr_begwin) acts as the initial condition for the trajectory run 4DVtraj. \nFrom the 2h forecast of 4DVtraj (ICMSHTRAJ+0002 fields) the main forecast is started.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Cloudingest-Cloud-Initialization","page":"Algorithms","title":"Cloudingest - Cloud Initialization","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"The cloud initialization modifies temperature and humidity fields with the help of the MSGcloudYYYYMMDDHH.grib file, which contains 2-d fields of cloudtop temperature [K], cloudmask [0-1] and cloudbase [m]. Before cy46h the Cloudingest happened within MASTERODB, specifically in the src/arpifs/phys_dmn/msginit.F90 routine. Since cy46h the Cloudingest uses pysurfex and gl to do the job. ","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Settings-3","page":"Algorithms","title":"Settings","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"Enable Cloudingest in ecf/config_exp.h:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"USE_MSG=yes # Use MSG data for adjustment of initial profiles, EXPERIMENTAL! 
(no|yes), expects MSGcloudYYYYMMDDHH.grib in $OBDIR","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"And in scr/include.ass:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"export CLOUD_BASE=1 # 1 and USE_MSG=true (ecf/config_exp.h) => msginit","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"to replace the cloudbase field in MSGcloudYYYYMMDDHH.grib with an OI-interpolated field combining SYNOP cloud-base-height observations and the postprocessed cloudbase field of the first-guess file.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Ecflow-and-schematic-work-flow-of-Cloudingest","page":"Algorithms","title":"Ecflow and schematic work flow of Cloudingest","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"In case of 3DVar assimilation, the ecflow AnUA family should look like this:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"(Image: )","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"And a schematic work-flow of tasks and files involved:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"(Image: )","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"\nusing Graphviz_jll\nrun(`$(dot()) ../assets/cloudingest.dot -Tsvg -o ../assets/cloudingest.svg`)","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Incremental-Analysis-Updates-IAU","page":"Algorithms","title":"Incremental Analysis Updates - IAU","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"This is a modified and experimental version of the default implemented Forward-Facing Incremental Analysis Update, that is described elsewhere (and mainly controlled via namelist settings). This algorithm uses the same code but its logistics are changed in a few fundamental ways. Instead of introducing the innovations gradually in the forecast from the analysis point (as in default 3DVar), the analysis is done at \"t=0\" and the forecast is started at an earlier point, so that the center of the introduction of the innovations is located at the above-mentioned analysis point.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"This is done through shifting the forecast start to 1h before \"t=0\" and activating a namelist change in the forecast namelist:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"NAMIAU=>{\n 'LIAU' => '.TRUE.,',\n 'TSTARTIAU' => '1800.0,',\n 'TSTOPIAU' => '5400.0,',\n 'ALPHAIAU' => '1.0,',","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"Hence the forecast runs freely for 30 min and then adds the increments during 1h centered around the \"t=0\" point. 
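To spell out the arithmetic: with the start shifted to 3600 s before \"t=0\", TSTARTIAU=1800.0 s into the run corresponds to \"t=0\" minus 30 min and TSTOPIAU=5400.0 s to \"t=0\" plus 30 min, so the 3600 s window is indeed centered on the analysis time. 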
If these times are changed (for example if a larger window is desired), mind that you also need to change which files are linked to the various processes, such as the forecast, so that the correct start files are linked to each process.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Settings-4","page":"Algorithms","title":"Settings","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"If you want to try this method, or want to test it in order to develop it further, there are a few things to keep in mind. Before doing anything else you have to modify the write-up times in ecf/config_exp.h so that the model has access to the start files it needs in order to start.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"From the DEFAULT:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":" LL_LIST=\"12,3\" # Forecast lengths for the cycles [h], replaces LL, LLMAIN\n HWRITUPTIMES=\"00-21:3,24-60:6\" # History file output times\n FULLFAFTIMES=$HWRITUPTIMES # History FA file IO server gather times\n PWRITUPTIMES=\"00-60:3\" # Postprocessing times\n PFFULLWFTIMES=$PWRITUPTIMES # Postprocessing FA file IO server gathering times\n VERITIMES=\"00-60:1\" # Verification output times, changes PWRITUPTIMES/SFXSELTIMES\n SFXSELTIMES=$PWRITUPTIMES # Surfex select file output times","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"to:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":" LL_LIST=\"12,6\" # Forecast lengths for the cycles [h], replaces LL, LLMAIN\n HWRITUPTIMES=\"00-06:1,06-21:3,24-60:6\" # History file output times\n FULLFAFTIMES=$HWRITUPTIMES # History FA file IO server gather times\n PWRITUPTIMES=\"00-06:1,06-60:3\" # Postprocessing times\n PFFULLWFTIMES=$PWRITUPTIMES # Postprocessing FA file IO server gathering times\n VERITIMES=\"00-60:1\" # Verification output times, changes PWRITUPTIMES/SFXSELTIMES\n SFXSELTIMES=$PWRITUPTIMES # Surfex select file output times\n # Only meaningful if SURFEX_LSELECT=yes\n SFXSWFTIMES=$SFXSELTIMES # SURFEX select FA file IO server gathering times\n SWRITUPTIMES=\"00-06:1\" # Surfex model state output times\n SFXWFTIMES=$SWRITUPTIMES # SURFEX history FA file IO server gathering times","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"Then run one cycle for at least 6h so that all the prerequisite files are generated before turning on IAUVAR.\nSecond, when the previous run has completed, set IAUVAR in ecf/config_exp.h to yes and keep running. It should be automatic to continue as usual. 
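The time lists above use a START-END:STEP syntax for each comma-separated group; a small shell sketch to expand an edited list into explicit hours as a sanity check (this reading of the syntax is inferred from the examples above, and the sketch is not part of Harmonie):\nexpand_times() {\n local spec=$1 out=\"\"\n IFS=',' read -ra groups <<< \"$spec\"\n for g in \"${groups[@]}\"; do\n local range=${g%%:*} step=${g##*:} # e.g. \"00-06\" and \"1\"\n local start=${range%%-*} end=${range##*-}\n for ((h=10#$start; h<=10#$end; h+=step)); do out+=\" $h\"; done\n done\n echo $out\n}\nexpand_times \"00-06:1,06-21:3,24-60:6\" # -> 0 1 2 3 4 5 6 6 9 12 15 18 21 24 30 36 42 48 54 60\n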
Make sure that you do not manually set LL to be shorter than 6h.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"The surface assimilation is moved to the start of the forecast and hence it is only the upper air assimilation that is involved in the IAU.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"note: Note\nThere is a difference between the first run with IAUVAR and all the following, because the first run couples to a run done without IAU while the following ones couple to a run that has done IAU, so the files used as start files differ in valid time. To this effect the first run saves a semaphore file in the $SCRATCH/hm_home/exp_name/ directory for the following runs to react to! So if you need to rerun the first run, for some reason, that semaphore file (named is_iauvar) needs to be manually removed!","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Flow-diagram-of-IAU-(Magnus-will-help)","page":"Algorithms","title":"Flow diagram of IAU (Magnus will help)","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/#MUSC","page":"MUSC","title":"MUSC","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/#MUSC-using-the-develop-branch-(CY46)-in-the-git-repository","page":"MUSC","title":"MUSC using the develop branch (CY46) in the git repository","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"If you find any issues with any of the instructions or scripts, feel free to notify Emily Gleeson (emily.gleesonATmet.ie) and Eoin Whelan (eoin.whelanATmet.ie)","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Currently a \"reference\" test case, called musc_ref, works on ATOS, as well as the ARMCU cases (with and without SURFEX for both AROME and HARMONIE namelists) and the two microphysics-related cases (supercooled liquid) developed by Bjorg Jenny Engdahl in cycle 40. ","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Some instructions on how to use MUSC are included below. See here for some information on HARMONIE-AROME experiments using MUSC but note that the scripts have changed somewhat since that paper was written.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Set-up-MUSC","page":"MUSC","title":"Set up MUSC","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Get the code:\nmkdir -p $SCRATCH/harmonie_releases/git/HCY46\ncd $SCRATCH/harmonie_releases/git/HCY46\ngit clone git@github.com:Hirlam/Harmonie.git\ncd Harmonie\ngit checkout dev-CY46h1 \n# If you already have a clone of the code but want to update it to the latest version,\n# use \"git pull\" rather than \"git branch\".\nSet up a MUSC experiment using HARMONIE scripting \nIn this example the ECMWF.atos config file is used. 
\nmkdir -p $HOME/hm_home/test_0001\ncd $HOME/hm_home/test_0001\n$SCRATCH/harmonie_releases/git/HCY46/Harmonie/config-sh/Harmonie setup -r $SCRATCH/harmonie_releases/git/HCY46/Harmonie/ -h ECMWF.atos \nCompile your experiment (still in $HOME/hm_home/test_0001)\n$SCRATCH/harmonie_releases/git/HCY46/Harmonie/config-sh/Harmonie install BUILD_WITH=cmake\n# Note that for the ARMCU cases cmake needs FFT modifications (not yet committed by Yurii)\nSome MUSC specific settings including copying over scripts and a check that Harmonie setup has been run\n$SCRATCH/harmonie_releases/git/HCY46/Harmonie/util/musc/scr/musc_setup.sh -r $SCRATCH/harmonie_releases/git/HCY46/Harmonie/\nGenerate your namelist, unless you're using an idealised case with pre-defined namelists (so for ARMCU* you do not generate the namelists for example). If you wish to change the radiation scheme (RADSCHEME - RAYFM (IFS) or RAY (ACRANEB2)) or how you use aerosols (BDAER - cams or none), you need to edit ecf/config_exp.h in your expt before running musc_namelist.sh. For using NRT aerosols, they need to be included in your input files already, e.g. the MUSCIN* files should come from a 3D NRT aerosol expt.\ncd $HOME/hm_home/test_0001\n./musc_namelist.sh -h\n./musc_namelist.sh -l -i \n -[N nudging - optional]\nGet a copy of the input files\ncd $SCRATCH\nretrieve the input files from https://github.com/Hirlam/HarmonieMuscData","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Note that if you need to do experiments with 2 patches etc, ensure you derive some MUSC input files yourself using 3D HARMONIE-AROME files run with 2 patches. MUSC*REFL65* input files have only 1 patch. Changing MUSC namelists won't enable 2 patch output from a MUSC run.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Run-MUSC","page":"MUSC","title":"Run MUSC","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Musc_ref","page":"MUSC","title":"Musc_ref","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"The reference test is an X-hr experiment (change CSTOP in musc_namelist.sh if you wish to change the run length) and produces Out*.lfa files for each model time-step of the time period. ICM* files are produced at each hour.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Run your experiment\ncd $HOME/hm_home/test_0001\n./musc_run.sh -h\n./musc_run.sh -d $SCRATCH/muscCY46InputData/musc_ref -n REFL65 -i DEF [ -e ECOCLIMAP_PATH]\n# optional path for ECOCLIMAP data may be given. For musc_ref -i must be given as no \n# namelists are provided with this experiment and must be generated before musc_run.sh \n# is executed. For the idealised cases, if -i is not specified -i becomes the name of \n# the idealised case once the namelist files are copied to $HOME/hm_home/test_0001 e.g. 
\n# for armcu the namelist files become namelist_atm_armcu etc.\n","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#ARMCU","page":"MUSC","title":"ARMCU","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Note that these will not work until we can compile with FFTW.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"This is an idealized SCM test case, the \"Sixth GCSS WG-1 case (ARM—Atmospheric Radiation Measurement)\", focussing on the diurnal cycle of cumulus clouds over land (Brown et al., 2002; Lenderink et al., 2004). The input files and namelist settings have been taken from /src/validation/mitraille/namelist/L1ARO but the atmospheric namelist needed editing for use in our environment. Atmospheric and surface forcings are included in the MUSC input files and the namelists in the util/musc/test/armcu directory are set up specifically for this case and are hence not edited by musc_run.sh.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Run your experiment\ncd $HOME/hm_home/test_0001\n./musc_run.sh -h\n./musc_run.sh -d $HOME/muscCY46InputData/ARMCU_HAR -n ARMCU (There are now 4 ARMCU experiments to choose from e.g. ARMCU_EB and ARMCUs_EB are ones that use AROME namelists, ARMCU_Har and ARMCUs_Har use HARMONIE-AROME namelists. Currently, the results are a bit different.)","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#MUSC-Output","page":"MUSC","title":"MUSC Output","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/#DDH-Toolbox","page":"MUSC","title":"DDH Toolbox","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"The outputs from a MUSC run are small files in lfa format. DDH tools can be used to handle these files.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"To download the DDH toolbox, go to https://www.umr-cnrm.fr/gmapdoc/spip.php?article19 and download the tarball. Untar it and within the tools folder run ./install. Now the various \"tools\" are compiled. For example lfaminm $file shows you the max, min and mean of all the output variables in a file. lfac $file $var shows the value(s) of $var in $file e.g. lfac Out.000.0000.lfa PTS shows you surface temperature. In order to be able to use the plotting scripts below, you'll need the lfac tool in your path. ","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"For example on ATOS, I set the following paths (they may differ a bit for you depending on where you downloaded the DDH toolbox to). 
Perhaps add to your .bashrc file:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"export PATH=$HOME/ddhtoolbox/tools/lfa/:$PATH\nexport DDHI_BPS=$HOME/ddhtoolbox/ddh_budget_lists/\nexport DDHI_LIST=$HOME/ddhtoolbox/ddh_budget_lists/conversion_list","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Plot-output-time-series-from-the-MUSC-output-lfa-files","page":"MUSC","title":"Plot output time-series from the MUSC output lfa files","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"cd $HOME/hm_home/test_0001\n./musc_plot1Dts.sh -d \n\n## python based plotting scripts and \"default\" png plots \n## will be produced in $HOME/hm_home/test_0001/plots1Dts\n","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Extract-output-from-the-MUSC-output-ICM*-fa-files-and-plot-time-series-using-these","page":"MUSC","title":"Extract output from the MUSC output ICM* fa files and plot time-series using these","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"By default you get ICM* files on the hour - you can change the namelist should you require a higher frequency.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"cd $HOME/hm_home/test_0001\n./musc_convertICM2ascii.sh -l -f \n\n## Generates an OUT ascii file for each atm and sfx ICM* input file\n## ICM files have additional input not in lfa files e.g. TKE which is useful - also similar to 3D outputs\n\n./musc_plot_profiles_ICMfiles.sh -d -p -l \n","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Creating-your-own-input-files","page":"MUSC","title":"Creating your own input files","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"A converter script, musc_convert.sh, is available to extract a MUSC column from a model state file (ICMSHHARM+HHHH). 
musc_convert.sh is a Bash script that calls gl_grib_api to carry out the data conversions.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Extract-a-MUSC-input-file","page":"MUSC","title":"Extract a MUSC input file","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"cd $HOME/hm_home/test_0001\n./musc_convert.sh -d $HOME/muscCY46InputData/harm_arome/ -c extr3d -n REFIRL -l 53.5,-7.5 -t 6\nmkdir $HOME/muscCY46InputData/musc_refirl\ncp MUSCIN_REFIRL_atm.fa MUSCIN_REFIRL_sfx.fa MUSCIN_REFIRL_pgd.fa $HOME/muscCY46InputData/musc_refirl/","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Convert-MUSC-FA-to-MUSC-ASCII","page":"MUSC","title":"Convert MUSC FA to MUSC ASCII","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"cd $HOME/hm_home/test_0001\n./musc_convert.sh -c fa2ascii -d $HOME/muscCY46InputData/musc_refirl -n REFIRL\nls -ltr\ncp MUSCIN_REFIRL_atm.ascii MUSCIN_REFIRL_sfx.ascii $HOME/muscCY46InputData/musc_refirl/","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Convert-MUSC-ASCII-to-MUSC-FA","page":"MUSC","title":"Convert MUSC ASCII to MUSC FA","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"cd $HOME/hm_home/test_0001\n./musc_convert.sh -c ascii2fa -d $HOME/muscCY46InputData/musc_refirl -n REFIRL\nls -ltr","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Forcing-in-MUSC","page":"MUSC","title":"Forcing in MUSC","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"musc_convert.sh includes forcing for temperature (11), humidity (51) and wind speed (32); these numbers reappear as the IFORCE entries in the namelist excerpt below. 
","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"You may edit the following lines to include other forcing:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":" PPPKEY(1:4)%shortname = 'ws','#','#','#',\n PPPKEY(1:4)%faname = '#','SNNNFORC001','SNNNFORC002','SNNNFORC003'\n PPPKEY(1:4)%levtype = 'hybrid','hybrid','hybrid','hybrid',\n PPPKEY(1:4)%level = -1,-1,-1,-1,\n PPPKEY(1:4)%pid = 32,-1,-1,-1,\n PPPKEY(1:4)%nnn = 0,0,0,0,\n PPPKEY(1:4)%lwrite = F,T,T,T,\n IFORCE = 11,51,32,","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Further information on forcing is available here: MUSC/Forcing","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#MUSC-local-adaptation","page":"MUSC","title":"MUSC local adaptation","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/#KNMI-workstations","page":"MUSC","title":"KNMI workstations","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"The following files were added to make it possible to run MUSC on KNMI workstations:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"config-sh/config.LinuxPC-MPI-KNMI\nconfig-sh/submit.LinuxPC-MPI-KNMI\nutil/makeup/config.linux.gfortran.mpi-knmi ","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"for use with the setup script:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"./musc_setup.sh [...] -c LinuxPC-MPI-KNMI","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"In addition, the following workaround has to be applied to be able to run the REFL65 test case:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"$ git diff src/ifsaux/utilities/echien.F90\ndiff --git a/src/ifsaux/utilities/echien.F90 b/src/ifsaux/utilities/echien.F90\nindex 55d5ce94e..694c87d83 100644\n--- a/src/ifsaux/utilities/echien.F90\n+++ b/src/ifsaux/utilities/echien.F90\n@@ -532,7 +532,7 @@ IF((KINF == 0).OR.(KINF == -1).OR.(KINF == -2).OR.(KINF == -3)) THEN\n & 'LEVEL ',JFLEV,' : ',&\n & 'FILE = ',ZVALH(JFLEV), ' ; ARGUMENT = ',PVALH(JFLEV)\n IERRA=1\n- IERR=1\n+! 
IERR=1\n ENDIF\n IF(ABS(ZVBH(JFLEV)-PVBH(JFLEV)) > PEPS) THEN\n WRITE(KULOUT,*) ' VERTICAL FUNCTION *B* MISMATCH ON ',&","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Then you are ready to compile:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"remove the file experimentislocked from the experiment directory.\nremove the directory with your previous build (if any).\nstart the compile with the musc_compile.sh script","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"When starting the MUSC run, add the PATH to mpirun and the libraries:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"export PATH=$PATH:/usr/lib64/openmpi/bin\nexport LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/lib64/openmpi/lib\n./musc_run.sh [...]","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#MUSC-FAQ","page":"MUSC","title":"MUSC FAQ","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"If there is an error, what files do I look in? NODE.001_01 and lola in your output directory.\nHow do I handle the output files? The output files are of the form Out.XXX.XXXX and appear in your output directory. They are in lfa format and can be handled using ddh tools. See the bash script musc_plot1Dts.sh for ideas. There are also ICM* lfa output files, which are also handy for plotting profiles - use musc_convertICM2ascii.sh to convert these files to ASCII and musc_plot_profiles_ICMfiles.sh to plot some profiles e.g. TKE, cloud liquid etc.\nI ran a different idealised case but did not get different results? The likely reason for this is that you did not delete the namelists from your experiment directory. If the namelists are there, the musc_run.sh script neither creates them nor copies them from the repository.\nHow do I create a new idealised case? This is not straightforward but the following was used to create the ASTEX cases in cy43 using info from cy38: https://www.overleaf.com/7513443985ckqvfdcphnng\nHow can I access a list of MUSC output parameters? Ensure you have the ddhtoolbox compiled. Then use lfaminm $file on any of your output files and it will show what is there. To look at a particular variable try lfac $file $parameter e.g. lfac $file PTS (for surface temperature). You can redirect the values to an ASCII file for ease of use (e.g. lfac $file PTS > $ASCIIfile). \nIs MUSC similar to the full 3D model version - is the physics the same? Yes, if you check out develop then you have MUSC up-to-date with that.\nDo I need to recompile the model if I modify code? Yes, if you modify code in a single file you must recompile the code but do not delete the original compiled model first. This will recompile relatively quickly. If you modify code in multiple files and you change what variables are passed between files, then you must delete your original compiled model and recompile the code. This will take longer to recompile. ","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#MUSC-variable-names","page":"MUSC","title":"MUSC variable names","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"A list of variable names found in the MUSC lfa output files can be found here. 
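A quick way to inventory what a given run actually produced is the toolbox itself (lfaminm on any output file); a single variable can also be pulled out as a plottable time series with a small sketch like the following, assuming lfac is in your PATH as set up above and you run it in the output directory:\nfor f in Out.*.lfa; do lfac \"$f\" PTS; done > pts_timeseries.txt\n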
Please note that this is not a complete list of MUSC output parameters (yet). The variables in regular ICMSH... fa output are documented here","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Outstanding-Issues","page":"MUSC","title":"Outstanding Issues","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"ARMCU and Jenny's cases run without surface physics, radiation etc. and hence return NaNs in apl_arome. To circumvent this on ECMWF, we needed to compile less strictly. This needs to be investigated further.\nThe ASTEX cases currently do not run on ECMWF but work perfectly at Met Eireann - debugging needed.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#MUSC-using-EMS","page":"MUSC","title":"MUSC using EMS","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"These instructions have moved to MUSC EMS","category":"page"},{"location":"EPS/SPPImplementation/#The-SPP-implementation-in-IAL-and-HARMONIE","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"","category":"section"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"The Stochastically Perturbed Parameterizations scheme (SPP) introduces stochastic perturbations to values of chosen closure parameters representing efficiencies or rates of change in parameterized atmospheric (sub)processes. See here for more information. See the main SPP documentation for selection of settings.","category":"page"},{"location":"EPS/SPPImplementation/#Controling-routines","page":"The SPP implementation in IAL and HARMONIE","title":"Controlling routines","text":"","category":"section"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"The SPP data structure and logic are controlled by the following routines","category":"page"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"Routine Description\nsrc/arpifs/module/spp_mod.F90 Defines SPP scheme types TSPP_CONFIG_PAR and TSPP_CONFIG for the parameter config and the overall config respectively\nsrc/arpifs/module/spp_mod_type.F90 Harmonie specific data types TSPP_CONFIG_TYPE, ATM_SPP_VARS, SFX_VARS, control and the methods CLEAR_SSP_TYPE, SET_SPP_TYPE, APPLY_SPP, APPLY_SPP_SURFEX, DIA_SPP, SET_ALL_ATM_SPP, SET_ALL_SFX_SPP, CLEAR_ALL_ATM_SPP, CLEAR_ALL_SFX_SPP\nsrc/surfex/SURFEX/modd_sfx_spp.F90 SURFEX specific data types, control and methods CLEAR_SFX_SPP, SET_SFX_SPP, APPLY_SFX_SPP, CLEAR_ALL_SFX_SPP, SPP_MASK, SPP_DEMASK, PREP_SPP_SFX. Partly duplicates spp_mod_type.F90\nsrc/arpifs/namelist/namspp.nam.h The SPP namelist\nsrc/arpifs/setup/get_spp_conf.F90 Sets up defaults and reads the SPP namelist. 
Initialises the SPG parameters\nsrc/arpifs/phys_dmn/ini_spp.F90 Initialises the pattern used for SPP\nsrc/arpifs/phys_dmn/evolve_spp.F90 Control routine for pattern propagation\nsrc/mse/internals/aroset_spp.F90 Initialises the SURFEX part of SPP","category":"page"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"Note that the control routines shared with IFS will be totally rewritten, and much neater, with the introduction of CY49T1. See e.g. spp_def_mod.F90, spp_gen_mod.F90 ","category":"page"},{"location":"EPS/SPPImplementation/#SPG-routines","page":"The SPP implementation in IAL and HARMONIE","title":"SPG routines","text":"","category":"section"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"The pattern used for SPP within HARMONIE is SPG and the code for this is found under src/utilities/spg. For the propagation of the pattern we find the routine EVOLVE_ARP_SPG in src/arp/module/spectral_arp_mod.F90.","category":"page"},{"location":"EPS/SPPImplementation/#Applying-the-patterns","page":"The SPP implementation in IAL and HARMONIE","title":"Applying the patterns","text":"","category":"section"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"In apl_arome.F90 the HARMONIE specific data types are initialised with SET_ALL_ATM_SPP and SET_ALL_SFX_SPP. These routines group the different parameters and connect them to a pattern and to the correct diagnostic field EZDIAG if requested.","category":"page"},{"location":"EPS/SPPImplementation/#Applying-the-patterns-in-the-upper-air-part","page":"The SPP implementation in IAL and HARMONIE","title":"Applying the patterns in the upper air part","text":"","category":"section"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"In the routine where a specific parameter is used, the pattern is applied by calling APPLY_SPP. 
This is done for each parameter according to the table below.","category":"page"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"Perturbation Routine\nRADGR src/arpifs/phys_dmn/apl_arome.F90\nRADSN src/arpifs/phys_dmn/apl_arome.F90\nRFAC_TWOC src/arpifs/phys_dmn/vdfexcuhl.F90\nRZC_H src/arpifs/phys_dmn/vdfexcuhl.F90\nRZL_INF src/arpifs/phys_dmn/vdfexcuhl.F90\nRZMFDRY src/arpifs/phys_dmn/vdfhghtnhl.F90\nRZMBCLOSURE src/arpifs/phys_dmn/vdfhghtnhl.F90\nCLDDPTHDP src/arpifs/phys_dmn/vdfhghtnhl.F90\nRLWINHF src/arpifs/phys_radi/recmwf.F90\nRSWINHF src/arpifs/phys_radi/recmwf.F90\nPSIGQSAT src/mpa/micro/internals/condensation.F90\nICE_CLD_WGT src/mpa/micro/internals/condensation.F90\nICENU src/mpa/micro/internals/rain_ice_old.F90\nKGN_ACON src/mpa/micro/internals/rain_ice_old.F90\nKGN_SBGR src/mpa/micro/internals/rain_ice_old.F90\nALPHA src/mpa/micro/internals/rain_ice_old.F90\nRZNUC src/mpa/micro/internals/rain_ice_old.F90","category":"page"},{"location":"EPS/SPPImplementation/#Applying-the-patterns-in-SURFEX","page":"The SPP implementation in IAL and HARMONIE","title":"Applying the patterns in SURFEX","text":"","category":"section"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"As SURFEX should have no dependencies on external modules, the data is copied into the internal SURFEX SPP data structure in AROSET_SPP, called from ARO_GROUND_PARAM.","category":"page"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"For SURFEX the parameter table looks like","category":"page"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"Perturbation Routine\nCV src/surfex/SURFEX/coupling_isban.F90\nLAI src/surfex/SURFEX/coupling_isban.F90\nRSMIN src/surfex/SURFEX/coupling_isban.F90","category":"page"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"In SURFEX we also have to pack/unpack the data arrays to only use the active points for a specific tile or patch. This is done in the SPP_MASK and SPP_DEMASK routines found in src/surfex/SURFEX/modd_sfx_spp.F90 and called from src/surfex/SURFEX/coupling_surf_atmn.F90. At the time of writing, returning the diagnostics of the pattern doesn't work satisfactorily.","category":"page"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"The additional code changes done for SPP in SURFEX can be viewed here","category":"page"},{"location":"DataAssimilation/NWECHKEVO/#NWECHKEVO","page":"NWECHKEVO","title":"NWECHKEVO","text":"","category":"section"},{"location":"DataAssimilation/NWECHKEVO/#Introduction","page":"NWECHKEVO","title":"Introduction","text":"","category":"section"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"The new utility NWECHKEVO was introduced in order to make the generation of diagnostics for the study of spin-up effects in the dynamics more efficient. The utility CHKEVO turned out to slow down the forecast run to impractical times. NWECHKEVO produces timeseries for the variables log(Ps), horiz. vorticity, horiz. 
divergence, vertical divergence, pressure departure and temperature for the first 180 timesteps of integration at timestep resolution. These timeseries are produced at selected points within the domain and at all levels for the last five upper-air variables (HVor, HDiv, VDiv, PD and T). ","category":"page"},{"location":"DataAssimilation/NWECHKEVO/#Preparations.-NAMCHK-namelist","page":"NWECHKEVO","title":"Preparations. NAMCHK namelist","text":"","category":"section"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"The user must select a list of points at which the diagnostics are to be generated. The coordinates are given in GPx and GPy coordinates, not geographical coordinates. These co-ordinates are then introduced in the namelist NAMCHK as in the following example ","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"NAMCHK=>{\n 'NGPCHK' => '17,',\n 'NXCHK(1:17)' => '263,335,447,525,606,390,420,540,644,333,509,329,388,480,266,259,271,',\n 'NYCHK(1:17)' => '462,472,469,398,388,406,325,284,300,293,243,215,167,178,358,279,200,',\n },","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"NB: These points correspond to the locations of 17 different weather radars in the domain ","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"'IBERIAxxm_2.5'=>{\n 'TSTEP' => '60',\n 'NLON' => '800',\n 'NLAT' => '648',\n 'LONC' => '-4.5',\n 'LATC' => '40.0',\n 'LON0' => '-4.5',\n 'LAT0' => '40.0',\n 'GSIZE' => '2500.',\n },","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"The default value for LNWECHKEVO is set to FALSE in suechk.F90. To enable this option, set LNWECHKEVO=.TRUE. in ecf/config_exp.h. This setting will be carried over to scr/Forecast at the time of namelist_forecast specification. NWECHKEVO takes priority over ECHKEVO, that is, when LNWECHKEVO=.TRUE., LECHKEVO is set to FALSE no matter what is specified at the namelist level (a warning message will appear in the logs). This is done to avoid conflicts between the previous and the new utilities. If the user wants the previous method, just activate LECHKEVO and make sure not to activate LNWECHKEVO.","category":"page"},{"location":"DataAssimilation/NWECHKEVO/#Important-Info","page":"NWECHKEVO","title":"Important Info","text":"","category":"section"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"NWECHKEVO speeds up the generation of diagnostics by minimizing MPI overhead. During the set-up, it is determined which MPI-task processes each point given via NAMCHK. The internal arrays that contain this info are given a size such that no more than 10 points can be handled by each MPI-task. Therefore, when running with a small number of MPI-tasks it is possible that some of the points in NAMCHK are ignored (e.g. a single MPI-task could handle at most 10 of the 17 points above). This situation however is very unlikely because the usual number of MPI-tasks is quite big. 
Nonetheless, the parameter NWJPGPCHK (in module YOMCHK) can be given a bigger value if necessary.","category":"page"},{"location":"DataAssimilation/NWECHKEVO/#Modifications-in-scr/Forecast","page":"NWECHKEVO","title":"Modifications in scr/Forecast","text":"","category":"section"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"For ease of use, NWECHKEVO dumps the results in text format to the standard output (the NODE* files). The size of these files remains manageable unless the number of points in NAMCHK is very big. As the diagnostics are generated avoiding MPI communications, the standard output for all tasks must be activated by using the NOUTPUT parameter in NAMPAR0. This is the reason why scr/Forecast must be modified. ","category":"page"},{"location":"DataAssimilation/NWECHKEVO/#Results","page":"NWECHKEVO","title":"Results","text":"","category":"section"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"The results can be easily obtained by grepping them out of the NODE files. First, in order to know which NODE file contains a given point, we can run on NODE.001_01","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"grep SETUPNEWECHKEVO NODE.001_01","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"obtaining a table like this","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"SETUPNEWECHKEVO: JPROC 120 ISETA= 8 ISETB= 8 ISETW= 8 ISETV= 8\nSETUPNEWECHKEVO: IGP= 13 (nychk,nxchk)= 167 388 CC in chunk= 1032\nSETUPNEWECHKEVO: JPROC 138 ISETA= 9 ISETB= 10 ISETW= 9 ISETV= 10\nSETUPNEWECHKEVO: IGP= 14 (nychk,nxchk)= 178 480 CC in chunk= 527","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"which indicates that GP number 13 (co-ordinates 167,388) in the NAMCHK list is allocated to MPI-task 120. The point has ordinate number 1032 within the domain chunk assigned to this MPI-task. It outputs to NODE.008_08","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"Once we know the NODE file for this GP, we grep out again the results. 
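The whitespace-separated columns (documented below) can be fed straight into a plotting tool; a minimal sketch for the surface-pressure series of this GP, with the column numbers taken from the description that follows (gnuplot is just one option):\ngrep NWECHKEVO:PS NODE.008_08 | awk '{print $3, $5}' > ps_13.txt\ngnuplot -persist -e \"plot 'ps_13.txt' using 1:2 with lines\"\n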
For PS","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"grep NWECHKEVO:PS NODE.008_08","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"NWECHKEVO:PS 13 000 0.11505382282124E+02 0.99248519555856E+05\nNWECHKEVO:PS 13 001 0.11505380867601E+02 0.99248379166582E+05\nNWECHKEVO:PS 13 002 0.11505354069965E+02 0.99245719580317E+05\nNWECHKEVO:PS 13 003 0.11505387331977E+02 0.99249020747545E+05\nNWECHKEVO:PS 13 004 0.11505409155285E+02 0.99251186713122E+05\nNWECHKEVO:PS 13 005 0.11505330599600E+02 0.99243390274322E+05\nNWECHKEVO:PS 13 006 0.11505214514108E+02 0.99231870225232E+05\nNWECHKEVO:PS 13 007 0.11505186261496E+02 0.99229066705312E+05\nNWECHKEVO:PS 13 008 0.11505301938506E+02 0.99240545891039E+05\nNWECHKEVO:PS 13 009 0.11505222637195E+02 0.99232676297636E+05\nNWECHKEVO:PS 13 010 0.11505173119706E+02 0.99227762666337E+05","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"the second column is the GP number, the third the time step, the fourth is log(Ps) and the fith is fo Ps (Pa)","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"For the upper-air variables we do","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"grep 'NWECHKEVO:UA 13' NODE.008_08 ","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"NWECHKEVO:UA 13 000 001 0.14364086697030E-04 -0.50805426278410E-05 0.22599560795742E+03 0.00000000000000E+00 0.80040376159261E-07\nNWECHKEVO:UA 13 000 002 0.33741231171738E-05 0.97134353537265E-05 0.21290057631960E+03 0.00000000000000E+00 0.29001061928223E-06\nNWECHKEVO:UA 13 000 003 0.69003223306262E-05 -0.10070828988605E-04 0.21384249855800E+03 0.00000000000000E+00 0.47109613483406E-06\nNWECHKEVO:UA 13 000 004 -0.30497765593290E-06 0.82334865627784E-05 0.21374844679188E+03 0.00000000000000E+00 -0.26145222191521E-06\nNWECHKEVO:UA 13 000 005 0.26070697062412E-04 -0.12291493034515E-04 0.21545103834209E+03 0.00000000000000E+00 0.39088530137662E-05\nNWECHKEVO:UA 13 000 006 0.15371311363862E-04 0.52048057489868E-05 0.21791733622236E+03 0.00000000000000E+00 0.81362351433664E-05\nNWECHKEVO:UA 13 000 007 0.51032970986510E-04 -0.13111824709755E-04 0.22004808804352E+03 0.00000000000000E+00 0.15756687491581E-04\nNWECHKEVO:UA 13 000 008 -0.19648225032526E-04 -0.10918787276801E-04 0.22163556827039E+03 0.00000000000000E+00 0.64289394144994E-05\nNWECHKEVO:UA 13 000 009 -0.17176145579185E-05 0.45899398495276E-04 0.22275608584458E+03 0.00000000000000E+00 -0.10037800019117E-04\n....\nNWECHKEVO:UA 13 000 056 0.13986028555310E-02 -0.66909930879866E-03 0.28628543762220E+03 0.00000000000000E+00 0.54829685721891E-03\nNWECHKEVO:UA 13 000 057 0.14038615170298E-02 -0.75779689206785E-03 0.28663735655175E+03 0.00000000000000E+00 0.59806781225837E-03\nNWECHKEVO:UA 13 000 058 0.13261896477891E-02 -0.74616001898282E-03 0.28698938510509E+03 0.00000000000000E+00 0.49588753382099E-03\nNWECHKEVO:UA 13 000 059 0.14489950905375E-02 -0.74227210464472E-03 0.28734520941903E+03 0.00000000000000E+00 0.27710629383431E-03\nNWECHKEVO:UA 13 000 060 0.17744491137974E-02 -0.61965914425958E-03 0.28761932093752E+03 0.00000000000000E+00 0.40814002634823E-03\nNWECHKEVO:UA 13 000 061 0.19899360944093E-02 -0.54623343124852E-03 0.28790566719217E+03 0.00000000000000E+00 0.47915020836391E-03\nNWECHKEVO:UA 13 000 
062 0.23402553668209E-02 -0.78436980306753E-03 0.28827960078154E+03 0.00000000000000E+00 0.37896218758797E-03\nNWECHKEVO:UA 13 000 063 0.20467568555637E-02 -0.11207508904165E-02 0.28854880261461E+03 0.00000000000000E+00 0.46291124700252E-03\nNWECHKEVO:UA 13 000 064 0.16656728750091E-02 -0.10495856132860E-02 0.28883596008959E+03 0.00000000000000E+00 0.25911350987737E-03\nNWECHKEVO:UA 13 000 065 0.14631444722784E-02 -0.94533311737612E-03 0.28903858779818E+03 0.00000000000000E+00 0.64914638106075E-05","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"which gives at timestep 0 (third col.) the profile (model level is the fourth column) for HVor (s-1), HDiv (s-1), T (K), PD (Pa for NPDVAR=2) and VD (units depend on the NVDVAR definition) ","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"Consecutive timesteps follow","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"NWECHKEVO:UA 13 001 001 0.14391950814966E-04 -0.10126750142077E-04 0.22601300427477E+03 0.26233794693211E-05 0.13011272170719E-05\nNWECHKEVO:UA 13 001 002 0.35975280461999E-05 0.30191811529161E-05 0.21293465156805E+03 -0.29989147375432E-04 0.10063954562282E-06\nNWECHKEVO:UA 13 001 003 0.79264193785264E-05 -0.15031046611816E-04 0.21385134119954E+03 -0.33856415073342E-04 0.42661347477312E-05\nNWECHKEVO:UA 13 001 004 0.21090675053822E-05 0.31713133370971E-05 0.21377935010403E+03 -0.40445121858208E-04 -0.54989449665528E-05\nNWECHKEVO:UA 13 001 005 0.30451493480920E-04 -0.18284403001908E-04 0.21545646796919E+03 -0.42130887042681E-04 0.14684047934687E-04\n....","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"up to timestep 180 (hard-coded; the first 3 hours if the timestep is 1 minute)","category":"page"},{"location":"DataAssimilation/NWECHKEVO/#Plotting","page":"NWECHKEVO","title":"Plotting","text":"","category":"section"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"The results are easily plotted with any graphing utility (e.g. gnuplot)","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#ecmwfatos","page":"Running on Atos","title":"Running Harmonie on Atos","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Before-you-start","page":"Running on Atos","title":"Before you start","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"New Harmonie users will require membership of the accord user group at ECMWF. Please contact the HIRLAM System Manager, Daniel Santos, to make this request on your behalf. Furthermore, ECMWF will have to set up a virtual machine for you to run the ecFlow server on (see here). 
Finally, make sure that your login shell is set to /bin/bash.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"tip: Tip\nTo share your experiments with the members of the accord group do: chmod 755 $HOME $SCRATCH $PERM $HPCPERM\nchgrp -R accord $HOME/hm_home $SCRATCH/hm_home $PERM/HARMONIE $HPCPERM/hm_home\nchmod g+s $HOME/hm_home $SCRATCH/hm_home $PERM/HARMONIE $HPCPERM/hm_home\nThe chmod g+s sets the SGID bit, which will ensure that new experiments created in hm_home will automatically be in the accord group.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Configure-your-experiment-(option-1)","page":"Running on Atos","title":"Configure your experiment (option 1)","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Create an experiment directory under $HOME/hm_home and use the master script Harmonie to set up a minimum environment for your experiment. \nmkdir -p $HOME/hm_home/my_exp\ncd $HOME/hm_home/my_exp\nln -sf /path/to/git/repository/config-sh/Harmonie\n./Harmonie setup -r /path/to/git/repository -h ECMWF.atos\nwhere \n-r Specifies the path to the git repository. Make sure you have checked out the correct branch. \n-h tells which configuration files to use. At ECMWF config.ECMWF.atos is the default one. For harmonie-43h2.2 use -h config.aa\ntip: Tip\nAtos-tagged versions of Harmonie are available in ~hlam/harmonie_release/git/tags/\nln -sf ~hlam/harmonie_release/git/tags//config-sh/Harmonie \nHarmonie setup -r ~hlam/harmonie_release/git/tags/ -h ECMWF.atos","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"This would give you the default setup, which currently is AROME physics with CANARI+OI_MAIN surface assimilation and 3DVAR upper-air assimilation with 3h cycling on a domain covering Denmark using 2.5km horizontal resolution and 65 levels in the vertical.\nNow you can edit the basic configuration file ecf/config_exp.h to configure your experiment scenarios. Modify specifications for domain, data locations, settings for dynamics, physics, coupling host model etc. Read more about the options here. You can also use some of the predefined configurations by calling Harmonie with the -c option:\n./Harmonie setup -r PATH_TO_HARMONIE -h YOURHOST -c CONFIG -d DOMAIN\nwhere CONFIG is one of the setups defined in scr/Harmonie_configurations.pm. If you give -c without an argument, or with a non-existing configuration, a list of configurations will be printed.\nIn some cases you might have to edit the general system configuration file config-sh/config.ECMWF.atos. See here for further information. \nThe rules for how to submit jobs on Atos are defined in config-sh/submit.ECMWF.atos. See here for further information.\nIf you experiment with data assimilation you might also want to change settings in scr/include.ass.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Configure-your-experiment-using-github-repo-(option-2)","page":"Running on Atos","title":"Configure your experiment using github repo (option 2)","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"A disadvantage of option 1 for version control in git is that the code is located in two places. 
Instead you can: ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Make a fork of the Harmonie repository. From now on we assume your fork will be located at https://github.com//Harmonie.\nLog in to ATOS as usual and perform the following commands:","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"mkdir -p $PERM/hm_home && cd $PERM/hm_home/\ngit clone -b git@github.com:/Harmonie.git \ncd \ngit checkout -b \nexport PERL5LIB=$(pwd)\nconfig-sh/Harmonie setup -r $(pwd) -h ECMWF.atos","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Here the git clone command clones a specific branch into a directory called . git checkout with the -b flag then creates a new branch for you to work on. Call it something meaningful. Then the experiment is set up as usual, but using your local repository as the reference.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Then you do some work, and when you are ready to commit something you do","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"git add \ngit commit --author \"Name \" -m \"Commit message\"\ngit push --set-upstream origin ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Specifying --set-upstream origin to git push is only necessary the first time you push your branch to the remote. When ready you can now go to GitHub and make a pull request to the Harmonie repository from your fork.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Start-your-experiment","page":"Running on Atos","title":"Start your experiment","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Launch the experiment by giving the start time, DTG, and the end time, DTGEND","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"./Harmonie start DTG=YYYYMMDDHH DTGEND=YYYYMMDDHH\n# e.g., ./Harmonie start DTG=2022122400 DTGEND=2022122406","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"If successful, Harmonie will identify your experiment name, start building your binaries and run your forecast. If not, you need to examine the ECFLOW log file $HM_DATA/ECF.log. $HM_DATA is defined in your Env_system file. At ECMWF $HM_DATA=$SCRATCH/hm_home/$EXP, where $EXP is your experiment name. 
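For example, to follow the log of a running experiment called my_exp (the name is purely illustrative):\ntail -f $SCRATCH/hm_home/my_exp/ECF.log\n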
Read more about where things happen further down.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Continue-your-experiment","page":"Running on Atos","title":"Continue your experiment","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"If your experiment has successfully completed and you would like to continue for another period, you should write","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"./Harmonie prod DTGEND=YYYYMMDDHH","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"By using prod you tell the system that you are continuing the experiment and using the first guess from the previous cycle. The start date is taken from a file progress.log created in your $HOME/hm_home/my_exp directory. If you had used start, the initial data would have been interpolated from the boundaries, in other words a cold start.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Start/Restart-of-ecflow_ui","page":"Running on Atos","title":"Start/Restart of ecflow_ui","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"To start the graphical window for ECFLOW","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"./Harmonie mon","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"The graphical window runs independently of the experiment and can be closed and restarted again with the same command. With the graphical interface you can control each task and view its logfiles. ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Making-local-changes","page":"Running on Atos","title":"Making local changes","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Very soon you will find that you need to make changes in a script or in the source code. Once you have identified which file to edit, you put it into the current $HOME/hm_home/my_exp directory, with exactly the same subdirectory structure as in the reference, e.g. if you want to modify a namelist setting ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"./Harmonie co nam/harmonie_namelists.pm # retrieve default namelist harmonie_namelists.pm\nvi nam/harmonie_namelists.pm # modify the namelist","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Next time you run your experiment the changed file will be used. You can also make changes in a running experiment. Make the change you wish and rerun the InitRun task from the viewer. The InitRun task copies all files from your local experiment directory to your working directory $HM_DATA. Once your InitRun task is complete you can rerun the task you are interested in. 
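The same checkout mechanism works for scripts and source code, e.g. (the file below is chosen only as an illustration):\n./Harmonie co src/surfex/SURFEX/tsz0.F90 # retrieve the file into your experiment directory\nvi src/surfex/SURFEX/tsz0.F90 # edit the local copy\n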
If you wish to recompile something you will also have to rerun the Build tasks.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Issues","page":"Running on Atos","title":"Issues","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Harmonie experiments stop at ECMWF (Atos) due to a $PERM mounting problem: https://github.com/Hirlam/Harmonie/issues/628","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Account","page":"Running on Atos","title":"Account","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"In order to change the billing account, open Env_submit and find the definition of scalar_job. Then add a line like","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"'ACCOUNT' => $submit_type.' --account=account_name' to the definition of the dictionary.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Directory-structure","page":"Running on Atos","title":"Directory structure","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#SCRATCH","page":"Running on Atos","title":"$SCRATCH","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"In $SCRATCH/hm_home/$EXP you will find ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Directory Content\nbin Binaries\nlib Source code synced from $HM_LIB and compiled code\nlib/src Object files and source code (if you build with makeup, set by MAKEUP_BUILD_DIR)\nlib/util Utilities such as makeup, gl_grib_api or oulan\nclimate Climate files\nYYYYMMDD_HH Working directory for the current cycle. If an experiment fails it is useful to check the IFS log file, NODE.001_01, in the working directory of the current cycle. The failed job will be in a directory called something like Failed_this_job.\narchive Archived files. A YYYY/MM/DD/HH structure for per-cycle data. ICMSHHARM+NNNN and ICMSHHARM+NNNN.sfx are atmospheric and surfex forecast output files\nextract Verification input data. This is also stored on the permanent disk $HPCPERM/HARMONIE/archive/$EXP/parchive/archive/extract\nECF.log Log of job submission","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#ECFS","page":"Running on Atos","title":"ECFS","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Since $SCRATCH is cleaned regularly we need to store data permanently on ECFS, the EC file system, as well. There are two options for ECFS, ectmp and ec. The latter is permanent storage and the first one is cleaned after 90 days. Which one you use is defined by the ECFSLOC variable. To view your data, type e.g.\nels ectmp:/$USER/harmonie/my_exp\nThe level of archiving depends on ARSTRATEGY in ecf/config_exp.h. 
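Files can be copied back from ECFS with ecp in the same way as they are listed with els, e.g. (the cycle and file name are hypothetical):\necp ectmp:/$USER/harmonie/my_exp/2022/12/24/00/ICMSHHARM+0003 .\n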
The default setting will give you one YYYY/MM/DD/HH structure of per-cycle data containing:\nSurface analysis, ICMSHANAL+0000[.sfx]\nAtmospheric analysis result MXMIN1999+0000\nBlending between surface/atmospheric analysis and cloud variable from the first guess LSMIXBCout\nICMSHHARM+NNNN and ICMSHHARM+NNNN.sfx are atmospheric and surfex forecast model state files\nPFHARM* files produced by the inline postprocessing\nICMSHSELE+NNNN.sfx are surfex files with selected output\nGRIB files for fullpos and surfex select files\nLogfiles in a tar file logfiles.tar\nObservation database and feedback information in odb_stuff.tar.\nExtracted files for obsmon in sqlite.tar\nClimate files are stored in the climate directory\nOne directory each for vfld and vobs data, respectively, for verification data","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#PERM","page":"Running on Atos","title":"$PERM","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Directory Content\nHARMONIE/$EXP ecflow log and job files\nhm_lib/$EXP/lib Scripts, config files, ecf and suite, source code (not compiled, set by $HM_LIB). Reference with experiment's changes on top","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#HPCPERM","page":"Running on Atos","title":"$HPCPERM","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"In $HPCPERM/hm_home/$EXP","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Directory Content\nparchive/archive/extract/ Verification input data.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#HOME-on-ecflow-gen-{user}-001","page":"Running on Atos","title":"$HOME on ecflow-gen-${user}-001","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Directory Content\necflow_server/ ecFlow checkpoint and log files","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Cleanup-of-old-experiments","page":"Running on Atos","title":"Cleanup of old experiments","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"danger: Danger\nThese commands may not work properly in all versions. Do not run the removal before you are sure it is OK.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Once you have completed your experiment you may wish to remove code, scripts and data from the disks. Harmonie provides some simple tools to do this. 
First check the content of the different disks by","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Harmonie CleanUp -ALL","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Once you have convinced yourself that this is OK you can proceed with the removal.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Harmonie CleanUp -ALL -go ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"If you would like to exclude the data stored on e.g. ECFS (at ECMWF) or, in more general terms, stored under HM_EXP (as defined in Env_system), you run ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Harmonie CleanUp -d","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"to list the directories intended for cleaning. Again, convince yourself that this is OK and proceed with the cleaning by","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Harmonie CleanUp -d -go","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"You can always remove the data from ECFS directly by running e.g.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"erm -R ec:/YOUR_USER/harmonie/EXPERIMENT_NAME ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"or","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"erm -R ectmp:/YOUR_USER/harmonie/EXPERIMENT_NAME ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"For more information about cleaning with Harmonie read here\nFor more information about the ECFS commands read here","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Debugging-Harmonie-with-ARM-DDT","page":"Running on Atos","title":"Debugging Harmonie with ARM DDT","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Follow the instructions here. 
Run the DDT client on your personal computer or end-user device. ","category":"page"},{"location":"EPS/SLAF/Get_pertdia.pl.pm/#Get_pertdia","page":"Get_pertdia","title":"Get_pertdia","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/#Run-MUSC-with-EMS","page":"MUSC EMS","title":"Run MUSC with EMS","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"These instructions require the use of dev-CY46h1.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"If you find any issues with any of the instructions or scripts, feel free to notify Emily Gleeson (emily.gleesonATmet.ie) and Eoin Whelan (eoin.whelanATmet.ie)","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"In this section a description of how to install and run MUSC using EMS is provided. This is based on compilation and execution in an Ubuntu 20.04 container (tested using Apptainer on the ECMWF Atos HPC) and use of the EMS system to execute MUSC and convert the output to NetCDF. EMS is primarily developed by Romain Roehrig (Météo France) https://github.com/romainroehrig/EMS.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/#Start-your-container","page":"MUSC EMS","title":"Start your container","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"Log in to hpc-login on the Atos\nLoad the Apptainer module and start the Ubuntu 20.04 container:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"module load apptainer\n/home/dui/musc_ubuntu.sif","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"Further details concerning Apptainer on the Atos are available here","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/#Compile-the-code","page":"MUSC EMS","title":"Compile the code","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"I (Eoin) have not had time to sort out compilation using CMake, but the following instructions provide a minimalist approach to compiling the code using makeup. These instructions depend on you having a clone or copy of Harmonie (dev-46h1) in your $PERM directory on Atos, where GHUSER is your Github username.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"cd $PERM\nGHUSER=your_github_username\ngit clone git@github.com:$GHUSER/Harmonie.git harmonie_git/$GHUSER/dev-CY46h1 -b dev-CY46h1\n\n(Ensure that your fork is up-to-date before doing this or else take the code from the main\nCY46 repo. Note that to run GABLS1 you need to use two updated surfex subroutines `tsz0.F90`\nand `read_pgd_tsz0_parn.F90`. 
You can copy these files from util/musc/patches as follows:\n\ncp $PERM/harmonie_git/$GHUSER/dev-CY46h1/util/musc/patches/tsz0.F90 $PERM/harmonie_git/$GHUSER/dev-CY46h1/src/surfex/SURFEX/tsz0.F90\ncp $PERM/harmonie_git/$GHUSER/dev-CY46h1/util/musc/patches/read_pgd_tsz0_parn.F90 $PERM/harmonie_git/$GHUSER/dev-CY46h1/src/surfex/SURFEX/read_pgd_tsz0_parn.F90\nor by using git apply gabls.patch).\n","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"Compile the code in your code checkout/copy:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"cd $PERM/harmonie_git/$GHUSER/dev-CY46h1\nEXP=$(basename $(git symbolic-ref --short HEAD))\n. config-sh/config.ubuntu20_nompi\nutil/makeup/build -n 4 config.${HARMONIE_CONFIG}\nmkdir ${EXP}\nmv makeup.${HARMONIE_CONFIG} bin configlog.1 makelog.1 ${EXP}/","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/#Install-EMS","page":"MUSC EMS","title":"Install EMS","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"The following instructions provide details on how to download a HIRLAM version of EMS and install it locally in your own account:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"cd $HOME\ngit clone git@github.com:ewhelan/EMS.git -b hirlam EMS_git/EMS\ncd EMS_git/EMS/\nmkdir build\ncd build/\nexport EMS_DIR=$HOME/metapp/ems\ncmake .. -DCMAKE_INSTALL_PREFIX=$EMS_DIR && make && ctest && make install\nexport PATH=${EMS_DIR}/bin:$PATH\nexport PYTHONPATH=${EMS_DIR}","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"note: Note\nFor GABLS1 in prep_nam_sfx.py_orig the following lines need to be commented out:\nnam[nn]['XUNIF_CLAY'] = ['1.']\nnam[nn]['XUNIF_SAND'] = ['0.']\nnam[nn]['XUNIF_RUNOFFB'] = ['0.5']\n\nnam[nn]['XHUG_SURF'] = ['-10.']\nnam[nn]['XHUG_ROOT'] = ['-10.']\nnam[nn]['XHUG_DEEP'] = ['-10.']\n","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/#Run-EMS","page":"MUSC EMS","title":"Run EMS","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"Here are some instructions on how to use EMS to execute idealised cases","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"export EMS_DIR=$HOME/metapp/ems\nexport PATH=${EMS_DIR}/bin:$PATH\nexport PYTHONPATH=${EMS_DIR}\nmkdir $HOME/ems_exec\ncd $HOME/ems_exec\ncp ${EMS_DIR}/share/config/config_46h1_HARMAROME_DEV.py .\n### edit this file to point to your binaries\nems_list_cases.py\nexport PYTHONPATH=$(pwd):$PYTHONPATH\n# MUSC.py -config config_46h1_HARMAROME_DEV.py -case $CASE -subcase $SUBCASE\nMUSC.py -config config_46h1_HARMAROME_DEV.py -case ARMCU -subcase REF","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"Output and log files can be found in $HOME/ems_exec/simulations/46t1/46h1_HARMAROME_DEV/${CASE}/${SUBCASE}","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/#EMS-Cases","page":"MUSC EMS","title":"EMS Cases","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"The table below lists the 
cases available in EMS and the results of early tests.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"Case Status Reference(s)\nGABLS1 REF ❌[1] \nGABLS1 MESONH ❌[1] \nGABLS4 STAGE3 ❌[1] \nGABLS4 STAGE3-SHORT ❌[1] \nAYOTTE 00SC ✔️ \nAYOTTE 00WC ✔️ \nAYOTTE 03SC ✔️ \nAYOTTE 05SC ✔️ \nAYOTTE 05WC ✔️ \nAYOTTE 24SC ✔️ \nIHOP REF ✔️ \nSCMS REF ✔️ \nRICO SHORT ✔️ \nRICO MESONH ✔️ \nARMCU REF ✔️ \nARMCU MESONH ✔️ \nARMCU E3SM ✔️ \nBOMEX REF ❌[2] \nMPACE REF ✔️ \nFIRE REF ✔️ \nSANDU REF ✔️ \nSANDU FAST ✔️ \nSANDU SLOW ✔️ \nAMMA REF ✔️ \nDYNAMO NSA3A ✔️ Takes a long time!\nDYNAMO NSA3A_D1 ✔️ \nDYNAMO NSA3A_D30 ❌[3] \nDYNAMO NSA3A_MJO1 ✔️ ","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"[1]: Issue with SURFEX namelist","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"[2]: Python issue L241 $EMS_DIR/ems/prep_init_forc_atm_GMAP.py","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"[3]: Missing data_input.nc ","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/#Analysing-results-using-Atlas","page":"MUSC EMS","title":"Analysing results using Atlas","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"Still in the Apptainer container","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"cd $HOME\ngit clone git@github.com:ewhelan/SCM-atlas.git -b hirlam SCM-atlas_git/ewhelan/hirlam\nexport PATH=$HOME/SCM-atlas_git/ewhelan/hirlam/apptools:$PATH\nexport PYTHONPATH=$HOME/SCM-atlas_git/ewhelan/hirlam:$PYTHONPATH\nexport ATLAS_CONFIG=\"\"\nmkdir -p $HOME/Atlas\ncd $HOME/Atlas\nmkdir config\ncp $HOME/SCM-atlas_git/ewhelan/hirlam/examples/config/config_HARM.py config/\n### edit config/config_HARM.py\nrun_atlas1d.py -config config/config_HARM.py","category":"page"},{"location":"Build/Build_with_makeup/#makeup","page":"Makeup","title":"Building with MAKEUP","text":"","category":"section"},{"location":"Build/Build_with_makeup/#Background","page":"Makeup","title":"Background","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Makeup is an alternative mechanism to build the HARMONIE system. Instead of using GMKPACK to build the libraries and binaries, standard GNU make (gmake) procedures are used, making the build of executables an easier task. Parallel make also comes for free, thus improving the turn-around time of the build process. Furthermore, rebuilds and changes of compiler flags, either per project and/or per source file, are now trivial to do.","category":"page"},{"location":"Build/Build_with_makeup/#MAKEUP-very-quickly","page":"Makeup","title":"MAKEUP very quickly","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The process of using the MAKEUP system in stand-alone fashion is described next.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Let's define two helper variables for presentation purposes:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The variable $HARMONIE_SRC refers to the directory where the AROME source code is situated. 
Another variable, $HARMONIE_MAKEUP, refers to the directory where the build configuration files and MAKEUP's scripts are located. ","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"#!sh\n# In ksh/bash\nexport HARMONIE_SRC=/some/path/harmonie/src\nexport HARMONIE_MAKEUP=/some/path/harmonie/util/makeup\n# In csh/tcsh\nsetenv HARMONIE_SRC /some/path/harmonie/src\nsetenv HARMONIE_MAKEUP /some/path/harmonie/util/makeup","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Usually $HARMONIE_MAKEUP is $HARMONIE_SRC/../util/makeup, but it doesn't have to be (e.g. in FMI's production system $HARMONIE_MAKEUP is situated on a separate disk from the source code $HARMONIE_SRC), and MAKEUP can handle this.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The process of building a HARMONIE executable involves just a few steps:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Go to the directory $HARMONIE_MAKEUP and create/edit your configuration file (config.*). Note the preferred naming convention:\nconfig.....\nRun MAKEUP's configure script under $HARMONIE_SRC (for example):\ncd $HARMONIE_SRC\n$HARMONIE_MAKEUP/configure $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\nIf applicable, adjust environment settings before launching make; e.g. on some platforms one needs to remember to load adequate modules, such as for the DMI Cray XT5:\nmodule swap PrgEnv-pgi PrgEnv-pathscale # if pathscale is to be used\nmodule swap xt-mpt xt-mpt/3.5.0\nmodule swap xt-asyncpe/3.8 xt-asyncpe/3.4\nGo to the $HARMONIE_SRC directory and type make (or gmake, if make is non-GNU make). Redirect output to a file & terminal:\ncd $HARMONIE_SRC\ngmake 2>&1 | tee logfile # ksh/bash\ngmake |& tee logfile # csh/tcsh","category":"page"},{"location":"Build/Build_with_makeup/#Using-MAKEUP-to-build-auxlibs-(bufr,-gribex,-rgb)","page":"Makeup","title":"Using MAKEUP to build auxlibs (bufr, gribex, rgb)","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"You can now build EMOS- and related libraries by using MAKEUP. All you need to know is the name of your sources. file (e.g. sources.crayxt) that you would use to build this stuff anyway. Pass that generic name to MAKEUP's configure through the -E option and you're in business. 
An example for FMI's Cray:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n$HARMONIE_MAKEUP/configure -E sources.crayxt $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\ngmake","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"This will create extra libs (so-called MY_SYSLIBS) libbufr.a, libgribex.a and librgb.a, and they will end up being linked into your executables, like MASTERODB.","category":"page"},{"location":"Build/Build_with_makeup/#Using-MAKEUP-to-build-also-util/gl-tools","page":"Makeup","title":"Using MAKEUP to build also util/gl -tools","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The HARMONIE utility package GL, located in the util/gl directory, can also be built as part of the MAKEUP process if the option -G is also given to configure:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n$HARMONIE_MAKEUP/configure -G $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\ngmake","category":"page"},{"location":"Build/Build_with_makeup/#Using-MAKEUP-to-build-also-Oulan-and/or-Monitor-tools","page":"Makeup","title":"Using MAKEUP to build also Oulan and/or Monitor -tools","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The HARMONIE utility package MONITOR and the obs-preprocessor OULAN can also be built with MAKEUP. If you add the option -B, then you will get the Oulan and Monitor executables built, too. Or you can be more selective and opt only for oulan with -b oulan, or just monitor with -b monitor:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n# Request for building both oulan & monitor, too\n$HARMONIE_MAKEUP/configure -B $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\n# .. or add oulan only :\n$HARMONIE_MAKEUP/configure -b oulan $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\n# .. or add monitor only :\n$HARMONIE_MAKEUP/configure -b monitor $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\ngmake","category":"page"},{"location":"Build/Build_with_makeup/#Building-objects-away-from-HARMONIE_SRC-directory","page":"Makeup","title":"Building objects away from $HARMONIE_SRC-directory","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"If you do not want to pollute your source directories with objects, thus making it hard to recognize which files are under the version handling system SVN and which aren't (... although the SVN command svn -q st would tell ...), then use the -P option. This will redirect compilations away from the source code, under $HARMONIE_SRC/../makeup.ZZZ, where ZZZ is the suffix of your config-file, e.g. 
FMI.cray_xt5m.pathscale.mpi+openmp.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The operation sequence is as follows:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n$HARMONIE_MAKEUP/configure [options] -P $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\ncd $HARMONIE_SRC/../makeup.FMI.cray_xt5m.pathscale.mpi+openmp/src\ngmake","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The drawback with this approach is that whenever there is an update in the master source directories, you need to run the lengthy configure in order to rsync the working directory up to date. We may need to introduce a separate command for this to avoid a full rerun of configure.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"You can also use the lowercase -p option with an argument pointing to a directory root in which to compile:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n$HARMONIE_MAKEUP/configure [options] -p /working/path $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\ncd /working/path/src\ngmake","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Now, it is important to understand that this /working/path has no connection to version handling, i.e. if you change something in your master copy (say, issue an svn up command), then your working directory remains unaltered. To synchronize it, do the following:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd /working/path/src\ngmake rsync","category":"page"},{"location":"Build/Build_with_makeup/#More-details","page":"Makeup","title":"More details","text":"","category":"section"},{"location":"Build/Build_with_makeup/#Re-running-configure","page":"Makeup","title":"Re-running configure","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Afterwards you can rerun configure as many times as you wish. Please note that the very first time is always slow (maybe 10 minutes) as interface blocks for the arp/ and ald/ projects are generated.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Usually running configure many times is not necessary, not even when you have changed your config-file (!), except when interface blocks need to be updated/re-created (-c or -g options), for example when a subroutine/function call argument list has changed. 
Then the whole config+build sequence can be run under $HARMONIE_SRC as follows:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n# -c option: Check if *some* interface blocks need regeneration and regenerate\n$HARMONIE_MAKEUP/configure -c $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\n# -g option: Force to regenerate interface blocks \n# $HARMONIE_MAKEUP/configure -g $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\ngmake","category":"page"},{"location":"Build/Build_with_makeup/#Changing-the-number-of-tasks-for-compilation","page":"Makeup","title":"Changing the number of tasks for compilation","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The number of tasks used for gmake-compilations is set by default to 8; see the NPES parameter in $HARMONIE_MAKEUP/defaults.mk. To change the default, you have two choices:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Add NPES to your config-file, for example set it to 2:\nNPES=2\nInvoke gmake with the NPES parameter, e.g. set it to 10:\ngmake NPES=10","category":"page"},{"location":"Build/Build_with_makeup/#Inserting-DRHOOK-for-Meso-projects","page":"Makeup","title":"Inserting DRHOOK for Meso-projects","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"To insert DrHook profiling automatically for the mpa/ and mse/ projects, reconfigure with the -H option:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n$HARMONIE_MAKEUP/configure -H $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"You can also pick and choose either the mpa/ or mse/ projects with the -h option (can be supplied several times):","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n$HARMONIE_MAKEUP/configure -h mpa $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\n$HARMONIE_MAKEUP/configure -h mse $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\n# The following are the same as if the option -H was used\n$HARMONIE_MAKEUP/configure -h mpa -h mse -h surfex $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\n$HARMONIE_MAKEUP/configure -h mpa:mse:surfex $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"In the future it may not be necessary to insert DrHook automagically, if the insertion has been done at the svn (version handling) level.","category":"page"},{"location":"Build/Build_with_makeup/#Speeding-up-compilations-by-use-of-RAM-disk","page":"Makeup","title":"Speeding up compilations by use of RAM-disk","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"To further speed up compilation, and if you have several GBytes of Linux RAM-disk (/dev/shm) available, do the following:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Create your personal RAM-disk subdirectory and check the available disk space\nmkdir /dev/shm/$USER\ndf -kh /dev/shm/$USER\nReconfigure with RAM-disk either by defining LIBDISK in your config-file or running\ncd 
$HARMONIE_SRC\n$HARMONIE_MAKEUP/configure -L /dev/shm/$USER $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\nAlso define TMPDIR to point to /dev/shm/$USER to allow compiler-specific temporary files on the RAM-disk\n# In ksh/bash-shells:\nexport TMPDIR=/dev/shm/$USER\ngmake 2>&1 | tee logfile\n# In csh/tcsh-shells:\nsetenv TMPDIR /dev/shm/$USER\ngmake |& tee logfile","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Please note that step 2 creates all libraries AND executables under the directory pointed to by the -L argument. Object files and modules, however, are still placed under the corresponding source directories.","category":"page"},{"location":"Build/Build_with_makeup/#What-if-you-run-out-of-RAM-disk-space-?","page":"Makeup","title":"What if you run out of RAM-disk space ?","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Sometimes you may find that the disk space becomes limited in /dev/shm/$USER. Then you have the option to supply the LIBDISK parameter directly to the gmake command without needing to reconfigure:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"gmake LIBDISK=`pwd`","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"This usually increases the turnaround time, as creating the AROME executable on disk rather than on the RAM-disk may be 5-10 times slower. But at least you won't run out of disk space.","category":"page"},{"location":"Build/Build_with_makeup/#How-is-ODB-related-stuff-handled-?","page":"Makeup","title":"How is ODB related stuff handled ?","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The Observational DataBase (ODB) is a complicated beast for good reasons. Unlike the other projects, which produce just one library each, correct use of ODB in variational data assimilation requires several libraries.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The trick to manage this with MAKEUP is to create a bunch of symbolic links pointing to the $HARMONIE_SRC/odb/ project directory. There will be one (additional) library for each link. And then we choose carefully the correct subdirectories and source codes therein to be compiled for each library.","category":"page"},{"location":"Build/Build_with_makeup/#Specific-ODB-libraries,-their-meaning-and-the-source-files-included","page":"Makeup","title":"Specific ODB-libraries, their meaning & the source files included","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Library Description Source files\nlibodb ODB core library lib/ & aux/ : [a-z]*.F90 [a-z]*.c\n module/ & pandor/module : *.F90\nlibodbport Interface between IFS (ARPEGE/ALADIN/AROME) & ODB cma2odb/ & bufr2odb/ : *.F90\n – also contains BUFR2ODB routines pandor/extrtovs & pandor/fcq & pandor/mandalay : *.F90\nlibodbdummy ODB-related dummies lib/ : [A-Z]*.F90 [A-Z]*.c\nlibodbmain ODB tools, main programs (C & Fortran) tools/ : [A-Z]*.F90 *.c *.F\nlibPREODB ERA40 database (not needed, but good for debugging) ddl.PREODB/*.sql , ddl.PREODB/*.ddl\nlibCCMA Compressed Central Memory Array database (minimization) ddl.CCMA/*.sql , ddl.CCMA/*.ddl\nlibECMA Extended Central Memory Array database (obs. screening) ddl.ECMA/*.sql , ddl.ECMA/*.ddl\nlibECMASCR Carbon copy of ECMA for obs. 
load balancing between PEs ddl.ECMASCR/*.sql , ddl.ECMASCR/*.ddl","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"In the file $HARMONIE_MAKEUP/configure you can also find how different files are nearly hand-picked for particular libraries. Search for the block","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":" if [[ \"$d\" = @(odb|odbport|odbdummy|odbmain) ]] ; then\n case \"$d\" in\n odb) case \"$i\" in\n lib|aux) files=$(\\ls -C1 [a-z]*.F90 [a-z]*.c 2>/dev/null) ;;\n module|pandor/module) files=$(\\ls -C1 *.F90 2>/dev/null) ;;\n esac ;;\n odbport) case \"$i\" in\n cma2odb|bufr2odb) files=$(\\ls -C1 *.F90 2>/dev/null) ;;\n pandor/extrtovs|pandor/fcq|pandor/mandalay) files=$(\\ls -C1 *.F90 2>/dev/null) ;;\n esac ;;\n odbdummy) [[ \"$i\" != \"lib\" ]] || files=$(\\ls -C1 [A-Z]*.F90 [A-Z]*.c 2>/dev/null) ;;\n odbmain) [[ \"$i\" != \"tools\" ]] || files=$(\\ls -C1 [A-Z]*.F90 *.c *.F 2>/dev/null) ;;\n esac\n elif [[ \"$d\" = @($case_odbs) ]] ; then\n [[ \"$i\" != \"ddl.$d\" ]] || {\n files=$(\\ls -C1 *.ddl *.sql 2>/dev/null)\n mkdepend=$CMDROOT/sfmakedepend_ODB\n }\n else\n ... ","category":"page"},{"location":"Build/Build_with_makeup/#Handling-SQL-query-and-data-layout-files","page":"Makeup","title":"Handling SQL-query and data layout files","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"For SQL-query compilations (ODB/SQL queries are translated into C-code for greater performance), the odb98.x SQL-compiler executable is built as the first thing in the MAKEUP process.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Queries and data definition layouts (DDL-files) are always under a /ddl./ directory.","category":"page"},{"location":"Build/Build_with_makeup/#Miscellaneous-stuff","page":"Makeup","title":"Miscellaneous stuff","text":"","category":"section"},{"location":"Build/Build_with_makeup/#Selective-compilation","page":"Makeup","title":"Selective compilation","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"It is very easy to deviate from the generic compilation options for certain source files or even projects. 
If you want to change a compiler option (say) from -O3 to -O2 for the routine src/arp/pp_obs/pppmer.F90, you can add the following lines at the end of your config-file:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"pppmer.o: FCFLAGS := $(subst -O3,-O2,$(FCFLAGS))","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"If you want to apply this to all pppmer*.F90 routines, then you need to enter the following \"wildcard\" sequence:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"pppme%.o: FCFLAGS := $(subst -O3,-O2,$(FCFLAGS))","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Note by the way that we need to use pppme%.o: in GNU make a % must match at least one character, so the more natural (from Unix) pppmer%.o would choose only the routines pppmertl.F90 and pppmerad.F90, not the routine pppmer.F90 at all!","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"To apply different compiler flags for (say) the arp project only, one can put the following at the end of the config-file:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"ifeq ($(PROJ),arp)\n%.o: FCFLAGS := $(subst -O3,-O2,$(FCFLAGS))\nendif","category":"page"},{"location":"Build/Build_with_makeup/#(Re-)building-just-one-project","page":"Makeup","title":"(Re-)building just one project","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Sometimes you may want to rebuild only (say) the xrd project, i.e. libxrd.a. This can be done as follows:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"gmake PROJ=xrd","category":"page"},{"location":"Build/Build_with_makeup/#Cleaning-up-files","page":"Makeup","title":"Cleaning up files","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"You can clean up by","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"gmake clean","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"... or selectively just the project arp:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"gmake PROJ=arp clean","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"This clean does not wipe out makefiles, i.e. you don't have to rerun configure after this.","category":"page"},{"location":"Build/Build_with_makeup/#Restoring-and-cleaning-up-the-state-of-HARMONIE_SRC","page":"Makeup","title":"Restoring and cleaning up the state of $HARMONIE_SRC","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The following command can be run once before issuing another configure command. It will remove all related object and executable files, as well as the generated makefiles, logfiles and other stuff generated by MAKEUP's configure:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\ngmake veryclean\n\n# .. 
or alternatively:\n$HARMONIE_MAKEUP/unconfigure","category":"page"},{"location":"Build/Build_with_makeup/#Ignoring-errors","page":"Makeup","title":"Ignoring errors","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Sometimes it is useful to enforce compilations even if one or more routines fail to compile. In such cases the recommended syntax is:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"gmake -i\n\n# or, to not mess up the output, use just one process for compilations\n\ngmake NPES=1 -i","category":"page"},{"location":"Build/Build_with_makeup/#Creating-precompiled-installation","page":"Makeup","title":"Creating precompiled installation","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"If you want to provide precompiled libraries, objects and source code to other users so that they do not have to start compilation from scratch, then make a distribution or precompiled installation as follows:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"gmake PRECOMPILED=/a/precompiled/rootdir precompiled","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"After this, the stuff you just compiled ends up in the directory /a/precompiled/rootdir with two subdirectories: src/ and util/. All executables are currently removed.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"You can repeat this call, and it will just rsync the modified bits.","category":"page"},{"location":"Build/Build_with_makeup/#Update/check-your-interface-blocks-outside-configure","page":"Makeup","title":"Update/check your interface blocks outside configure","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The configure script has the options -c and -g to check for or enforce (re-)creation of the interface blocks of the arp and ald projects. To avoid a full and lengthy configure run, you can just do the following:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"gmake intfb","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/#Monitoring-Harmonie-suites-with-Teleport","page":"Teleport","title":"Monitoring Harmonie suites with Teleport","text":"","category":"section"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"In order to monitor the progress of your Harmonie suite(s) at ECMWF, the ecFlow GUI ecflow_ui can be used directly from your local PC/server. This relies on Teleport and ssh port forwarding, which are described in more detail below. ","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/#Open-Teleport-connection","page":"Teleport","title":"Open Teleport connection","text":"","category":"section"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"This relies on a Teleport connection to ECMWF. 
Further details on Teleport are available here:","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"[itops@reaserve ~]$ tsh status\n> Profile URL: https://jump.ecmwf.int:443\n Logged in as: itops@met.ie\n Cluster: jump.ecmwf.int\n Roles: *\n Logins: duit\n Valid until: 2021-03-23 22:00:35 +0000 UTC [valid for 11h21m0s]\n Extensions: permit-X11-forwarding, permit-agent-forwarding, permit-port-forwarding, permit-pty\n\n\n* RBAC is only available in Teleport Enterprise\n https://gravitational.com/teleport/docs/enterprise\n[itops@reaserve ~]$ ","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"In order to open a new Teleport connection, execute the following and submit your credentials via the browser:","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"[ewhelan@reaserve ~]$ tsh login --proxy=jump.ecmwf.int:443","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/#Log-in","page":"Teleport","title":"Log in","text":"","category":"section"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"To log in to ECMWF's Atos:","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"[itops@reaserve ~]$ ssh -X hpc-login","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"The Teleport connection to ECMWF is configured as follows:","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"[ewhelan@reaserve ~]$ cat .ssh/config \nHost hpc-login\n User dui\n IdentityFile ~/.tsh/keys/jump.ecmwf.int/eoin.whelan@met.ie\n ProxyCommand bash -c \"tsh login; ssh -W %h:%p %r@jump.ecmwf.int\"\n[ewhelan@reaserve ~]$ ","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/#Open-ecFlow-ports","page":"Teleport","title":"Open ecFlow ports","text":"","category":"section"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"The following opens a port to the ECMWF ecFlow server (user dui, ECF_PORT=3141), based on the instructions provided at [https://confluence.ecmwf.int/display/ECFLOW/Teleport+-+using+local+ecflow_ui]. In a new terminal:","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"ssh hpc-login -C -N -L 3141:ecflow-gen-dui-001:3141","category":"page"},{"location":"DataAssimilation/ObservationOperators/#Observation-operators","page":"HOP_DRIVER","title":"Observation operators","text":"","category":"section"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"This documentation summarises the observation operator in HARMONIE and the use of the HOP_DRIVER tool. The test harness, HOP_DRIVER, calls the observation operator and generates FG departures without calling any model code or initialising any model modules. Firstly, the IFS is used to dump a single-observation gom_plus to file from the first trajectory of an experiment. Dumping multiple observations would require a more complex and full-featured dump (good file format, multi-process parallel). 
For code refactoring, HOP_DRIVER can be used to test changes to the observation operator of a particular observation type.","category":"page"},{"location":"DataAssimilation/ObservationOperators/#HARMONIE-and-HOP_DRIVER","page":"HOP_DRIVER","title":"HARMONIE and HOP_DRIVER","text":"","category":"section"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"The HOP_DRIVER program was first added to the CY42R2 code. The tool was initially implemented to test refactoring of the IFS observation operator code src/arpifs/op_obs/hop.F90. Instructions on how to prepare the code and run HOP_DRIVER using HARMONIE are outlined below. A presentation made at the [wiki:HirlamMeetings/ModelMeetings/ObOpWorkshop OOPS Observation Operator Workshop] may provide some useful background information.","category":"page"},{"location":"DataAssimilation/ObservationOperators/#Comments-on-the-branch","page":"HOP_DRIVER","title":"Comments on the branch","text":"","category":"section"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"Code changes were required in order to compile cy42r2bf.04 + mods (provided by MF/ECMWF) in the HARMONIE system: [14312], [14325], [14326], [14330], [14331], [14332], [14333], [14334].\nChanges were made to makeup in order to compile HOP_DRIVER correctly: [14310], [14327], [14328], [14329], [14335], [14362], [14382], [14392].\nIncluded in [14362] is a change to ODBSQLFLAGS, which is set to ODBSQLFLAGS=-O3 -C -UCANARI -DECMWF $(ODBEXTRAFLAGS) in order to use the ECMWF-flavoured ODB used by HOP_DRIVER.\nOn cca, GNU compilers 4.9 are not fully supported, i.e. I had to build GRIB-API and NetCDF locally using gcc/gfortran 4.9 on cca.\nAn environment variable, HOPDIR, is used to define the location of the necessary input data for HOP_DRIVER.\nAn environment variable, HOPCOMPILER, is used by the HOP_driver script to define the compiler used. This is used to compare results.","category":"page"},{"location":"DataAssimilation/ObservationOperators/#HOPOBS:-amsua","page":"HOP_DRIVER","title":"HOPOBS: amsua","text":"","category":"section"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"Currently there is only one observation type, AMSU-A (HOPOBS=amsua), available for testing with HOP_DRIVER. Alan Geer (ECMWF) has already carried out the refactoring of the HOP code related to AMSU-A observations. A single observation is provided in the ECMA and is used to test the refactoring of the HOP code. 
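The variables described above can, for example, be set as follows (a sketch only; the values and paths are illustrative, and the exact syntax in your configuration may differ):\nexport HOPOBS=amsua # observation type to test\nexport HOPCOMPILER=gfortran # compiler label used to select the reference results\nexport HOPDIR=/path/to/hop/input/data # location of the necessary input data\n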
To carry out the testing of the amsua refactoring HOPOBS should be set to amsua in ecf/config_exp.h.","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"reportype@hdr obstype@hdr sensor@hdr statid@hdr stalt@hdr date@hdr time@hdr degrees(lat) degrees(lon) report_status@hdr datum_status@body obsvalue@body varno@body vertco_type@body\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 12 173.28 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 12 158.86 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 3 227.40 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 3 260.82 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 256.90 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 239.60 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 12 NULL 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 3 217.69 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 209.39 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 214.05 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 223.02 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 234.42 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 245.14 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 257.18 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 12 227.91 119 3","category":"page"},{"location":"DataAssimilation/ObservationOperators/#HOP_DRIVER","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"","category":"section"},{"location":"DataAssimilation/ObservationOperators/#Using-HOP_DRIVER","page":"HOP_DRIVER","title":"Using HOP_DRIVER","text":"","category":"section"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"With LHOP_RESULTS=.TRUE. HOP_DRIVER will write results to a file called hop_results${MYPROC} for comparison between online and offline results. The results file is opened by src/arpifs/var/taskob.F90; HOP_DRIVER results are written to hop_results${MYPROC} in src/arpifs/op_obs/hop.F90:","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":" :\n :\nIF(LHOP_RESULTS) THEN\n!$OMP CRITICAL\n ! 
Output for comparison between online and offline results:\n WRITE(CFILENAME,'(\"hop_results\",I4.4)') MYPROC\n OPEN(NEWUNIT=IU,FILE=CFILENAME,POSITION='APPEND',ACTION='WRITE',FORM='FORMATTED')\n DO JOBS = 1,KDLEN\n DO JBODY=1,IMXBDY\n IF (JBODY>ICMBDY(JOBS)) CYCLE\n IBODY = ROBODY%MLNKH2B(JOBS)+(JBODY-1)\n WRITE(IU,'(6I8,2F30.14)') MYPROC, KSET, JOBS, NINT(ROBHDR%DATA(JOBS,ROBHDR%SEQNO_AT_HDR)),&\n & NINT(ROBODY%DATA(IBODY,ROBODY%VERTCO_REFERENCE_1_AT_BODY)), &\n & NINT(ROBODY%DATA(IBODY,ROBODY%VARNO_AT_BODY)), ZHOFX(JOBS,JBODY), ZXPPB(JOBS,JBODY)\n\n ENDDO\n ENDDO\n CLOSE(IU)\n!$OMP END CRITICAL\nENDIF\n :\n :","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"The HOP_driver script (based on a script provided by MF) sorts the contents of the `hop_results0001` file for comparison with some results made available by ECMWF/MF:","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":" :\n :\n#\n# Check HOP_DRIVER results (available for gfortran and intel)\n#\nln -s $HOPDIR/${HOPOBS}/results.$HOPCOMPILER .\ncat hop_results* | sort -k1,1n -k2,2n -k3,3n -k5,5n -k6,6n > results.driver\necho\ncmp -s results.$HOPCOMPILER results.driver\nif [ $? -eq 0 ] ; then\n echo \"RESULTS ARE STRICTLY IDENTICAL TO THE REFERENCE FOR HOPCOMPILER=$HOPCOMPILER :-)\"\nelse\n echo Compare exactly against the results dumped from hop:\n echo \"xxdiff results.$HOPCOMPILER results.driver &\"\n diff results.$HOPCOMPILER results.driver\n exit 1\nfi\n :\n :","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"On cca you will find useful output from HOP_DRIVER in cca:$TEMP/hm_home/rfexp/archive/HOPDRIVEROUT:","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"fort.4\nNODE.001_01\nhop_results0001\nresults.gfortran\nresults.driver","category":"page"},{"location":"DataAssimilation/ObservationOperators/#The-code","page":"HOP_DRIVER","title":"The code","text":"","category":"section"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"HOP_DRIVER is a short program written by Deborah Salmond (ECMWF) to test code changes made to the observation operator. The program src/arpifs/programs/hop_driver.F90 is summarised here.","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"The program sets up the model geometry and observations:","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":" :\n :\nCALL GEOMETRY_SET(YRGEOMETRY)\nCALL MODEL_SET(YRMODEL)\n\nCALL IFS_INIT('gc7a')\n\nCALL SUINTDYN\n\nCALL SUGEOMETRY(YRGEOMETRY) !From GEOMETRY_SETUP\n\nCALL SURIP(YRGEOMETRY%YRDIM) !From MODEL_CREATE\n\n! Set up Observations, Sets\nCALL SUDIMO(YRGEOMETRY,NULOUT) !From SU0YOMB\nCALL SUOAF !From SU0YOMB\nCALL SUALOBS !From SU0YOMB\nCALL SURINC !From SU0YOMB\nCALL SETUP_TESTVAR !From SU0YOMB\nCALL SUOBS(YRGEOMETRY) !From CNT1\nCALL ECSET(-1,NOBTOT,0) !From OBSV\nCALL SUPHEC(YRGEOMETRY,NULOUT)\n\n! 
Setup varbc (from cnt1.F90) and read VARBC.cycle\nCALL YVARBC%SETUP_TRAJ\n :\n :","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"HOP_DRIVER then loops over the number of observation sets (NSETOT) and reads a GOM PLUS for each observation set. HRETR and HOP are then called:","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":" :\n :\nDO ISET=1,NSETOT\n IDLEN = MLNSET(ISET)\n IMXBDY = MAX(MMXBDY(ISET),1)\n\n ALLOCATE(ZHOFX(IDLEN,IMXBDY))\n ZHOFX=RMDI\n\n ! READ GOM_PLUS FROM DUMP\n CALL GOM_PLUS_READ_DUMP(YGP5,ISET)\n\n IF(IDLEN /= YGP5%NDLEN) THEN\n CALL ABOR1('Sets are incompatible')\n ENDIF\n\n :\n :\n :\n\n CALL HRETR(YRGEOMETRY%YRDIMV,IDLEN,IMXBDY,ISET,1,YGP5,YVARBC)\n\n CALL HOP(YRGEOMETRY%YRDIMV,YGP5,YVARBC,IDLEN,IMXBDY,ISET,1,LDOOPS=.TRUE.,PHOFX=ZHOFX)\n\n !write(0,*)'ZHOFX',ZHOFX\n DEALLOCATE(ZHOFX)\n\n CALL GOM_PLUS_DESTROY(YGP5)\n\nENDDO\n\n :\n :","category":"page"},{"location":"EPS/Setup/#eps-setup","page":"Setup","title":"Setup","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/#boundary-file-preparation","page":"Preparation","title":"Preparation of initial and boundary files","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/#Introduction","page":"Preparation","title":"Introduction","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"HARMONIE can be coupled with external models such as IFS, ARPEGE and HIRLAM. Internally it is possible to nest the different ALADIN/ALARO/AROME configurations, with some restrictions. In the following we describe how the initial and boundary files are generated depending on the different configurations. Boundary file preparation basically includes two parts: forecast file fetching and boundary file generation.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"The ECFLOW tasks for initial and boundary preparation","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#Boundary-strategies","page":"Preparation","title":"Boundary strategies","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"There are a number of ways to choose which forecast lengths you use as boundaries. The strategy is determined by BDSTRATEGY in ecf/config_exp.h and there are a number of strategies implemented.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"available : Search for available files in BDDIR and try to keep forecast consistency. This is meant to be used operationally since it will at least keep your run going, but with old boundaries, if no new boundaries are available.\nsimulate_operational : Mimic the behaviour of the operational runs using ECMWF 6h old boundaries.\nsame_forecast : Use all boundaries from the same forecast, starting from the analysis\nanalysis_only : Use only analyses as boundaries. Note that BDINT cannot be shorter than the frequency of the analyses.\nlatest : Use the latest possible boundary with the shortest forecast length\nRCR_operational : Mimic the behaviour of the RCR runs, i.e.\n12h old boundaries at 00 and 12 and\n06h old boundaries at 06 and 18\njb_ensemble : Same as same_forecast but used for JB-statistics generation. 
With this you should export JB_ENS_MEMBER=some_number\neps_ec : ECMWF EPS members (on reduced Gaussian grid). It is only meaningful with ENSMSEL non-empty, i.e., ENSSIZE > 0","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"All the strategies are defined in scr/Boundary_strategy.pl. The script generates a file bdstrategy in your working directory that could look like:","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":" Boundary strategy\n\n DTG: 2011090618\n LL: 36\n BDINT: 3\n BDCYCLE: 6\n STRATEGY: simulate_operational\n BDDIR: /scratch/snh/hm_home/alaro_37h1_trunk/ECMWF/archive/@YYYY@/@MM@/@DD@/@HH@\nHOST_MODEL: ifs\nINT_BDFILE: /scratch/snh/hm_home/alaro_37h1_trunk/20110906_18/ELSCFHARMALBC@NNN@\n\n# The output bdstrategy file has the format of \n# NNN|YYYYMMDDHH INT_BDFILE BDFILE BDFILE_REQUEST_METHOD \n# where \n# NNN is the input hour\n# YYYYMMDDHH is the valid hour for this boundary\n# INT_BDFILE is the final boundary file\n# BDFILE is the input boundary file\n# BDFILE_REQUEST_METHOD is the method to the request BDFILE from e.g. MARS, ECFS or via scp\n\nSURFEX_INI| /scratch/snh/hm_home/alaro_37h1_trunk/20110906_18/SURFXINI.lfi \n000|2011090618 /scratch/snh/hm_home/alaro_37h1_trunk/20110906_18/ELSCFHARMALBC000 /scratch/snh/hm_home/alaro_37h1_trunk/ECMWF/archive/2011/09/06/12/fc20110906_12+006 MARS_umbrella -d 20110906 -h 12 -l 6 -t\n003|2011090621 /scratch/snh/hm_home/alaro_37h1_trunk/20110906_18/ELSCFHARMALBC001 /scratch/snh/hm_home/alaro_37h1_trunk/ECMWF/archive/2011/09/06/12/fc20110906_12+009 MARS_umbrella -d 20110906 -h 12 -l 9 -t\n...","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"This means that if the boundary file is not found under BDDIR, the command MARS_umbrella -d YYYYMMDD -h HH -l LLL -t BDDIR will be executed. A local interpretation could be to search for external data if your file is not in BDDIR, like this example from SMHI:","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":" Boundary strategy\n\n DTG: 2011090112\n LL: 24\n BDINT: 3\n BDCYCLE: 06\n STRATEGY: latest\n BDDIR: /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/g05a/archive/@YYYY@/@MM@/@DD@/@HH@\nHOST_MODEL: hir\nINT_BDFILE: /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/20110901_12/ELSCFHARMALBC@NNN@\n EXT_BDDIR: smhi_file:/data/arkiv/field/f_archive/hirlam/G05_60lev/@YYYY@@MM@/G05_@YYYY@@MM@@DD@@HH@00+@LLL@H00M\nEXT_ACCESS: scp\n\n# The output bdstrategy file has the format of \n# NNN|YYYYMMDDHH INT_BDFILE BDFILE BDFILE_REQUEST_METHOD \n# where \n# NNN is the input hour\n# YYYYMMDDHH is the valid hour for this boundary\n# INT_BDFILE is the final boundary file\n# BDFILE is the input boundary file\n# BDFILE_REQUEST_METHOD is the method to the request BDFILE from e.g. 
MARS, ECFS or via scp\n\n# hh_offset is 0 ; DTG is \nSURFEX_INI| /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/20110901_12/SURFXINI.lfi \n000|2011090112 /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/20110901_12/ELSCFHARMALBC000 /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/g05a/archive/2011/09/01/12/fc20110901_12+000 scp smhi:/data/arkiv/field/f_archive/hirlam/G05_60lev/201109/G05_201109011200+000H00M \n003|2011090115 /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/20110901_12/ELSCFHARMALBC001 /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/g05a/archive/2011/09/01/12/fc20110901_12+003 scp smhi:/data/arkiv/field/f_archive/hirlam/G05_60lev/201109/G05_201109011200+003H00M ","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"In this example an scp from smhi will be executed if the expected file is not in BDDIR. There are a few environment variables that one can play with in sms/config_exp.h that deal with the initial and boundary files.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"HOST_MODEL : Tells the origin of your boundary data * ifs : ecmwf data * hir : hirlam data * ald : Output from aladin physics, this also covers arpege data after fullpos processing. * ala : Output from alaro physics * aro : Output from arome physics\nBDINT : Interval of boundaries in hours\nBDLIB : Name of the forcing experiment. Set to\nECMWF to use MARS data\nRCRa to use RCRa data from ECFS\nanother HARMONIE/HIRLAM experiment to use its output\nBDDIR : The path to the boundary file. In the default location BDDIR=$HM_DATA/${BDLIB}/archive/@YYYY@/@MM@/@DD@/@HH@ the file retrieved from e.g. MARS will be stored in a separate directory. One could also consider configuring this so that all the retrieved files are located in your working directory $WRK. Locally this points to the directory where you have all your common boundary HIRLAM or ECMWF files.\nINT_BDFILE : The full path of the interpolated boundary files. The default setting is to let the boundary file be removed by directing it to $WRK.\nINT_SINI_FILE : The full path of the initial surfex file. ","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"There are a few optional environment variables that could be used that are not visible in config_exp.h ","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"EXT_BDDIR : External location of boundary data. If not set, the rules depend on HOST_MODEL\nEXT_ACCESS : Method for accessing external data. If not set, the rules depend on HOST_MODEL\nBDCYCLE : Assimilation cycle interval of forcing data, default is 6h.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"More about this can be found in the Boundary_strategy.pl script.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"The bdstrategy file is parsed by the script ExtractBD. ","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"scr/ExtractBD Checks if data are in BDDIR, otherwise copies them from EXT_BDDIR. The operation performed can be different depending on HOST and HOST_MODEL. 
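As an illustration of what this parsing step amounts to, here is a much simplified shell sketch; it is not the actual ExtractBD logic, the field layout is taken from the bdstrategy header shown above, and the final argument mirrors the documented MARS_umbrella call:

# Fetch the boundary file for hour 000 if it is not already in place.
grep '^000|' bdstrategy | while read -r line; do
  set -- ${line#*|}                  # strip the NNN| prefix and split the remaining fields
  valid=$1; int_bdfile=$2; bdfile=$3; shift 3
  [ -s $bdfile ] || $@ ${bdfile%/*}  # remaining fields form the request, e.g. MARS_umbrella ... -t BDDIR
done
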
IFS data at ECMWF are extracted from MARS, RCR data are copied from ECFS.\nInput parameters: Forecast hour\nExecutables: none.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"In case data should be retrieved from MARS there is also a stage step. When calling MARS with the stage command we ask MARS to make sure data are on disk. In HARMONIE we ask for all data for one day of forecasts (normally four cycles) at a time. ","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#Near-real-time-aerosols","page":"Preparation","title":"Near real time aerosols","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"The use of near real time aerosols requires the presence of aerosol fields in the boundary files.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"BDAERO : Origin of the aerosol fields\nnone : no aerosols (default configuration)\ncams : aerosol from CAMS.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"A bdstrategycams file is generated. After the data is retrieved, the files are merged with the files from the HOST_MODEL to produce the final boundary condition files.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#Initial-and-Boundary-file-generation","page":"Preparation","title":"Initial and Boundary file generation","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"To be able to start the model we need the variables defining the model state.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"T,U,V,PS in spectral space\nQ in gridpoint or spectral space","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"Optional:","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"Ql, Qi, Qr, Qg, Qs, Qh\nTKE","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"For the surface we need the different state variables for the different tiles. The scheme selected determines the variables.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"Boundary files (coupling files) for HARMONIE are prepared in two different ways depending on the nesting procedure defined by HOST_MODEL.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#Using-gl","page":"Preparation","title":"Using gl","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"If you use data from HIRLAM or ECMWF, gl_grib_api will be called to generate boundaries. The generation can be summarized in the following steps:","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"Setup geometry and what kind of fields to read depending on HOST_MODEL\nRead the necessary climate data from a climate file\nTranslate and interpolate the surface variables horizontally if the file is to be used as an initial file. 
All interpolation respects land-sea mask properties. The soil water is not interpolated directly but via the Soil Wetness Index to preserve the properties of the soil between different models. The treatment of the surface fields is only done for the initial file.\nHorizontal interpolation of upper air fields as well as restaggering of winds.\nVertical interpolation using the same method (etaeta) as in HIRLAM\nConserve boundary layer structure\nConserve integrated quantities\nOutput to an FA file (partly in spectral space)","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"gl_grib_api is called by the script scr/gl_bd, where we make different choices depending on PHYSICS and HOST_MODEL.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"When starting a forecast there are options for whether e.g. cloud properties and TKE should be read from the initial/boundary file through NREQIN and NCOUPLING. At the moment these fields are read from the initial file but not coupled to. gl reads them if they are available in the input files and sets them to zero otherwise. For a non-hydrostatic run the non-hydrostatic pressure departure and the vertical divergence are demanded as initial fields. The pressure departure is by definition zero if you start from a hydrostatic model, and since the error made when disregarding the vertical divergence is small it is also set to zero in gl. There is also a choice in the forecast model to run with Q in gridpoint or in spectral space.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"It's possible to use an input file without e.g. the uppermost levels. By setting LDEMAND_ALL_LEVELS=.FALSE. the missing levels will be ignored. This is used at some institutes to reduce the amount of data transferred for the operational runs. ","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#Using-fullpos","page":"Preparation","title":"Using fullpos","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"If you use data generated by HARMONIE you will use fullpos to generate boundaries and initial conditions. Here we will describe how it's implemented in HARMONIE, but there is also good documentation on the gmapdoc site.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"fullpos","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"In HARMONIE it is done by the script scr/E927. It contains the following steps:","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"Fetch climate files. Fullpos needs a climate file and the geometry definition for both the input and output domains. 
\nSet different moist variables in the namelists depending on whether you run AROME or ALADIN/ALARO.\nCheck if input data has Q in gridpoint or spectral space.\nDemand NH variables if we run NH.\nDetermine the number of levels in the input file and extract the correct levels from the definition in scr/Vertical_level.pl\nRun fullpos","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"E927 is also called from 4DVAR when the resolution is changed between the inner and outer loops.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#Generation-of-initial-data-for-SURFEX","page":"Preparation","title":"Generation of initial data for SURFEX","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"For SURFEX we have to fill the different tiles with correct information from the input data. This is called the PREP step in the SURFEX context. scr/Prep_ini_surfex creates an initial SURFEX file from an FA file if you run with SURFACE=surfex. ","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"Read more about SURFEX","category":"page"},{"location":"ClimateGeneration/DownloadInputData/#download-input-data","page":"Input Data","title":"Download input data","text":"","category":"section"},{"location":"ClimateGeneration/DownloadInputData/","page":"Input Data","title":"Input Data","text":"Before you can start running HARMONIE experiments some input data (external to the code repository) needs to be available on your platform. The input data contains physiography data, topography information and climatological values determined from a one-year ARPEGE assimilation experiment with a resolution of T79.","category":"page"},{"location":"ClimateGeneration/DownloadInputData/","page":"Input Data","title":"Input Data","text":" E923_DATA-harmonie-43h2.1.tar.gz: Climate and physiography data for atmospheric climate generation (E923)\n PGD-harmonie-43h2.1.tar.gz: Physiography data for SURFEX (PGD)\nGMTED2010-harmonie-43h2.1.tar.gz : Digital elevation model from USGS\n SOILGRID-harmonie-43h2.1.tar.gz: Soil type data from SOILGRIDS\n sat-harmonie-43h2.1.tar.gz: Constants for satellite information\nrttov7L54-harmonie-43h2.1.tar.gz : RTTOV constants\nECOCLIMAP second generation is available from here. It's also available on hpc-login:/ec/res4/hpcperm/hlam/data/climate/ECOCLIMAP2G\ntestbed-harmonie-43h2.1.tar.gz: Test data set with boundaries and observations for a small 50x50 domain","category":"page"},{"location":"EPS/SPP/#spp","page":"SPP","title":"SPP in HarmonEPS","text":"","category":"section"},{"location":"EPS/SPP/#SPP-options-in-HARMONIE","page":"SPP","title":"SPP options in HARMONIE","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The Stochastically Perturbed Parameterizations scheme (SPP) introduces stochastic perturbations to values of chosen closure parameters representing efficiencies or rates of change in parameterized atmospheric (sub)processes. See here for more information. SPP is available since cy40h1.1.1.","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SPP is activated by setting SPP=yes in ecf/config_exp.h.","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SPP uses the Stochastic Pattern Generator (SPG). 
The pattern characteristics are set by the following settings in config_exp.h:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":" SDEV_SPP = 1.0 # Standard deviation of the pattern\n TAU_SPP = 43200 # Time scale (seconds)\n XLCOR_SPP = 200000 # Length scale (m)\n SPGQ_SPP = 0.5 # Controls small vs. large scales \n SPGADTMIN_SPP=0.15 # initialization to ensure stationary statistics from the start of the integration\n SPGADTMAX_SPP=3.0 # initialization to ensure stationary statistics from the start of the integration\n NPATFR_SPP=-1 # Frequency to evolve pattern: >0 in timesteps, <0 in hours","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The parameters that can be perturbed are: ","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Perturbation Description Perturbs\nP1: LPERT_PSIGQSAT Perturb saturation limit sensitivity VSIGQSAT\nP3: LPERT_CLDDPTHDP Perturb threshold cloud thickness used in shallow/deep convection decision RFRMIN(20)\nP4: LPERT_ICE_CLD_WGT Perturb cloud ice content impact on cloud thickness RFRMIN(21)\nP5: LPERT_ICENU Perturb ice nuclei RFRMIN(9)\nP6: LPERT_KGN_ACON Perturb Kogan autoconversion speed RFRMIN(10)\nP7: LPERT_KGN_SBGR Perturb Kogan subgrid scale (cloud fraction) sensitivity RFRMIN(11)\nP8: LPERT_RADGR Perturb graupel impact on radiation RADGR\nP9: LPERT_RADSN Perturb snow impact on radiation RADSN\nP10:LPERT_RFAC_TWOC Perturb top entrainment RFAC_TWO_COEF\nP11:LPERT_RZC_H Perturb stable conditions length scale RZC_H\nP12:LPERT_RZL_INF Asymptotic free atmospheric length scale RZL_INF\nP13:LPERT_RSWINHF Short wave inhomogeneity factor RSWINHF\nP14:LPERT_RLWINHF Long wave inhomogeneity factor RLWINHF\nP15:LPERT_ALPHA Cloud droplet gamma distribution parameters alpha (over sea) ALPHA\nP16:LPERT_RZNUC Cloud droplet gamma distribution parameters nu (over land) RZNUC\nP17:LPERT_RZMFDRY Parameter for dry mass flux RZMFDRY\nP18:LPERT_RZMBCLOSURE Closure parameter for moist mass flux RZMBCLOSURE","category":"page"},{"location":"EPS/SPP/#Main-settings","page":"SPP","title":"Main settings","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The following describes the namelist flags for SPP. Namelist flags for SPP are found in the namelist NAMSPP in nam/harmonie_namelists.pm","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Activate perturbation of a parameter by setting LPERT_[PARAMETER] to TRUE in harmonie_namelists.pm, e.g.:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":" NAMSPP=>{\n 'LPERT_PSIGQSAT' => '.TRUE.,',\n ...\n },","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The size of the perturbation (the standard deviation of the parameter distribution) is set by CMPERT_[PARAMETER], e.g.:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":" NAMSPP=>{\n 'CMPERT_PSIGQSAT' => '0.3,',\n ...\n },","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The min/max range of each perturbed parameter can be controlled by the CLIP_[PARAMETER] namelist variable where the limits are specified as e.g.:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"NAMSPP=>{\n'CLIP_PSIGQSAT' => '0.0,0.1',\n...\n },","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Two distributions for the parameter values are possible: lognormal and pseudo uniform. 
Note: when a pseudo uniform distribution is used, the distribution may extend to negative values, which should be avoided. This can be ensured by setting a clipping range (see above). Set LUNIFORM_[PARAMETER] to FALSE to use lognormal and to TRUE to use pseudo uniform, e.g.:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"NAMSPP =>{\n'LUNIFORM_PSIGQSAT' => '.FALSE.,',\n...\n },","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"When pseudo uniform is chosen, it is possible to shift the distribution by setting UNIFORM_OFFSET_[PARAMETER], where offset 0.5 is default, <0.5 moves the distribution to the right and >0.5 moves the distribution to the left, e.g.:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"NAMSPP =>{\n'UNIFORM_OFFSET_PSIGQSAT' => '0.45,',\n...\n },","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"When lognormal distribution is chosen, the flag LLNN_MEAN1_[PARAMETER] decides if the mean or the median of the distribution corresponds to the unperturbed, deterministic value of the parameter. Set to FALSE to use the median and to TRUE to use the mean, e.g.:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"NAMSPP =>{\n'LLNN_MEAN1_PSIGQSAT' => '.TRUE.,',\n...\n },","category":"page"},{"location":"EPS/SPP/#Correlation-of-patterns","page":"SPP","title":"Correlation of patterns","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Any number of parameters can be correlated by setting MP_X, where X is the name of the parameter, to the same number in NAMSPP. Anticorrelation can be achieved by setting IC_X=-1. For example, if we set","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"NAMSPP =>{\n...\n 'MP_RZC_H' => '77,',\n 'MP_RZL_INF' => '77,',\n 'MP_KGN_ACON' => '99',\n 'MP_KGN_SBGR' => '99,',\n 'IC_KGN_SBGR' => '-1,',\n...\n },","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"we will correlate RZC_H and RZL_INF and anticorrelate KGN_ACON with KGN_SBGR. The latter is achieved by setting IC_KGN_SBGR=-1. Note that the number for MP_X is used as an ID and should not be considered as a sequence number. It does not control the order of the patterns in the output or similar.","category":"page"},{"location":"EPS/SPP/#Define-the-time-and-length-scales-for-an-individual-pattern","page":"SPP","title":"Define the time and length scales for an individual pattern","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The pattern timescale TAU and lengthscale XLCOR are defined in NAMSPP and are then valid for all patterns. To specify the patterns individually we can set something like:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"NAMSPP =>{\n...\n 'YSPP_CONFIG_PAR(1)%TAG' => '\'PSIGQSAT\'',\n 'YSPP_CONFIG_PAR(1)%TAU' => '21600',\n 'YSPP_CONFIG_PAR(1)%XLCOR' => '150000',\n 'YSPP_CONFIG_PAR(2)%TAG' => '\'KGN_ACON\'',\n 'YSPP_CONFIG_PAR(2)%TAU' => '10800',\n 'YSPP_CONFIG_PAR(2)%XLCOR' => '350000',\n...\n },","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"where TAG is the name of the parameter to be perturbed. 
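Putting the flags above together, one parameter could be fully configured along these lines; this is only an illustrative sketch combining the settings already shown (values are taken from the examples above and are not tuning recommendations):

NAMSPP =>{
 'LPERT_PSIGQSAT' => '.TRUE.,',          # activate the perturbation
 'CMPERT_PSIGQSAT' => '0.3,',            # size (standard deviation) of the perturbation
 'LUNIFORM_PSIGQSAT' => '.FALSE.,',      # lognormal rather than pseudo uniform
 'LLNN_MEAN1_PSIGQSAT' => '.TRUE.,',     # mean of the distribution equals the deterministic value
 'CLIP_PSIGQSAT' => '0.0,0.1',           # min/max range of the perturbed parameter
 'YSPP_CONFIG_PAR(1)%TAG' => '\'PSIGQSAT\'',
 'YSPP_CONFIG_PAR(1)%TAU' => '21600',    # individual time scale (s)
 'YSPP_CONFIG_PAR(1)%XLCOR' => '150000', # individual length scale (m)
 },
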
","category":"page"},{"location":"EPS/SPP/#Recommended-SPP-settings-(cy43):","page":"SPP","title":"Recommended SPP settings (cy43):","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"CMPERT needs to be tuned for each parameter. CMPERT1 in the table below is the value that gives the range of values for the parameters recommended by the physics experts (when a lognormal distribution is used, if not stated otherwise). CMPERT is the value recommended for use. Tuning is ongoing, hence not all recommendations are in place yet. The well tested settings are in bold, preliminary suggestions are in italic.","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Perturbation Det. value Recom. range CMPERT (CMPERT1) Distribution Offset Mean Median Clipping Corr.\nPSIGQSAT 0.02 0-0.06 0.6 (0.3) Log-normal N.A. Mean No N.A.\nCLDDPTHDP 4000 0-10000 0.6 (0.3) Log-normal N.A. Mean No N.A.\nICE_CLD_WGT 1 0.2-2 1.2 (0.3) Uniform 0.5 N.A. 0.01 - 10 N.A.\nICENU 1 0.01-100 TBD (1.05) TBD TBD TBD TBD N.A.\nKGN_ACON 10 1-100 TBD (0.75) TBD TBD TBD TBD N.A.\nKGN_SBGR 1[1] 0.01-1 TBD (0.3) TBD TBD TBD 0., 1. N.A.\nRADGR 0.5 0-1 TBD (0.4) TBD TBD TBD 0., 2. N.A.\nRADSN 1 0-2 TBD (0.35) TBD TBD TBD 0., 2. N.A.\nRFAC_TWOC 2 0.5-3 TBD (0.3) TBD TBD TBD TBD N.A.\nRZC_H 0.11 0.1-0.2 1.05 (0.3) Uniform 0.475 N.A. 0.001, 100 Yes\nRZL_INF 40 20-200 0.45 (0.45) Log-normal N.A. Mean No Yes\nLPERT_RSWINHF 1? 0.95-1 Not tested \nLPERT_RLWINHF 1? 0.95-1 Not tested \nLPERT_ALPHA 3 0.2-5 1.4 (0.3) Uniform 0.5 N.A. TBD N.A.\nLPERT_RZNUC 3 0.2-10 0.6 (0.3) Log-normal N.A. Mean No N.A.\nLPERT_RZMFDRY 1 0.2-2 0.8 (0.3) Log-normal N.A. Mean No N.A.\nLPERT_RZMBCLOSURE 0.35 0.05-0.7 0.8 (0.3) Log-normal N.A. Mean No N.A.","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"[1]: Default/deterministic value of 1 and recommended range of 0.01-1 means the deterministic value is at the high end of the distribution. ","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"For more SPP details see src/arpifs/module/spp_mod.F90","category":"page"},{"location":"EPS/SPP/#Tendency-and-pattern-diagnostics","page":"SPP","title":"Tendency and pattern diagnostics","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Diagnostic output of tendencies and SPP patterns can be activated by setting TEND_DIAG=yes in ecf/config_exp.h. Activation gives six new 3D-fields","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"FANAME Description\nSNNNPTENDU U-component tendencies\nSNNNPTENDV V-component tendencies\nSNNNPTENDT Temperature tendencies\nSNNNPTENDR Moisture tendencies\nSNNNMULNOISE SPPT pattern, same for all levels\nSNNNSPP_PATTERN SPP pattern, distribution as explained below","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The raw and scaled patterns are stored in the vertical column of SNNNSPP_PATTERN using the index given for Diagnostic number in the SPP initialization. 
Thus, with the standard settings and the correlated pattern example above we get in the standard log file NODE.001_01 available in the Forecast task output:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"...\nPSIGQSAT pattern/diagnostic numbers are: 1 1\nCLDDPTHDP pattern/diagnostic numbers are: 2 2\nICE_CLD_WGT pattern/diagnostic numbers are: 3 3\nICENU pattern/diagnostic numbers are: 4 4\nKGN_ACON pattern/diagnostic numbers are: 5 5\nKGN_SBGR pattern/diagnostic numbers are: 5 6\nRADGR pattern/diagnostic numbers are: 6 7\nRADSN pattern/diagnostic numbers are: 7 8\nRFAC pattern/diagnostic numbers are: 8 9\nRZC_H pattern/diagnostic numbers are: 9 10\nRZCL_INF pattern/diagnostic numbers are: 9 11\n...","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The raw pattern is stored as 2N-1 and the scaled one as 2N where N is the Diagnostic number given in the log file. This gives us the following table:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Perturbation raw pattern scaled pattern\nPSIGQSAT S001SPP_PATTERN S002SPP_PATTERN\nCLDDPTHDP S003SPP_PATTERN S004SPP_PATTERN\nICE_CLD_WGT S005SPP_PATTERN S006SPP_PATTERN\nICENU S007SPP_PATTERN S008SPP_PATTERN\nKGN_ACON S009SPP_PATTERN S010SPP_PATTERN\nKGN_SBGR S011SPP_PATTERN S012SPP_PATTERN\nRADGR S013SPP_PATTERN S014SPP_PATTERN\nRADSN S015SPP_PATTERN S016SPP_PATTERN\nRFAC S017SPP_PATTERN S018SPP_PATTERN\nRZC_H S019SPP_PATTERN S020SPP_PATTERN\nRZL_INF S021SPP_PATTERN S022SPP_PATTERN","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"where the numbering may depend on the number of actively perturbed parameters.","category":"page"},{"location":"EPS/SPP/#Cy40h111-settings-(NB-only-log-normal-distribution-possible)","page":"SPP","title":"Cy40h111 settings (NB only log-normal distribution possible)","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"In config_exp.h:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SDEV_SPP = 3.0 # Standard deviation of the pattern","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Perturbation Det. value Recommended range CMPERT (CMPERT1) Clipping range Mean or median\nLPERT_PSIGQSAT 0.02, but set to 0.03 0-0.06 0.4 (0.1) No Mean\nLPERT_CLDDPTHDP 4000 1000-8000 0.4 (0.1) No Mean\nLPERT_ICE_CLD_WGT 1 0-2 0.4 (0.1) No Mean\nLPERT_ICENU 1 0.1-10 0.7 (0.35) No Median\nLPERT_KGN_ACON 10 2-50 0.5 (0.25) No Mean\nLPERT_KGN_SBGR 1, but set to 0.5 0.01-1 0.2 (0.1) 0.0 - 1.0 Mean\nLPERT_RADGR 0, but set to 0.5 0-1 0.3 (0.15) 0.0 - 2.0 Mean\nLPERT_RADSN 0, but set to 0.5 0-1 0.3 (0.15) 0.0 - 2.0 Mean\nLPERT_RFAC_TWOC 2 0.5-3 0.4 (0.1) No Mean\nLPERT_RZC_H 0.15 0.1-0.25 0.4 (0.1) No Mean\nLPERT_RZL_INF 100 30-300 0.6 (0.15) No Mean","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"In cy40 the output of patterns and tendencies was as follows:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The raw and scaled patterns are stored in the vertical column of SNNNEZDIAG01 using the index given in the SPP initialization. 
Thus","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"...\nKGET_SEED_SPP: PSIGQSAT 10000 1841082593\n pattern 1 for PSIGQSAT using seed 1841082593\nKGET_SEED_SPP: CLDDPTH 10002 570790063\n pattern 2 for CLDDPTH using seed 570790063\nKGET_SEED_SPP: CLDDPTHDP 10004 980493159\n pattern 3 for CLDDPTHDP using seed 980493159\nKGET_SEED_SPP: ICE_CLD_WGT 10008 1362729695\n pattern 4 for ICE_CLD_WGT using seed 1362729695\n...","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"would give us","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Perturbation raw pattern scaled pattern\nPSIGQSAT S001EZDIAG01 S002EZDIAG01\nCLDDPTH S003EZDIAG01 S004EZDIAG01\nCLDDPTHDP S005EZDIAG01 S006EZDIAG01\nICE_CLD_WGT S007EZDIAG01 S008EZDIAG01","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"and so on","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SPPT pattern EZDIAG02 (same in all levels)","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SPP tendencies PtendU EZDIAG03","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SPP tendencies PtendV EZDIAG04","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SPP tendencies PtendT EZDIAG05","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SPP tendencies PtendQ EZDIAG06","category":"page"},{"location":"EPS/SPP/#Suggestions-for-parameters-to-include-in-SPP:","page":"SPP","title":"Suggestions for parameters to include in SPP:","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Parameter Description Deterministic value cy43 Suggested range of values suggestion for parameter to correlate with Person responsible for implementing\n Terminal fall velocities of rain, snow and graupel Sibbo\nRFRMIN(39) Depo_rate_graupel RFRMIN 39 and 40 should approximately respect log10C = -3.55 x + 3.89, see eq. 6.2 on p. 108 in the meso-NH documentation: [https://hirlam.org/trac/attachment/wiki/HarmonieSystemDocumentation/EPS/SPP/sciICE3doc_p3.pdf Doc] Pirkka\nRFRMIN(40) Depo_rate_snow) RFRMIN 39 and 40 should approximately respect log10C = -3.55 x + 3.89, see eq. 6.2 on p. 108 in the meso-NH documentation: [https://hirlam.org/trac/attachment/wiki/HarmonieSystemDocumentation/EPS/SPP/sciICE3doc_p3.pdf Doc] Pirkka\nRFRMIN(16) Distr_snow_c to be correlated with RFRMIN(17) \nRFRMIN(17) Distr_snow_x to be correlated with RFRMIN(16) ","category":"page"},{"location":"EPS/SPP/#Experiments","page":"SPP","title":"Experiments","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"List with cy43h22 experiments is here: [wiki:HarmonieSystemDocumentation/EPS/ExplistSPPcy43 List of experiments]","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"A guide for running the tuning experiments is here: [wiki:HarmonieSystemDocumentation/EPS/HowtoSPPcy43 Guide]","category":"page"},{"location":"EPS/Howto/#eps-howto","page":"Howto","title":"How to run an ensemble experiment","text":"","category":"section"},{"location":"EPS/Howto/#Simple-configuration","page":"Howto","title":"Simple configuration","text":"","category":"section"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"Running an ensemble experiment is not very different from running a deterministic one. The basic instructions about setup are the same and will not be repeated here. 
","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"What is different is that in ecf/config_exp.h one needs to pay attention to this particular section:","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"# *** Ensemble mode general settings. ***\n# *** For member specific settings use msms/harmonie.pm ***\nENSMSEL= # Ensemble member selection, comma separated list, and/or range(s):\n # m1,m2,m3-m4,m5-m6:step mb-me == mb-me:1 == mb,mb+1,mb+2,...,me\n # 0=control. ENSMFIRST, ENSMLAST, ENSSIZE derived automatically from ENSMSEL.\nENSINIPERT= # Ensemble perturbation method (bnd). Not yet implemented: etkf, hmsv, slaf.\nENSCTL= # Which member is my control member? Needed for ENSINIPERT=bnd. See harmonie.pm.\nENSBDMBR= # Which host member is used for my boundaries? Use harmonie.pm to set.\nENSMFAIL= # Failure tolerance for all members. Not yet implemented.\nENSMDAFAIL= # Failure tolerance for members doing own DA. Not yet implemented.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"In addition one should also look at BDSTRATEGY, choose eps_ec if you want to use EC EPS at the boundaries (this option gets the EC EPS data from the GLAMEPS ECFS archive). If you want to use SLAF see here.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"What really triggers EPS mode is having a non-empty ENSMSEL (ensemble member selection). The reason the specification looks a bit complicated is that our ensemble members do not necessarily have to be numbered consecutively from 0 or 1 and up, but can also be specified with steps. The rationale behind this is that we may want to e.g. downscale a subset of the 51 ECMWF EPS members, but not necessarily starting from their lowest number or taking them consecutively. ENSMSEL is a heritage from the Hirlam EPS system and has been retained in Harmonie.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"In the simplest case of consecutive numbering, say we want a control run (member 0) and 20 perturbed members. We can then put","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"ENSMSEL=0-20","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"Now assume that we still have a control and 20 members, but that we want to take only every second pair of the host EPS members, i.e., take 0,1,2, skip 3,4, take 5,6, skip 7,8 and so on. The following specifications are then equivalent:","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"ENSMSEL=0,1,2,5,6,9,10,13,14,17,18,21,22,25,26,29,30,33,34,37,38\nENSMSEL=0,1-37:4,2-38:4","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"In the second version we use the step option, so our list is 0, 1 to 37 in steps of 4 and 2 to 38 in steps of 4. The system will take care of transforming this into an ascending list for easier handling within the script system, but we don't have to worry about that.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"The ENSMSEL selection is still not totally flexible. It would not be possible to have more than one of our members having boundaries from the same member of the host model. This might be relevant in the case of multiple physics, and multiple control members. For this reason the variable ENSBDMBR has also been added (in [10953]). 
The usage of this variable is explained in the next section (advanced configuration).","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"For the rest of the ENS... variables, not everything planned is implemented by the time of this writing. The only valid choice (except empty) for ENSINIPERT (initial state perturbation method) is \"bnd\". This option means to take the perturbations of the first (interpolated) boundary file, and add these perturbations to a reference (control) analysis. This will involve the script scr/PertAna, a section of its header is quoted below:","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"#| Different perturbation methods are distinguished by\n#| ENSINIPERT. This script implements ENSINIPERT=bnd\n#|\n#| bnd: boundary data mode\n#| an($ENSMBR) = an(cntrl) + bnd1($ENSMBR) - bnd1(cntrl)\n#| where bnd1 denotes the first boundary file","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"Which member is the control member is specified by the variable ENSCTL.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"But how to specify that one (or more) member(s) run assimilation and others do not, or in other words, how to specify member specific values to the variables in config_exp.h? This is the topic of the next section.","category":"page"},{"location":"EPS/Howto/#Advanced-configuration,-member-specific-settings","page":"Howto","title":"Advanced configuration, member specific settings","text":"","category":"section"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"It would perhaps have been possible to also have member specific configuration in config_exp.h, but since perl is more flexible with lists than the shell, and since perl is already used extensively in the Harmonie system, it was decided to extend the handling of the template definition files in mini-SMS in such a way that every tdf can now also have an associated perl module to help in its interpretation. And, since after the changesets [10930] and [10932] there is no separate tdf for HarmonEPS anymore (harmonie.tdf is used also for EPS runs), the file that is used for member specific settings is thus msms/harmonie.pm.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"The idea of harmonie.pm is to be able to override some of the environment variables of config_exp.h with new values for selected members of the ensemble. This is achieved by populating the perl hash %env with key => value pairs. The keys are names of environment variables, like ANAATMO, ANASURF, PHYSICS etc. Only names that are present and exported in config_exp.h should be used as keys. Values can take four different forms:","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"A hash, i.e., a new set of key => value pairs. The syntax in this case is { m1 => val1, m2 => val2, ... }. The numbers m1, m2, etc. must be member numbers given in ENSMSEL. Order is irrelevant, and only members with values different from the default need be listed of course.\nAn array, where indices implicitly run from 0 and up. The syntax in this case is [ val0, val1, val2, ...]. Here the array should have as many values as members given in ENSMSEL, but if not, missing values will be recycled from the start of the array (as many times as necessary). Thus, using arrays will give values to all members, and order is important.\nA scalar (string). 
This string is subject to variable substitution, i.e., any occurrence of the substring @EEE@ will be replaced by the relevant 3-digit ensemble member number.\nA subroutine (reference); the syntax is typically sub { my $mbr = shift; return \"something dependent on $mbr\"; }. The arguments given to the subroutine are the \"args\" of the invoking &Env('SOMEVAR',args) call (see below).","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"In addition to the hash %env, harmonie.pm also contains a subroutine Env. In msms/harmonie.tdf many earlier occurrences of $ENV{SOMEVAR} have now been replaced by subroutine calls &Env('SOMEVAR','@EEE@'). The @EEE@ argument will be replaced by the relevant member number before invocation, and Env will check the hash %env for a member specific setting to possibly return instead of the default value $ENV{SOMEVAR}. There should normally be no need to make changes to the subroutine Env; putting entries into the hash %env ought to be enough.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"Note also that not every occurrence of $ENV{...} has been replaced by a corresponding &Env(...) in harmonie.tdf; only those variables that are most likely to have variations among members are changed. If you need variations in e.g. $HOST_MODEL, then harmonie.tdf needs to be updated so that those variations are respected within the ensemble (EEE) loops.","category":"page"},{"location":"EPS/Howto/#An-example","page":"Howto","title":"An example","text":"","category":"section"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"We will now look at one particular example, in order to (hopefully) make the descriptions above a bit clearer. Our intent is to have an ensemble with a mix of members with AROME and ALARO physics, with one control member and 10 perturbed members for each. The control members will both do their own 3DVAR assimilation, while perturbed members will have ANAATMO=blending. But with ENSINIPERT=bnd, the control analysis will be used also by the perturbed members. All members will do surface assimilation, but the forecast interval differs. The control members have a forecast interval of 6 hours (because of the 3D-Var), while the perturbed members have FCINT=12. 
To achieve this, we have the following settings in config_exp.h:","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"ANAATMO=blending\nANASURF=CANARI_OI_MAIN\nFCINT=12\nBDSTRATEGY=eps_ec\nENSMSEL=0-21\nENSINIPERT=bnd\nENSCTL=\nENSBDMBR=","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"In harmonie.pm our %env looks as follows:","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"%env = (\n 'ANAATMO' => { 0 => '3DVAR', 1 => '3DVAR' },\n 'FCINT' => { 0 => 6, 1 => 6 },\n 'PHYSICS' => [ 'arome','alaro','alaro','arome'],\n 'ENSCTL' => [ '000', '001', '001', '000'],\n 'ENSBDMBR' => [ 0, 0, 1..20],\n\n### Normally NO NEED to change the variables below\n 'ARCHIVE' => '${ARCHIVE}mbr@EEE@/',\n 'CLIMDIR' => '$CLIMDIR/mbr@EEE@',\n 'OBDIR' => '$OBDIR/mbr@EEE@',\n 'VFLDEXP' => '${EXP}mbr@EEE@',\n 'BDDIR' => sub { my $mbr = shift;\n if ($ENV{COMPCENTRE} eq 'ECMWF') {\n return '$BDDIR/mbr'.sprintf('%03d',$mbr);\n } else {\n return '$BDDIR/mbr'.sprintf('%03d',&Env('ENSBDMBR',$mbr));\n }\n },\n 'FirstHour' => sub { my $mbr = shift;\n return $ENV{StartHour} % &Env('FCINT',$mbr);\n }\n );","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"ANAATMO is straightforward, only the control members need an exception from blending, so using a hash is most appropriate. Similarly for FCINT. For PHYSICS we have used an array and the fact that the array will be recycled. Thus member 0 will be the AROME control, while member 1 will be the ALARO control. The reason why we did not simply put a 2-element array [ 'arome','alaro'] to be repeated is that since the ECMWF perturbations come in +/- pairs, we don't want all the '+' perturbations to be always with the same physics (and the '-' perturbations with the other type). Therefore, we added a second pair with the order reversed, to alternate +/- perturbations between AROME and ALARO members. ENSCTL follows the same pattern as PHYSICS. Note the need for 3-digit numbers in ENSCTL, at present this is necessary to avoid parsing errors in the preparation step of mini-SMS.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"Note also how we have used ENSBDMBR. For both the AROME control (member 0) and ALARO control (member 1), we have used the EC EPS control member 0 to provide boundaries. The syntax 1..20 is a perl shorthand for the list 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"Note added after changeset [12537]: The setting of ENSBDMBR created a race condition in the boundary extraction for runs at ECMWF. This is hopefully solved by the new definition for BDDIR, which makes use of the possibility of having a subroutine to compute the member specific settings. 
Another example where a subroutine came in handy was for the setting of FirstHour.","category":"page"},{"location":"EPS/Howto/#Further-reading","page":"Howto","title":"Further reading","text":"","category":"section"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"More specific instructions and information about known problems can be found here.","category":"page"},{"location":"Build/Build_with_cmake/#Build-with-CMake","page":"CMake","title":"Build with CMake","text":"","category":"section"},{"location":"Build/Build_with_cmake/#Background","page":"CMake","title":"Background","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"CMake is a build system generator supporting multiple build systems and programming languages; in particular, Fortran is a first-class citizen, allowing, for example, out-of-the-box handling of the inter-module dependencies. A build system generator means that a description of the build procedure written in the CMake-script language is used by the cmake tool to generate the actual build system, for example using the Unix Makefiles or Ninja generators. Thus, all modifications should be performed on the CMake-script level and not within the generated build system, as these changes will be overwritten when re-running cmake at some point.","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"Why provide yet another alternative for building HARMONIE-AROME? Well, makeup does a very good job building the system; however, it's an in-house solution which has a number of limitations:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"makeup is an in-house build system, so there are components that require more maintenance compared to a standardized build tool\nmakeup uses a considerable number of sequential steps, which increase the total build time\nthe configure step takes quite some time, although in some cases it can be skipped, but users have to remember when they must re-run configure and this dependency is not enforced by makeup\nnot all the dependencies are tracked by makeup, for example updating configure files does not trigger a re-build
Of course, something like this can also be implemented with makeup, but it would require considerable development effort.","category":"page"},{"location":"Build/Build_with_cmake/#Getting-started-with-CMake","page":"CMake","title":"Getting started with CMake","text":"","category":"section"},{"location":"Build/Build_with_cmake/#Selecting-the-CMake-based-build-system-when-installing-HARMONIE-AROME","page":"CMake","title":"Selecting the CMake-based build system when installing HARMONIE-AROME","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"If all the config files are available, building HARMONIE-AROME with CMake should be as simple as setting the BUILD_WITH variable when invoking Harmonie:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"config-sh/Harmonie install BUILD_WITH=cmake","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"or, alternatively, setting the desired option in ecf/config_exp.h.","category":"page"},{"location":"Build/Build_with_cmake/#Building-HARMONIE-AROME-with-CMake-from-the-command-line","page":"CMake","title":"Building HARMONIE-AROME with CMake from the command line","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"Sometimes calling Harmonie install is not the best choice, and one might want to compile the code from the command line. In this case, compilation of HARMONIE-AROME with CMake consists of three individual steps:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"compiling the auxiliary libraries (gribex and such)\ncompiling the main code of HARMONIE-AROME\noptionally, compiling some additional tools (for example, gl)","category":"page"},{"location":"Build/Build_with_cmake/#1.-Compiling-the-auxiliary-libraries","page":"CMake","title":"1. Compiling the auxiliary libraries","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"This step is rather straightforward. Assuming that the HARMONIE-AROME code is located under the path stored in the HM_LIB environment variable, one can adapt the following snippet to compile all the required libraries:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"# Point CONFIG_FILE to your config file and set INSTALL_DIR to the desired installation prefix\nCMAKE_FLAGS=\"-DCONFIG_FILE=\"\nINSTALL_DIR=\"\"\n\nAUX_LIBS='bufr_405 gribex_370 rgb_001 dummies_006/mpidummy'\nfor project in $AUX_LIBS; do\n echo \"Compiling $project\"\n current_project_dir=$HM_LIB/util/auxlibs/$project\n # One build directory per project, e.g. dummies_006/mpidummy -> build-dummies_006-mpidummy\n current_build_dir=\"build-`echo $project | sed 's|/|-|g'`\"\n\n mkdir -p $current_build_dir && cd $current_build_dir\n\n # CMake build type can be changed to Debug, if needed\n cmake $current_project_dir -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR $CMAKE_FLAGS\n # Here -j tells CMake how many parallel compilation processes to use\n cmake --build . --target install -j16\n\n cd ..\ndone","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"If a config file for your specific machine is not there, you can try the generic config files provided for different compiler types. 
To do so, just drop -DCONFIG_FILE from the list of CMake command line arguments and CMake will try to load a suitable configuration file, if available.","category":"page"},{"location":"Build/Build_with_cmake/#2.-Compiling-the-main-code-of-HARMONIE-AROME","page":"CMake","title":"2. Compiling the main code of HARMONIE-AROME","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"Following the procedure described in the previous step, one can use a similar approach to compile the main code (here, one of the generic configuration files is used; it can, of course, be replaced with a different one or dropped, but it must be the same config file that was used to compile the auxiliary libraries):","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"mkdir build && cd build\n# Configure and generate the build system\n# (-G Ninja selects the Ninja generator for building HARMONIE-AROME; drop it to build with Makefiles)\ncmake $HM_LIB/src \\\n -G Ninja \\\n -DCMAKE_BUILD_TYPE=Release \\\n -DCONFIG_FILE=$HM_LIB/util/util/cmake/config.GNU.cmake \\\n -Dbufr_DIR=$INSTALL_DIR/lib/cmake/bufr \\\n -Dgribex_DIR=$INSTALL_DIR/lib/cmake/gribex \\\n -Drgb_DIR=$INSTALL_DIR/lib/cmake/rgb \\\n -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR\n\n# Build and install HARMONIE-AROME\ncmake --build . --target install -j16","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"note: Note\nObviously, when compiling from the command line, additional command line arguments might be provided to CMake at the configure step as needed. However, the preferred solution is to use a configuration file to handle as much of the machine-specific details as possible.","category":"page"},{"location":"Build/Build_with_cmake/#3.-Compiling-the-tools","page":"CMake","title":"3. Compiling the tools","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"The approach is the same as for the main code; however, you might want to add -Dharmonie_DIR=$INSTALL_DIR/lib/cmake/harmonie if the tool in question needs HARMONIE-AROME libraries for compilation.","category":"page"},{"location":"Build/Build_with_cmake/#Configuration-files","page":"CMake","title":"Configuration files","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"Configuration files, similarly to makeup, are used to provide compilation flags, define external libraries to use when compiling the code, et cetera. Thus, a correct configuration file is one of the key elements of successfully building HARMONIE-AROME. The CMake-based build system of HARMONIE-AROME uses configuration files written in JSON format. JSON was chosen to make these files more declarative and, hopefully, easier to maintain and modify than plain CMake-script-based files would be.","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"The main config file, which is used to build both the auxiliary libraries and the main HARMONIE-AROME code, should be placed under the util/cmake/config directory. 
This file has the following top-level structure:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"{\n \"build_tools\":[],\n \"dependencies\":[],\n \"programs\":[],\n \"configure\":{},\n \"compile\":[],\n \"compile_single\":[],\n \"compile_double\":[],\n \"custom_compile\":{},\n \"link\":[]\n}","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"Here, all sections except configure, custom_compile and link are mandatory. In the following, a detailed description of each config file section is provided.","category":"page"},{"location":"Build/Build_with_cmake/#The-build_tools-section","page":"CMake","title":"The build_tools section","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"This section lists the external tools required for compiling HARMONIE-AROME, excluding compilers. Currently, this section should always contain the following two entries: FLEX and BISON, although in the future this list might be extended. So, for now, this section is always defined as:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"\"build_tools\":[\"BISON\", \"FLEX\"]","category":"page"},{"location":"Build/Build_with_cmake/#The-dependencies-section","page":"CMake","title":"The dependencies section","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"This section provides a list of external (external here means \"not found within the src directory of HARMONIE-AROME\", so, for example, gribex is also an external library for the CMake build) libraries required to compile and link the HARMONIE-AROME code. Since finding the correct library can be a tricky task, this section allows a number of options for specifying external dependencies:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"You can rely completely on CMake and delegate to it all the work of finding a dependency. In this case, a dependency is added as a simple string to the dependencies section, for example:\n\"dependencies\":[\"OpenMP\", \"LAPACK\"]\nThis option is for packages like OpenMP that do not involve finding libraries in unusual places, as often happens when using environment modules.\nYou can still rely on CMake to find the package, but provide a bit of detail on how to find it. In this case, a dependency is added as a JSON object of the following form (using the NetCDF library as an example):\n{\n \"pkg\":\"NetCDF\",\n \"use_cmake_config\":false,\n \"components\":[\"C\",\"Fortran\"],\n \"hints\":[\"$ENV{NETCDF_DIR}\",\"$ENV{NETCDF_F_DIR}\"],\n \"cmake\":{\"NETCDF_USE_DEFAULT_PATHS\":true}\n}\nHere the use_cmake_config field tells CMake which mechanism it should use to find the library in question. When use_cmake_config is set to true, CMake will look for the CMake configuration files installed with the library, which is the recommended option in modern CMake. However, even though it is recommended by the CMake authors, not all libraries provide CMake configuration files, so simply setting use_cmake_config to true does not always work (at least it works for the auxiliary libraries compiled with CMake). 
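For a library that does provide such files, a minimal entry suffices; as a sketch (not copied from the actual config file), the gribex library built in step 1, whose CMake configuration files are installed under $INSTALL_DIR/lib/cmake/gribex, could be declared as:\n{\"pkg\":\"gribex\", \"use_cmake_config\":true}\n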
You might want to provide a -D<pkg>_DIR= argument to the cmake command when configuring the build if CMake fails to find a package.\nAnother alternative is setting use_cmake_config to false; then CMake will try to find the required dependency using the hand-written scripts provided by the authors of CMake (or those found under the util/cmake directory of HARMONIE-AROME). These scripts usually do quite some work trying to find a dependency, yet sometimes fail even if the library is there, for example when it is located in a very unusual place or has an unexpected pkg-config name.\nWhen using \"use_cmake_config\":false one may add a components list if only a language-specific version of the dependency is wanted. For example, with:\n{\"pkg\":\"NetCDF\", \"use_cmake_config\":false, \"components\":[\"C\"]}\nCMake would not try to find the Fortran version of the NetCDF library, which can sometimes be useful. Use this way of defining external dependencies for libraries such as MPI, which can have multiple vendors and subtle differences between the libraries provided (for example, CMake should be able to figure out the correct MPI libraries for both MPICH and Open-MPI).\nThe hints list tells CMake which directories it should check when looking for a library.\nnote: Note\nElements of the hints list are simply added to the <pkg>_ROOT CMake variable. If CMake's find_package() does not use this variable, providing hints has no effect.\nFinally, the cmake section provides a key-value set of elements, which will be converted to corresponding CMake variables set before calling find_package(). Thus, it can be used to control the behaviour of find_package.\nnote: Note\nVariables set in the cmake section are local to the current package and do not modify the global scope.\nWhen none of the above works, you can provide all the flags manually. To do so, use the following form for a dependency entry:\n{\n \"pkg\":\"HDF5\",\n \"raw_lib\":{\n \"include\":\"$ENV{HDF5_DIR}/include\",\n \"lib_directory\":\"$ENV{HDF5_DIR}/lib\",\n \"lib\":[\"-lhdf5hl_fortran\", \"-lhdf5_fortran\", \"-lhdf5_hl\", \"-lhdf5\"]\n }\n}\nwhere the raw_lib component provides all the needed include and link directories as well as the link libraries. If some fields of the raw_lib object are unneeded, they can be set to null:\n{\"pkg\":\"rt\", \"raw_lib\":{\"include\":null, \"lib_directory\":null, \"lib\":\"-lrt\"}}\nNote that all the members of the raw_lib object can be defined as lists:\n{\n \"pkg\":\"HDF5\",\n \"raw_lib\":{\n \"include\":[\"$ENV{HDF5_DIR}/include\",\"$ENV{HDF5_DIR}/include_fortran\"],\n \"lib_directory\":[\"$ENV{HDF5_DIR}/lib\",\"$ENV{HDF5_DIR}/lib64\"],\n \"lib\":[\"-lhdf5hl_fortran\", \"-lhdf5_fortran\", \"-lhdf5_hl\", \"-lhdf5\"]\n }\n}\nWhen providing the required libraries in the lib section, one can skip the -l prefix; thus \"lib\":\"-lrt\" and \"lib\":\"rt\" have the same effect.\nSometimes it can be useful to define a dummy library in CMake without actually looking for the library files, for example when compiling a tool which uses only a subset of the HARMONIE-AROME libraries. When loading HARMONIE-AROME as a CMake package, all the targets associated with external dependencies should be present, but some of these dependencies might not be needed for successful linking (or they are added implicitly by the programming environment and adding them a second time in CMake won't make any difference). 
In this case you can use the following:\n{\"pkg\":\"gribex\", \"dummy\":true}","category":"page"},{"location":"Build/Build_with_cmake/#The-programs-section","page":"CMake","title":"The programs section","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"This section provides a list of HARMONIE-AROME programs to build (excluding MASTERODB, which is always built by the CMake build system), for example:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"\"programs\":[\"BATOR\", \"oulan\", \"ioassign\", \"LSMIX\"]","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"CMake will try to find the corresponding Fortran source files and will complain if unable to do so. Currently it is not possible to explicitly tell CMake via the JSON config which program should be compiled from which source file. If CMake is unable to figure out how to compile a program, the CMake code should be altered to tell it how to do so.","category":"page"},{"location":"Build/Build_with_cmake/#The-configure-section","page":"CMake","title":"The configure section","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"This section provides various configure-time flags controlling the build system or selecting features. Currently, in the main HARMONIE-AROME config file, this section is defined as follows:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"\"configure\":{\n \"use_flexfix\":true\n , \"precision\":\"double\"\n},","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"Here the use_flexfix option controls the usage of the flexfix wrapper: set it to true to use the wrapper when generating lexers for the Blacklist and ODB compilers. Setting use_flexfix to false results in using the flex tool directly. The precision option controls the floating point precision of the build, with possible values of double and single. This is a mandatory option; removing it results in a CMake fatal error at configure time.","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"The configure file for gl has the following options in the configure section:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"\"configure\":{\n \"use_aladin\":true\n , \"use_netcdf\":true\n , \"check_preferlocalconcepts_bug\":true\n}","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"Set use_aladin to true to compile with FA support (requires HARMONIE-AROME libraries). Set use_netcdf to true to enable NetCDF support in gl. Set check_preferlocalconcepts_bug to true to perform a configure-time auto-detection test that checks whether the supplied eccodes version is affected by the preferLocalConcepts bug. 
This test can be skipped, although in that case the corresponding CPP definitions must be added manually to the config file if a 'bad' eccodes version is used.","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"note: Note\nIf an option is removed from the configure section, CMake treats it as set to false for boolean flags or as an empty string for string options.","category":"page"},{"location":"Build/Build_with_cmake/#Adding-a-new-configure-option","page":"CMake","title":"Adding a new configure option","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"There is no predefined list of configure section members in the CMake JSON config; any element found in this section will be available as CONFIG_