From 59949662b05ce28eb8fa81ae77bffb4c3a2c22aa Mon Sep 17 00:00:00 2001 From: "Documenter.jl" Date: Wed, 6 Nov 2024 14:12:29 +0000 Subject: [PATCH] build based on d6c76564 --- previews/PR1129/.documenter-siteinfo.json | 2 +- .../BoundaryFilePreparation/index.html | 4 +- .../PR1129/Build/Build_with_cmake/index.html | 4 +- .../PR1129/Build/Build_with_makeup/index.html | 4 +- .../ClimateGeneration/index.html | 2 +- .../DownloadInputData/index.html | 2 +- .../ClimateSimulation/index.html | 2 +- .../PR1129/DataAssimilation/CHKEVO/index.html | 4 +- .../PR1129/DataAssimilation/DFS/index.html | 2 +- .../DataAssimilation/DaAlgorithms/index.html | 4 +- .../DigitalFilterInitialization/index.html | 2 +- .../DataAssimilation/LSMIXandJk/index.html | 2 +- .../PR1129/DataAssimilation/MTEN/index.html | 4 +- .../DataAssimilation/NWECHKEVO/index.html | 4 +- .../ObservationOperators/index.html | 4 +- .../DataAssimilation/Screening/index.html | 2 +- .../DataAssimilation/SingleObs/index.html | 4 +- .../StructureFunctions/index.html | 4 +- .../Surface/CANARI/index.html | 4 +- .../Surface/CANARI_EKF_SURFEX/index.html | 2 +- .../Surface/CANARI_OI_MAIN/index.html | 2 +- .../Surface/SurfaceAnalysis/index.html | 4 +- previews/PR1129/EPS/BDSTRATEGY/index.html | 2 +- previews/PR1129/EPS/Howto/index.html | 4 +- .../EPS/SLAF/Get_pertdia.pl.pm/index.html | 2 +- previews/PR1129/EPS/SLAF/index.html | 4 +- previews/PR1129/EPS/SPP/index.html | 4 +- .../PR1129/EPS/SPPImplementation/index.html | 2 +- previews/PR1129/EPS/SPPT/index.html | 2 +- previews/PR1129/EPS/Setup/index.html | 2 +- previews/PR1129/EPS/System/index.html | 4 +- .../ConfigureYourExperiment/index.html | 4 +- .../How_to_use_hires_topography/index.html | 4 +- .../ModelDomain/index.html | 4 +- .../Namelists/index.html | 4 +- .../PlatformConfiguration/index.html | 4 +- .../UpdateNamelists/index.html | 2 +- .../UseofObservation/index.html | 4 +- .../VerticalGrid/index.html | 2 +- .../namelist_sfx_forecast/index.html | 2 +- previews/PR1129/ForecastModel/DDH/index.html | 4 +- .../PR1129/ForecastModel/Forecast/index.html | 2 +- .../ForecastModel/ForecastSettings/index.html | 2 +- previews/PR1129/ForecastModel/HR/index.html | 4 +- .../NearRealTimeAerosols/index.html | 2 +- .../PR1129/ForecastModel/OCDN2/index.html | 4 +- .../ForecastModel/Outputlist/index.html | 4 +- .../SingleColumnModel/Forcing/index.html | 4 +- .../SingleColumnModel/MUSC/index.html | 4 +- .../SingleColumnModel/MUSC_EMS/index.html | 4 +- .../SingleColumnModel/MUSC_vars/index.html | 2 +- .../PR1129/ForecastModel/WindFarms/index.html | 4 +- .../PR1129/Observations/Aeolus/index.html | 4 +- previews/PR1129/Observations/Amv/index.html | 4 +- previews/PR1129/Observations/Ascat/index.html | 2 +- previews/PR1129/Observations/Atovs/index.html | 4 +- previews/PR1129/Observations/Bator/index.html | 4 +- previews/PR1129/Observations/Cope/index.html | 4 +- previews/PR1129/Observations/GNSS/index.html | 4 +- previews/PR1129/Observations/Iasi/index.html | 4 +- previews/PR1129/Observations/Modes/index.html | 4 +- .../Observations/ObservationData/index.html | 4 +- .../ObservationPreprocessing/index.html | 2 +- previews/PR1129/Observations/Oulan/index.html | 4 +- .../PR1129/Observations/RadarData/index.html | 2 +- previews/PR1129/Observations/SYNOP/index.html | 4 +- previews/PR1129/Observations/Scatt/index.html | 4 +- .../PR1129/Observations/Seviri/index.html | 4 +- previews/PR1129/Overview/Binaries/index.html | 2 +- previews/PR1129/Overview/Content/index.html | 2 +- .../PR1129/Overview/FileFormats/index.html | 2 +- 
previews/PR1129/Overview/Source/index.html | 2 +- previews/PR1129/Overview/da_graph/index.html | 2 +- .../PostProcessing/Diagnostics/index.html | 4 +- .../PostProcessing/FileConversions/index.html | 4 +- .../PR1129/PostProcessing/Fullpos/index.html | 4 +- .../PostProcessing/Interpolation/index.html | 2 +- previews/PR1129/PostProcessing/gl/index.html | 4 +- .../PR1129/PostProcessing/xtool/index.html | 4 +- .../PR1129/SuiteManagement/ECFLOW/index.html | 4 +- .../PR1129/System/Build_local_docs/index.html | 4 +- previews/PR1129/System/DrHook/index.html | 4 +- .../System/ECMWF/ECMWF_teleport/index.html | 4 +- .../ECMWF/RunningHarmonieOnAtos/index.html | 8 +- .../GitDeveloperDocumentation/index.html | 4 +- .../PR1129/System/HarmonieTestbed/index.html | 4 +- .../System/Local/QuickStartLocal/index.html | 4 +- previews/PR1129/System/MFaccess/index.html | 4 +- .../PR1129/System/ReleaseProcess/index.html | 2 +- .../PR1129/System/StandaloneOdb/index.html | 4 +- .../System/TheHarmonieScript/index.html | 4 +- .../PR1129/System/UpdateNamelists/index.html | 4 +- .../UsingSubmodulesinHarmonie/index.html | 151 ++++++++++++++++++ .../AllobsVerification/index.html | 4 +- .../CommonVerification/index.html | 2 +- .../Extract4verification/index.html | 4 +- previews/PR1129/Verification/HARP/index.html | 2 +- .../PR1129/Verification/Obsmon/index.html | 4 +- .../Verification/Verification/index.html | 2 +- .../PR1129/Visualization/EPyGrAM/index.html | 4 +- previews/PR1129/assets/README/index.html | 2 +- previews/PR1129/index.html | 2 +- previews/PR1129/objects.inv | Bin 16636 -> 17071 bytes previews/PR1129/references/index.html | 2 +- previews/PR1129/search_index.js | 2 +- 105 files changed, 320 insertions(+), 169 deletions(-) create mode 100644 previews/PR1129/System/UsingSubmodulesinHarmonie/index.html diff --git a/previews/PR1129/.documenter-siteinfo.json b/previews/PR1129/.documenter-siteinfo.json index f5a745932..95beae72c 100644 --- a/previews/PR1129/.documenter-siteinfo.json +++ b/previews/PR1129/.documenter-siteinfo.json @@ -1 +1 @@ -{"documenter":{"julia_version":"1.10.4","generation_timestamp":"2024-10-24T06:29:08","documenter_version":"1.5.0"}} \ No newline at end of file +{"documenter":{"julia_version":"1.10.4","generation_timestamp":"2024-11-06T14:12:09","documenter_version":"1.5.0"}} \ No newline at end of file diff --git a/previews/PR1129/Boundaries/BoundaryFilePreparation/index.html b/previews/PR1129/Boundaries/BoundaryFilePreparation/index.html index 3e9e335dc..c583437be 100644 --- a/previews/PR1129/Boundaries/BoundaryFilePreparation/index.html +++ b/previews/PR1129/Boundaries/BoundaryFilePreparation/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

Preparation of initial and boundary files

Introduction

HARMONIE can be coupled with external models such as IFS, ARPEGE or HIRLAM. Internally it is possible to nest the different ALADIN/ALARO/AROME configurations, with some restrictions. In the following we describe how the initial and boundary files are generated depending on the different configurations. Boundary file preparation basically includes two parts: forecast file fetching and boundary file generation.

The ECFLOW tasks for initial and boundary preparation

Boundary strategies

There are a number of ways to choose which forecast lengths you use as boundaries. The strategy is determined by BDSTRATEGY in ecf/config_exp.h and several strategies are implemented.

  • available : Search for available files in BDDIR and try to keep forecast consistency. This is meant to be used operationally since it will at least keep your run going, but with old boundaries, if no new boundaries are available.
  • simulate_operational : Mimic the behaviour of the operational runs using ECMWF 6h old boundaries.
  • same_forecast : Use all boundaries from the same forecast, start from analysis
  • analysis_only : Use only analyses as boundaries. Note that BDINT cannot be shorter than the frequency of the analyses.
  • latest : Use the latest possible boundary with the shortest forecast length
  • RCR_operational : Mimic the behaviour of the RCR runs, i.e.:
  • 12h old boundaries at 00 and 12 and
  • 06h old boundaries at 06 and 18
  • jb_ensemble : Same as same_forecast but used for JB-statistics generation. With this you should export JB_ENS_MEMBER=some_number
  • eps_ec : ECMWF EPS members (on reduced Gaussian grid). It is only meaningful with ENSMSEL non-empty, i.e., ENSSIZE > 0

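For example, to mimic the operational use of 6 h old ECMWF boundaries one could set the following in ecf/config_exp.h (a minimal sketch; BDSTRATEGY and BDINT are the variables referred to above, and BDINT=3 is only an illustration of a 3-hourly boundary interval):

BDSTRATEGY=simulate_operational
BDINT=3
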
All the strategies are defined in scr/Boundary_strategy.pl. The script generates a file bdstrategy in your working directory that could look like:

 Boundary strategy
+

Preparation of initial and boundary files

Introduction

HARMONIE can be coupled with external models such as IFS, ARPEGE or HIRLAM. Internally it is possible to nest the different ALADIN/ALARO/AROME configurations, with some restrictions. In the following we describe how the initial and boundary files are generated depending on the different configurations. Boundary file preparation basically includes two parts: forecast file fetching and boundary file generation.

The ECFLOW tasks for initial and boundary preparation

Boundary strategies

There are a number of ways to choose which forecast lengths you use as boundaries. The strategy is determined by BDSTRATEGY in ecf/config_exp.h and several strategies are implemented.

  • available : Search for available files in BDDIR and try to keep forecast consistency. This is meant to be used operationally since it will at least keep your run going, but with old boundaries, if no new boundaries are available.
  • simulate_operational : Mimic the behaviour of the operational runs using ECMWF 6h old boundaries.
  • same_forecast : Use all boundaries from the same forecast, start from analysis
  • analysis_only : Use only analyses as boundaries. Note that BDINT cannot be shorter than the frequency of the analyses.
  • latest : Use the latest possible boundary with the shortest forecast length
  • RCR_operational : Mimic the behaviour of the RCR runs, i.e.:
  • 12h old boundaries at 00 and 12 and
  • 06h old boundaries at 06 and 18
  • jb_ensemble : Same as same_forecast but used for JB-statistics generation. With this you should export JB_ENS_MEMBER=some_number
  • eps_ec : ECMWF EPS members (on reduced Gaussian grid). It is only meaningful with ENSMSEL non-empty, i.e., ENSSIZE > 0

All the strategies are defined in scr/Boundary_strategy.pl. The script generates a file bdstrategy in your working directory that could look like:

 Boundary strategy
 
        DTG: 2011090618
         LL: 36
@@ -57,4 +57,4 @@
 /

From src/arpifs/module/yommcc.F90:

! LMCC01_MSE = .T.   ===> THE CLIM.FIELD(S) ARE READ IN LBC FILE AND USED IN SURFEX
  :
 ! LMCCECSST =.T. ===> SST FROM ECMWF (SST-ANA COMB with surf temp over seaice)
-!           =.F. ===> SST FROM SURFTEMPERATURE
+! =.F. ===> SST FROM SURFTEMPERATURE
diff --git a/previews/PR1129/Build/Build_with_cmake/index.html b/previews/PR1129/Build/Build_with_cmake/index.html index 7a8bfd63a..97e405a86 100644 --- a/previews/PR1129/Build/Build_with_cmake/index.html +++ b/previews/PR1129/Build/Build_with_cmake/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

Build with CMake

Background

CMake is a build system generator supporting multiple build systems and programming languages; in particular, Fortran is a first-class citizen, allowing, for example, out-of-the-box handling of inter-module dependencies. "Build system generator" here means that a description of the build procedure, written in the CMake script language, is used by the cmake tool to generate the actual build system, for example with the Unix Makefiles or Ninja generator. Thus, all modifications should be performed on the CMake-script level and not within the generated build system, as these changes will be overwritten when re-running cmake at some point.

Why provide yet another alternative for building HARMONIE-AROME? Well, makeup does a very good job of building the system; however, it is an in-house solution which has a number of limitations:

  • makeup is an in-house build system, so there are components that require more maintenance compared to a standardized build tool
  • makeup uses a considerable number of sequential steps, which increase the total build time
  • the configure step takes quite some time, although in some cases it can be skipped, but users have to remember when they must re-run configure and this dependency is not enforced by makeup
  • not all the dependencies are tracked by makeup, for example updating configure files does not trigger a re-build

In an attempt to fix these limitations of makeup, CMake was chosen as an alternative. CMake has mature Fortran support and improves upon some shortcomings of makeup with little effort (well, it obviously has its own fair share of quirks, but that's a different story...). Additionally, using CMake allows us to enforce usage requirements and dependencies between different components of HARMONIE-AROME; for example, it's a good idea to ensure that SURFEX routines do not directly call cloud microphysics functions. Currently makeup does not enforce these boundaries and this task is left to the developers who implement the new code. Of course, something like this can also be implemented with makeup, but it would require considerable development effort.

Getting started with CMake

Selecting the CMake-based build system when installing HARMONIE-AROME

If all the config files are available, building HARMONIE-AROME with CMake should be as simple as setting the BUILD_WITH variable when invoking Harmonie:

config-sh/Harmonie install BUILD_WITH=cmake

or alternatively, setting the desired option in ecf/config_exp.h.
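
As a sketch, the corresponding entry in ecf/config_exp.h would be (hedged; BUILD_WITH is the variable named above, and the allowed values are documented in that file):

BUILD_WITH=cmake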

Building HARMONIE-AROME with CMake from the command line

Sometimes calling Harmonie install is not the best choice and one might want to compile the code from the command line. In this case compilation of HARMONIE-AROME with CMake consists of three individual steps:

  1. compiling the auxiliary libraries (gribex and such)
  2. compiling the main code of HARMONIE-AROME
  3. optionally, compiling some additional tools (for example, gl)

1. Compiling the auxiliary libraries

This step is rather straightforward. Assuming that the HARMONIE-AROME code is located under the path stored in the HM_LIB environment variable, one can adapt the following snippet to compile all the required libraries:

CMAKE_FLAGS="-DCONFIG_FILE=<path to your JSON config>"
+

Build with CMake

Background

CMake is a build system generator supporting multiple build systems and programming languages; in particular, Fortran is a first-class citizen, allowing, for example, out-of-the-box handling of inter-module dependencies. "Build system generator" here means that a description of the build procedure, written in the CMake script language, is used by the cmake tool to generate the actual build system, for example with the Unix Makefiles or Ninja generator. Thus, all modifications should be performed on the CMake-script level and not within the generated build system, as these changes will be overwritten when re-running cmake at some point.

Why provide yet another alternative for building HARMONIE-AROME? Well, makeup does a very good job of building the system; however, it is an in-house solution which has a number of limitations:

  • makeup is an in-house build system, so there are components that require more maintenance compared to a standardized build tool
  • makeup uses a considerable number of sequential steps, which increase the total build time
  • the configure step takes quite some time, although in some cases it can be skipped, but users have to remember when they must re-run configure and this dependency is not enforced by makeup
  • not all the dependencies are tracked by makeup, for example updating configure files does not trigger a re-build

In an attempt to fix these limitations of makeup, CMake was chosen as an alternative. CMake has mature Fortran support and improves upon some shortcomings of makeup with little effort (well, it obviously has its own fair share of quirks, but that's a different story...). Additionally, using CMake allows us to enforce usage requirements and dependencies between different components of HARMONIE-AROME; for example, it's a good idea to ensure that SURFEX routines do not directly call cloud microphysics functions. Currently makeup does not enforce these boundaries and this task is left to the developers who implement the new code. Of course, something like this can also be implemented with makeup, but it would require considerable development effort.

Getting started with CMake

Selecting the CMake-based build system when installing HARMONIE-AROME

If all the config files are available, building HARMONIE-AROME with CMake should be as simple as setting the BUILD_WITH variable when invoking Harmonie:

config-sh/Harmonie install BUILD_WITH=cmake

or alternatively, setting the desired option in ecf/config_exp.h.

Building HARMONIE-AROME with CMake from the command line

Sometimes calling Harmonie install is not the best choice and one might want to compile the code from the command line. In this case compilation of HARMONIE-AROME with CMake consists of three individual steps:

  1. compiling the auxiliary libraries (gribex and such)
  2. compiling the main code of HARMONIE-AROME
  3. optionally, compiling some additional tools (for example, gl)

1. Compiling the auxiliary libraries

This step is rather straightforward. Assuming that the HARMONIE-AROME code is located under the path stored in the HM_LIB environment variable, one can adapt the following snippet to compile all the required libraries:

CMAKE_FLAGS="-DCONFIG_FILE=<path to your JSON config>"
 INSTALL_DIR="<directory where the auxiliary libraries should be installed>"
 
 AUX_LIBS='bufr_405 gribex_370 rgb_001 dummies_006/mpidummy'
@@ -100,4 +100,4 @@
 set(Fortran_DEFAULT_FLOAT_64 "-fdefault-double-8 -fdefault-real-8")
 
 set(Fortran_DEFAULT_INT_32   "")
-set(Fortran_DEFAULT_INT_64   "-fdefault-integer-8")

When running cmake configure, and depending on the build precision, a subset of these flags is added to the CMAKE_Fortran_FLAGS variable thus affecting all the Fortran targets. Currently, DEFAULT_INT variables are not used in CMake build, but are provided for consistency.

Note

When creating FortranCompilerFlags.<compiler type>.cmake, <compiler type> should follow the naming provided by CMAKE_Fortran_COMPILER_ID, for example, GNU for gfortran and Intel for ifort. See the CMake documentation for a list of all supported compiler vendors.

Note on generating different build systems with CMake

CMake is a build system generator and it can create different native build systems from the same CMakeLists.txt. The full list of supported generators is available in the CMake documentation, however in practice when building HARMONIE-AROME on a Linux machine (or on a UNIX-like one in general) there are two options: the Unix Makefiles generator and the Ninja generator:

  • Unix Makefiles generator produces a build system based on the standard makefiles and does not use "exotic" tools. This is a default generator for CMake running on Linux and it usually works pretty well. However, when building with Unix Makefiles, CMake relies on its own Fortran parsers to scan the source tree and determine the build dependencies. Thus, in some rare cases of heavy CPP usage in Fortran code CMake can get inter-module dependencies wrong. The Unix Makefiles build is not parallel by default but it can be controlled, as with any conventional makefile-based build, by passing the desired -j flags to make. Additionally, when invoking the build via cmake --build command, a -j (or --parallel) flag can be used for setting the number of parallel jobs in a build-system-agnostic way, see CMake documentation.

  • Ninja is a modern alternative to Make. Ninja is built with focus on speed and Ninja build is parallel by default, however, unlike Make, the build files for Ninja are very cumbersome to hand-write and they are usually machine-generated. When building Fortran code with CMake Ninja generator, an explicit preprocessing step is added, thus the inter-module dependencies should be always correct (or at least these corner cases where Unix Makefiles struggles to get correct dependencies are handled correctly by Ninja). In some cases using Ninja generator can reduce the build time due to better parallelization of the build, however since Ninja has a separate preprocessing step, it generates more output and, if the file system is a bottleneck, Ninja build can be slower than Unix Makefiles build. Using the Ninja generator in CMake requires the ninja tool to be available in the $PATH at the configure time.

Note

Specific CMake generator can be selected at the configure time by passing the correct -G <gen> flag to cmake. For example, cmake -G Ninja <...other CMake args...> or cmake -G "Unix Makefiles" <...other CMake args...>.

Practical considerations

When to re-run CMake configure in my experiment?

In principle, it should be enough to run CMake configure only once to generate the build system; after that, any modification of the source code or configuration files should be detected by the build system, triggering the required re-build steps. The only time when CMake configure should be explicitly re-run is when you add a new source file to HARMONIE-AROME. The current implementation of the CMake build scans the file system looking for the source files to compile, so just putting a new file under, say, src/surfex/SURFEX/ and re-running the build isn't enough since this new file would still be unknown to the build system, hence the need to re-run the configure step first.
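
A minimal sketch of that workflow (the file name is purely hypothetical; the source and build paths follow the conventions used elsewhere on this page):

cp my_new_routine.F90 $HM_LIB/src/surfex/SURFEX/    # add the new source file
cd <your build directory>
cmake $HM_LIB/src <your usual CMake arguments>      # re-run configure so the new file is picked up
cmake --build . --target install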

I added some code and CMake build stopped working

Unlike makeup, CMake build for HARMONIE-AROME enforces inter-project boundaries and each project has an explicit list of its dependencies. For example, it is not possible to use modules from arpifs in surfex, but it is possible to use mse modules. If after a code modification CMake starts complaining about missing module files, then it means that this modification violates the project dependencies in the build. To fix this problem, please update your changeset to use only the available modules. If you believe that your modification is sound with respect to inter-project dependencies of HARMONIE-AROME and it's the CMake build which misses a dependency, please open a new GitHub issue explaining the problem.

Can I move/copy my build directory to another directory and re-use it?

No, it's generally a bad idea. CMake loves absolute paths and uses them in many parts of the generated build system, thus simply moving the build directory would break the build.

Something went wrong and CMake doesn't behave anymore, can I refresh the build without nuking the whole build directory?

You can try deleting just the CMakeCache.txt file from the build directory.

CMake picks a wrong compiler

Sometimes CMake selects a system default compiler instead of the compiler provided, for example, by loading a module. There are a few options available to force CMake to use a specific compiler: a straightforward one is to set the compiler via commonly-used environment variables (for example, export FC=ifort for a Fortran compiler). Another way is to set the correct compilers in command-line arguments when configuring the CMake build (for example adding -DCMAKE_Fortran_COMPILER=ifort to the list of CMake arguments). CMake recognizes CMAKE_<LANG>_COMPILER passed from the command line where <LANG> can be Fortran, C or CXX.
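
Both approaches could look like this (a minimal sketch assuming the Intel Fortran compiler ifort, as in the examples above, and the source path used elsewhere on this page):

# Option 1: environment variable, set before the first cmake run
export FC=ifort
cmake $HM_LIB/src <other CMake arguments>

# Option 2: pass the compiler explicitly on the command line
cmake -DCMAKE_Fortran_COMPILER=ifort $HM_LIB/src <other CMake arguments>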

Can I get more verbose output when compiling with CMake?

To get detailed information about individual steps and commands issued when compiling HARMONIE-AROME with CMake add -v to your build command:

cmake --build . --target install -v

Is there a way to visualise dependencies between individual targets of HARMONIE-AROME in CMake build?

Since all the inter-target dependencies are defined in CMake scripts it can be useful to have an option to produce a graphical overview of the dependency graph of HARMONIE-AROME without grepping all the CMakeLists.txt files. This can be achieved by adding the --graphviz=<output file name> to the list of CMake arguments, for example:

cmake $HM_LIB/src --graphviz=harmonie.dot

then the produced dependency graph can be visualized using the dot tool:

dot -Tx11 harmonie.dot

The full dependency graph may be very cluttered and take quite some time to render, so it might be a good idea to plot dependencies of a single target, for example:

dot -Tx11 harmonie.dot.surf-static

See the CMake documentation on graphviz for additional information about fine-tuning of the generated graphs.

I need more information about CMake, where do I find documentation?

CMake documentation portal is a great source of detailed information about the various aspects of the CMake build system.

+set(Fortran_DEFAULT_INT_64 "-fdefault-integer-8")

When running cmake configure, and depending on the build precision, a subset of these flags is added to the CMAKE_Fortran_FLAGS variable thus affecting all the Fortran targets. Currently, DEFAULT_INT variables are not used in CMake build, but are provided for consistency.

Note

When creating FortranCompilerFlags.<compiler type>.cmake, <compiler type> should follow the naming provided by CMAKE_Fortran_COMPILER_ID, for example, GNU for gfortran and Intel for ifort. See the CMake documentation for a list of all supported compiler vendors.

Note on generating different build systems with CMake

CMake is a build system generator and it can create different native build systems from the same CMakeLists.txt. The full list of supported generators is available in the CMake documentation, however in practice when building HARMONIE-AROME on a Linux machine (or on a UNIX-like one in general) there are two options: the Unix Makefiles generator and the Ninja generator:

  • Unix Makefiles generator produces a build system based on the standard makefiles and does not use "exotic" tools. This is a default generator for CMake running on Linux and it usually works pretty well. However, when building with Unix Makefiles, CMake relies on its own Fortran parsers to scan the source tree and determine the build dependencies. Thus, in some rare cases of heavy CPP usage in Fortran code CMake can get inter-module dependencies wrong. The Unix Makefiles build is not parallel by default but it can be controlled, as with any conventional makefile-based build, by passing the desired -j flags to make. Additionally, when invoking the build via cmake --build command, a -j (or --parallel) flag can be used for setting the number of parallel jobs in a build-system-agnostic way, see CMake documentation.

  • Ninja is a modern alternative to Make. Ninja is built with focus on speed and Ninja build is parallel by default, however, unlike Make, the build files for Ninja are very cumbersome to hand-write and they are usually machine-generated. When building Fortran code with CMake Ninja generator, an explicit preprocessing step is added, thus the inter-module dependencies should be always correct (or at least these corner cases where Unix Makefiles struggles to get correct dependencies are handled correctly by Ninja). In some cases using Ninja generator can reduce the build time due to better parallelization of the build, however since Ninja has a separate preprocessing step, it generates more output and, if the file system is a bottleneck, Ninja build can be slower than Unix Makefiles build. Using the Ninja generator in CMake requires the ninja tool to be available in the $PATH at the configure time.

Note

Specific CMake generator can be selected at the configure time by passing the correct -G <gen> flag to cmake. For example, cmake -G Ninja <...other CMake args...> or cmake -G "Unix Makefiles" <...other CMake args...>.

Practical considerations

When to re-run CMake configure in my experiment?

In principle, it should be enough to run CMake configure only once to generate the build system; after that, any modification of the source code or configuration files should be detected by the build system, triggering the required re-build steps. The only time when CMake configure should be explicitly re-run is when you add a new source file to HARMONIE-AROME. The current implementation of the CMake build scans the file system looking for the source files to compile, so just putting a new file under, say, src/surfex/SURFEX/ and re-running the build isn't enough since this new file would still be unknown to the build system, hence the need to re-run the configure step first.

I added some code and CMake build stopped working

Unlike makeup, CMake build for HARMONIE-AROME enforces inter-project boundaries and each project has an explicit list of its dependencies. For example, it is not possible to use modules from arpifs in surfex, but it is possible to use mse modules. If after a code modification CMake starts complaining about missing module files, then it means that this modification violates the project dependencies in the build. To fix this problem, please update your changeset to use only the available modules. If you believe that your modification is sound with respect to inter-project dependencies of HARMONIE-AROME and it's the CMake build which misses a dependency, please open a new GitHub issue explaining the problem.

Can I move/copy my build directory to another directory and re-use it?

No, it's generally a bad idea. CMake loves absolute paths and uses them in many parts of the generated build system, thus simply moving the build directory would break the build.

Something went wrong and CMake doesn't behave anymore, can I refresh the build without nuking the whole build directory?

You can try deleting just the CMakeCache.txt file from the build directory.

CMake picks a wrong compiler

Sometimes CMake selects a system default compiler instead of the compiler provided, for example, by loading a module. There are a few options available to force CMake to use a specific compiler: a straightforward one is to set the compiler via commonly-used environment variables (for example, export FC=ifort for a Fortran compiler). Another way is to set the correct compilers in command-line arguments when configuring the CMake build (for example adding -DCMAKE_Fortran_COMPILER=ifort to the list of CMake arguments). CMake recognizes CMAKE_<LANG>_COMPILER passed from the command line where <LANG> can be Fortran, C or CXX.

Can I get more verbose output when compiling with CMake?

To get detailed information about individual steps and commands issued when compiling HARMONIE-AROME with CMake add -v to your build command:

cmake --build . --target install -v

Is there a way to visualise dependencies between individual targets of HARMONIE-AROME in CMake build?

Since all the inter-target dependencies are defined in CMake scripts it can be useful to have an option to produce a graphical overview of the dependency graph of HARMONIE-AROME without grepping all the CMakeLists.txt files. This can be achieved by adding the --graphviz=<output file name> to the list of CMake arguments, for example:

cmake $HM_LIB/src --graphviz=harmonie.dot

then the produced dependency graph can be visualized using the dot tool:

dot -Tx11 harmonie.dot

The full dependency graph may be very cluttered and take quite some time to render, so it might be a good idea to plot dependencies of a single target, for example:

dot -Tx11 harmonie.dot.surf-static

See the CMake documentation on graphviz for additional information about fine-tuning of the generated graphs.

I need more information about CMake, where do I find documentation?

CMake documentation portal is a great source of detailed information about the various aspects of the CMake build system.

diff --git a/previews/PR1129/Build/Build_with_makeup/index.html b/previews/PR1129/Build/Build_with_makeup/index.html index cc6e29633..842fe5d5b 100644 --- a/previews/PR1129/Build/Build_with_makeup/index.html +++ b/previews/PR1129/Build/Build_with_makeup/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

Building with MAKEUP

Background

Makeup is an alternative mechanism to build the HARMONIE system. Instead of using GMKPACK to build the libraries and binaries, standard GNU make (gmake) procedures are used, making the build of executables an easier task. Parallel make also comes for free, improving the turn-around time of the build process. Furthermore, rebuilds and changes of compiler flags – either per project and/or per source file – are now trivial to do.

MAKEUP very quickly

The process of using the MAKEUP system in stand-alone fashion is described next.

Let's define two helper variables for presentation purposes:

The variable $HARMONIE_SRC refers to the directory where the AROME source code is situated. Another variable, $HARMONIE_MAKEUP, refers to the directory where the build configuration files and MAKEUP's scripts are located.

#!sh
+

Building with MAKEUP

Background

Makeup is an alternative mechanism to build the HARMONIE system. Instead of using GMKPACK to build the libraries and binaries, standard GNU make (gmake) procedures are used, making the build of executables an easier task. Parallel make also comes for free, improving the turn-around time of the build process. Furthermore, rebuilds and changes of compiler flags – either per project and/or per source file – are now trivial to do.

MAKEUP very quickly

The process of using the MAKEUP system in stand-alone fashion is described next.

Let's define two helper variables for presentation purposes:

The variable $HARMONIE_SRC refers to the directory where the AROME source code is situated. Another variable, $HARMONIE_MAKEUP, refers to the directory where the build configuration files and MAKEUP's scripts are located.

#!sh
 # In ksh/bash
 export HARMONIE_SRC=/some/path/harmonie/src
 export HARMONIE_MAKEUP=/some/path/harmonie/util/makeup
@@ -79,4 +79,4 @@
 
 # or not to mess up the output, use just one process for compilations
 
-gmake NPES=1 -i

Creating precompiled installation

If you want to provide precompiled libraries, objects, source code to other users so that they do not have to start compilation from scratch, then make a distribution or precompiled installation as follows:

gmake PRECOMPILED=/a/precompiled/rootdir precompiled

After this the stuff you just compiled ends up in directory /a/precompiled/rootdir with two subdirectories : src/ and util/. All executables are currently removed.

You can repeat this call, and it will just rsync the modified bits.

Update/check your interface blocks outside configure

The configure script has options -c or -g to check or enforce the (re-)creation of the interface blocks of the projects arp and ald. To avoid a full and lengthy configure run, you can just do the following:

gmake intfb
+gmake NPES=1 -i

Creating precompiled installation

If you want to provide precompiled libraries, objects, source code to other users so that they do not have to start compilation from scratch, then make a distribution or precompiled installation as follows:

gmake PRECOMPILED=/a/precompiled/rootdir precompiled

After this the stuff you just compiled ends up in directory /a/precompiled/rootdir with two subdirectories : src/ and util/. All executables are currently removed.

You can repeat this call, and it will just rsync the modified bits.

Update/check your interface blocks outside configure

The configure script has options -c or -g to check or enforce the (re-)creation of the interface blocks of the projects arp and ald. To avoid a full and lengthy configure run, you can just do the following:

gmake intfb
diff --git a/previews/PR1129/ClimateGeneration/ClimateGeneration/index.html b/previews/PR1129/ClimateGeneration/ClimateGeneration/index.html index 8d08d73a0..122599c09 100644 --- a/previews/PR1129/ClimateGeneration/ClimateGeneration/index.html +++ b/previews/PR1129/ClimateGeneration/ClimateGeneration/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

Generation of climate and physiography files

Introduction

The generation of climate files includes two parts. The first part is the generation of climate files for the atmospheric model, the so called e923 configuration. The second part is the generation of the physiography information for SURFEX. In the following we describe how it is implemented in HARMONIE.

Input data for climate generation

The location of your input data for the climate generation is defined by the HM_CLDATA environment variable defined in the config-sh/config.yourhost. At ECMWF the climate data is stored on Atos here: hpc-login:/ec/res4/hpcperm/hlam/data/climate

Information on what data to download is available here. The input data contains physiography data, topography information and climatological values determined from a one year ARPEGE assimilation experiment with a resolution of T79. Climatological aerosol optical depths (tegen) or vertically integrated aerosol mass based on CAMS reanalysis 2003-2022 (camscms), can be included in the monthly climate files.

In the current version the option to use pre-generated climate files has been introduced to save time for quick experiments. To use pre-generated domains you need to set USE_REF_CLIMDIR=yes in Env_system. The location of the pre-generated domains is defined in config_exp.h; at ECMWF they are located under REF_CLIMDIR=ec:/hlam/harmonie_climdir/release-43h2.1.rc1/$DOMAIN/$ECOCLIMAP_VERSION.
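
As a sketch, the relevant Env_system entry would simply be (hedged; the exact syntax of your Env_system file may differ):

USE_REF_CLIMDIR=yes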

Preparation of SURFEX physiography file

SURFEX needs information about the distribution of different available tiles like nature, sea, water and town. The nature tile also needs information about type of vegetation and soiltypes. The main input sources for this are found at SURFEX physiographic maps.

The data base for SURFEX-file preparation is located under HM_CLDATA/PGD

  • ecoclimats_v2.* : Landtypes
  • gtopo30.* : Topography
  • sand_fao.* : Soil type distribution
  • clay_fao.* : Soil type distribution

The generation of the SURFEX physiography file (PGD.lfi) is done in scr/Prepare_pgd. The script creates the namelist OPTIONS.nam based on the DOMAIN settings in scr/Harmonie_domains.pm. Note that the SURFEX domain is only created over the C+I area. In the namelist we set which scheme should be activated for each tile, as summarised in the table below.

Tile schemes per physics option:

  PHYSICS   Nature   Sea      Water    Town
  AROME     ISBA     SEAFLX   WATFLX   TEB
  ALARO     ISBA     SEAFLX   WATFLX   Town as rock

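For the AROME row in the table above, the corresponding fragment of the generated OPTIONS.nam could look like this (a hedged sketch: NAM_PGD_SCHEMES and its keys are standard SURFEX namelist entries, but the actual content is written by scr/Prepare_pgd, so check the generated file):

&NAM_PGD_SCHEMES
  CNATURE = 'ISBA',
  CSEA    = 'SEAFLX',
  CWATER  = 'WATFLX',
  CTOWN   = 'TEB',
/
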
The program PGD produces one SURFEX physiography file PGD.lfi, which is stored in CLIMDIR directory.

To make sure we have the same topography input for the atmospheric part we call Prepare_pgd two times. One time to produce a PGD.lfi for SURFEX and a second time to produce a PGD.fa file that can be used as input for the climate generation described below. Note that for the atmosphere the topography will be spectrally filtered and the resulting topography will be imposed on SURFEX again.

Generation of non SURFEX monthly climate files

These files contain, among others, the surface elevation, land-sea mask, climatological aerosol and several near-surface variables for ALADIN/ALARO systems that may run without SURFEX. The climatological aerosol can be the aerosol optical depth at 550 nm (Tegen or CAMS) and, in the future, also vertically integrated aerosol mass mixing ratios based on the CAMS reanalysis.

scr/Climate is a script which prepares climate file(s) for the preferred forecast range. Climate files are produced for the past, present and following month. The outline of Climate is as follows:

  • Check if climate files already exist.
  • Creation of namelists. The definition of domain and truncation values is taken from src/Harmonie_domains.pm.
  • Part 0: Read the PGD.fa file generated by SURFEX and write it to Neworog
  • Part 1: Filter Neworog to target grid with spectral smoothing to remove 2dx waves.
  • Part 2: generation of surface, soil and vegetation variables, without annual variation.
  • Part 3: creation of monthly climatological values and modification of albedo and emissivity according to the climatology of sea-ice limit.
  • Part 4: definition and modification of the vegetation and surface characteristics
  • Part 5: modification of fields created by steps 2 and 4 over land from high-resolution datasets (for each month)
  • Part 6: modification of climatological values

The result is climate files for the previous, current and next month. The files are named after their month (m01, m02, ..., m12) and stored in CLIMDIR.
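
As an illustration of the previous/current/next month rule, a run in October would leave the files m09, m10 and m11 in CLIMDIR.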

Further reference e923

+

Generation of climate and physiography files

Introduction

The generation of climate files includes two parts. The first part is the generation of climate files for the atmospheric model, the so called e923 configuration. The second part is the generation of the physiography information for SURFEX. In the following we describe how it is implemented in HARMONIE.

Input data for climate generation

The location of your input data for the climate generation is defined by the HM_CLDATA environment variable defined in the config-sh/config.yourhost. At ECMWF the climate data is stored on Atos here: hpc-login:/ec/res4/hpcperm/hlam/data/climate

Information on what data to download is available here. The input data contains physiography data, topography information and climatological values determined from a one year ARPEGE assimilation experiment with a resolution of T79. Climatological aerosol optical depths (tegen) or vertically integrated aerosol mass based on CAMS reanalysis 2003-2022 (camscms), can be included in the monthly climate files.

In the current version the option to use pre-generated climate files has been introduced to save time for quick experiments. To use pre-generated domains you need to set USE_REF_CLIMDIR=yes in Env_system. The location of the pre-generated domains is defined in config_exp.h; at ECMWF they are located under REF_CLIMDIR=ec:/hlam/harmonie_climdir/release-43h2.1.rc1/$DOMAIN/$ECOCLIMAP_VERSION.

Preparation of SURFEX physiography file

SURFEX needs information about the distribution of different available tiles like nature, sea, water and town. The nature tile also needs information about type of vegetation and soiltypes. The main input sources for this are found at SURFEX physiographic maps.

The data base for SURFEX-file preparation is located under HM_CLDATA/PGD

  • ecoclimats_v2.* : Landtypes
  • gtopo30.* : Topography
  • sand_fao.* : Soil type distribution
  • clay_fao.* : Soil type distribution

The generation of the SURFEX physiography file (PGD.lfi) is done in scr/Prepare_pgd. The script creates the namelist OPTIONS.nam based on the DOMAIN settings in scr/Harmonie_domains.pm. Note that the SURFEX domain is only created over the C+I area. In the namelist we set which scheme should be activated for each tile, as summarised in the table below.

Tile schemes per physics option:

  PHYSICS   Nature   Sea      Water    Town
  AROME     ISBA     SEAFLX   WATFLX   TEB
  ALARO     ISBA     SEAFLX   WATFLX   Town as rock

The program PGD produces one SURFEX physiography file PGD.lfi, which is stored in CLIMDIR directory.

To make sure we have the same topography input for the atmospheric part we call Prepare_pgd two times. One time to produce a PGD.lfi for SURFEX and a second time to produce a PGD.fa file that can be used as input for the climate generation described below. Note that for the atmosphere the topography will be spectrally filtered and the resulting topography will be imposed on SURFEX again.

Generation of non SURFEX monthly climate files

These files contain, among others, the surface elevation, land-sea mask, climatological aerosol and several near-surface variables for ALADIN/ALARO systems that may run without SURFEX. The climatological aerosol can be the aerosol optical depth at 550 nm (Tegen or CAMS) and, in the future, also vertically integrated aerosol mass mixing ratios based on the CAMS reanalysis.

scr/Climate is a script which prepares climate file(s) for the preferred forecast range. Climate files are produced for the past, present and following month. The outline of Climate is as follows:

  • Check if climate files already exist.
  • Creation of namelists. The definition of domain and truncation values is taken from src/Harmonie_domains.pm.
  • Part 0: Read the PGD.fa file generated by SURFEX and write it to Neworog
  • Part 1: Filter Neworog to target grid with spectral smoothing to remove 2dx waves.
  • Part 2: generation of surface, soil and vegetation variables, without annual variation.
  • Part 3: creation of monthly climatological values and modification of albedo and emissivity according to the climatology of sea-ice limit.
  • Part 4: definition and modification of the vegetation and surface characteristics
  • Part 5: modification of fields created by steps 2 and 4 over land from high-resolution datasets (for each month)
  • Part 6: modification of climatological values

The result is climate files for the previous, current and next month. The files are named after their month (m01, m02, ..., m12) and stored in CLIMDIR.

Further reference e923

diff --git a/previews/PR1129/ClimateGeneration/DownloadInputData/index.html b/previews/PR1129/ClimateGeneration/DownloadInputData/index.html index f01359c42..eab068ae0 100644 --- a/previews/PR1129/ClimateGeneration/DownloadInputData/index.html +++ b/previews/PR1129/ClimateGeneration/DownloadInputData/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

Download input data

Before you can start running HARMONIE experiments some input data (external from the code repository) needs to be available on your platform. The input data contains physiography data, topography information and climatological values determined from a one year ARPEGE assimilation experiment with a resolution of T79.

+

Download input data

Before you can start running HARMONIE experiments some input data (external from the code repository) needs to be available on your platform. The input data contains physiography data, topography information and climatological values determined from a one year ARPEGE assimilation experiment with a resolution of T79.

diff --git a/previews/PR1129/ClimateSimulations/ClimateSimulation/index.html b/previews/PR1129/ClimateSimulations/ClimateSimulation/index.html index daa5061da..9bcf9493a 100644 --- a/previews/PR1129/ClimateSimulations/ClimateSimulation/index.html +++ b/previews/PR1129/ClimateSimulations/ClimateSimulation/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -
+
diff --git a/previews/PR1129/DataAssimilation/CHKEVO/index.html b/previews/PR1129/DataAssimilation/CHKEVO/index.html index b9fe18f44..8ee650473 100644 --- a/previews/PR1129/DataAssimilation/CHKEVO/index.html +++ b/previews/PR1129/DataAssimilation/CHKEVO/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

ECHKEVO

Introduction

This page describes how to activate CHKEVO for diagnosing forecast model spin-up of pressure. This diagnostic is available in trunk from r16488. Yann Michel (MF) kindly suggested some of the changes required. The diagnostics are generated as part of a forecast model run up to 3 h or 6 h. A known problem is that the method fails when the first lateral boundary conditions are read by the model. The suggestion is to use BDINT=3 and forecast length 3 h. FULL-POS should also be deactivated in config_exp.h.
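
A minimal sketch of the corresponding settings in ecf/config_exp.h (hedged: BDINT is the variable mentioned above, while using LL for the forecast length is an assumption based on its use elsewhere in this documentation; FULL-POS is deactivated via the relevant output settings in the same file):

LL=3       # 3 h diagnostic forecast
BDINT=3    # 3-hourly boundary interval, as suggested above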

Preparations

It is assumed you already have a well defined experiment called your_exp. The following instructions are valid for a 3h diagnostic forecast.

NAMCHK namelist

diff --git a/previews/PR1129/DataAssimilation/DFS/index.html b/previews/PR1129/DataAssimilation/DFS/index.html index 9bab64993..ef2b0bbe9 100644 --- a/previews/PR1129/DataAssimilation/DFS/index.html +++ b/previews/PR1129/DataAssimilation/DFS/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -
+
diff --git a/previews/PR1129/DataAssimilation/DaAlgorithms/index.html b/previews/PR1129/DataAssimilation/DaAlgorithms/index.html index 6f551da88..c4e4be203 100644 --- a/previews/PR1129/DataAssimilation/DaAlgorithms/index.html +++ b/previews/PR1129/DataAssimilation/DaAlgorithms/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

Data assimilation algorithms

3D-Var

The default upper-air data assimilation algorithm is three-dimensional variational assimilation (3D-Var). To use 3D-Var no changes to ecf/config_exp.h should be required assuming structure function data files are available for your domain. Structure function input is defined in scr/include.ass.

Settings

List of 3D-Var settings that the user should be concerned about.

4D-Var

With 43h2.2, four-dimensional variational assimilation (4D-Var) is available as a non-default option. In order to set up an experiment to use 4D-Var one should issue the following commands:

mkdir -p /path/to/home/hm_home/MY_EXP
+

Data assimilation algorithms

3D-Var

The default upper-air data assimilation algorithm is three-dimensional variational assimilation (3D-Var). To use 3D-Var no changes to ecf/config_exp.h should be required assuming structure function data files are available for your domain. Structure function input is defined in scr/include.ass.

Settings

List of 3D-Var settings that the user should be concerned about.

4D-Var

With 43h2.2, four-dimensional variational assimilation (4D-Var) is available as a non-default option. In order to set up an experiment to use 4D-Var one should issue the following commands:

mkdir -p /path/to/home/hm_home/MY_EXP
 cd /path/to/home/hm_home/MY_EXP
 /path/to/Harmonie/config-sh/Harmonie setup -r /path/to/Harmonie -c AROME_4DVAR

To use 4D-Var no changes to ecf/config_exp.h should be required assuming suitable structure function data files are available for your domain (see also ILRES in the settings section). Structure function input is defined in scr/include.ass.

Settings

List of 4D-Var settings that the user should be concerned about.

With the following settings the working of the 4D-Var can be changed. Default values are given:

NOUTERLOOP=2                            # number of 4DVAR outer loops
 ILRES=6,3                               # Resolution of the outer loops, given as a fraction of the forecast resolution. The domain NLATxNLON should have the property that, for the settings of ILRES, both NLON/ILRES and NLAT/ILRES are of the form 5^c * 3^d * 2^e, where c, d and e are integers >= 0 and e >= 1.
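
As a worked illustration of the ILRES constraint (the domain size here is chosen purely for this example): with ILRES=6,3 a 1080x1080 domain gives outer-loop grids of 1080/6 = 180 and 1080/3 = 360 points per direction, and both 180 = 2^2 * 3^2 * 5 and 360 = 2^3 * 3^2 * 5 are of the required form with e >= 1.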
@@ -79,4 +79,4 @@
 },
 

Maybe the only other information that is required to effectively use this VC algorithm concerns the parameters:

'VCWEIGHTHD' => '-1.50,',
 'VCWEIGHTT' => '-1.50,',
-'VCWEIGHTPS' => '-1.50,',

They enable the flexibility of considering different weights for different 3DVAR analysis increments. When VC operates with a low VCWEIGHT value (strongly constrained mode), it can remove some overfitting to wind, temperature and/or surface pressure observations, and this may produce an apparent degradation in operational verification curves close to t=0. These three parameters permit adjusting each variable individually. Note that negative values (as in the default) automatically revert to the VCWEIGHT value.

+'VCWEIGHTPS' => '-1.50,',

They enable the flexibility of considering different weights for different 3DVAR analysis increments. When VC operates with a low VCWEIGHT value (strongly constrained mode), it can remove some overfitting to wind, temperature and/or surface pressure observations, and this may produce an apparent degradation in operational verification curves close to t=0. These three parameters permit adjusting each variable individually. Note that negative values (as in the default) automatically revert to the VCWEIGHT value.

diff --git a/previews/PR1129/DataAssimilation/DigitalFilterInitialization/index.html b/previews/PR1129/DataAssimilation/DigitalFilterInitialization/index.html index fffaba25b..61d1dbc9c 100644 --- a/previews/PR1129/DataAssimilation/DigitalFilterInitialization/index.html +++ b/previews/PR1129/DataAssimilation/DigitalFilterInitialization/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

Digital Filter Initialization

Digital Filter Initialization (DFI) is documented by Météo France here. This wiki page is based on the "Version cycle 40t1" document available on the gmapdoc web page. By default HARMONIE does not use DFI.

DFI

The use (or not) of DFI is controlled by the variable DFI in ecf/config_exp.h. By default it is set to none.

  • idfi, incremental DFI
  • fdfi, full DFI
  • none - no initialization (default)

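For example, to switch on incremental DFI one would change the corresponding entry in ecf/config_exp.h (a minimal sketch; DFI and its allowed values are those listed above):

DFI=idfi
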
scr/Dfi is the script which calls the model in order to carry out DFI.

References

+

Digital Filter Initialization

Digital Filter Initialization (DFI) is documented by Météo France here. This wiki page is based on the "Version cycle 40t1" document available on the gmapdoc web page. By default HARMONIE does not use DFI.

DFI

The use (or not) of DFI is controlled by the variable DFI in ecf/config_exp.h. By default it is set to none.

  • idfi, incremental DFI
  • fdfi, full DFI
  • none - no initialization (default)

scr/Dfi is the script which calls the model in order to carry out DFI.

References

diff --git a/previews/PR1129/DataAssimilation/LSMIXandJk/index.html b/previews/PR1129/DataAssimilation/LSMIXandJk/index.html index 449fce74b..e17ae6380 100644 --- a/previews/PR1129/DataAssimilation/LSMIXandJk/index.html +++ b/previews/PR1129/DataAssimilation/LSMIXandJk/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

Jk as a pre-mixing method

The 3D-Var cost function including the Jk term can be written:

\[J(x) = J_b + J_o + J_k = \frac{1}{2} (x - x_b)^{\rm T} B^{-1}(x - x_b) + \frac{1}{2} (y - Hx)^{\rm T}R^{-1}(y - Hx) + \frac{1}{2} (x - x_{LS})^{\rm T} V^{-1}(x - x_{LS})\]

Setting the gradient to zero, we have at the optimal $x$:

\[\nabla J = B^{-1}(x - x_b) - H^{\rm T}R^{-1}(y - Hx) + V^{-1}(x - x_{LS}) = 0 \]

or

\[\left[B^{-1} + V^{-1} + H^{\rm T}R^{-1}H\right] \left(x - x_b \right) = H^{\rm T}R^{-1}(y - Hx_b) + V^{-1}(x_{LS} - x_b). \]

Equivalent pre-mixed first guess

Assume now that $\widetilde{x_b}$ is some yet unknown, pre-mixed field depending on $x_b$ and $x_{LS}$ that we want to determine. By adding and subtracting identical terms to the gradient equation, we have

\[B^{-1}(x - x_b + \widetilde{x_b} - \widetilde{x_b}) - H^{\rm T}R^{-1}(y - Hx + H\widetilde{x_b} - H\widetilde{x_b}) + V^{-1}(x - x_{LS} + \widetilde{x_b} - \widetilde{x_b}) = 0,\]

which, when reorganized gives

\[\left[B^{-1} + V^{-1} + H^{\rm T}R^{-1}H \right] \left(x - \widetilde{x_b}\right) = H^{\rm T}R^{-1}(y - H\widetilde{x_b}) + B^{-1}(x_b - \widetilde{x_b}) + V^{-1}(x_{LS} - \widetilde{x_b}). \]

If the last two terms on the right hand side add up to zero, i.e.,

\[B^{-1}(x_b - \widetilde{x_b}) + V^{-1}(x_{LS} - \widetilde{x_b}) = 0, \]

which means that

\[\widetilde{x_b} = [B^{-1} + V^{-1}]^{-1} ( B^{-1} x_b + V^{-1} x_{LS} ), \]

then we see that by using this mixed first guess the Jk term can be omitted, provided we use a modified B-matrix with the property that

\[\widetilde{B}^{-1} = B^{-1} + V^{-1}. \]

By writing

\[B^{-1} + V^{-1} = B^{-1}(B + V)V^{-1} = V^{-1}(B + V)B^{-1} \]

we easily see by simply inverting that

\[\widetilde{B} = [B^{-1} + V^{-1}]^{-1} = B(B + V)^{-1}V = V(B + V)^{-1}B. \]

To conclude, a 3D-Var minimization with Jk is equivalent to a minimization without the Jk term, provided that one pre-mixes the two first guess fields according to

\[\widetilde{x_b} = [B^{-1} + V^{-1}]^{-1} ( B^{-1} x_b + V^{-1} x_{LS} ) = \widetilde{B}( B^{-1} x_b + V^{-1} x_{LS} ) = V(B + V)^{-1}x_b + B(B + V)^{-1}x_{LS} \]

and use the following covariance matrix for this mixed first guess:

\[\widetilde{B} = [B^{-1} + V^{-1}]^{-1} = B(B + V)^{-1}V = V(B + V)^{-1}B. \]

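In the scalar case these expressions reduce to a familiar precision-weighted average (a worked illustration, not part of the original text):

\[\widetilde{x_b} = \frac{V x_b + B x_{LS}}{B + V}, \qquad \widetilde{B} = \frac{B V}{B + V},\]

i.e., each first guess is weighted in proportion to the inverse of its own error variance, and the combined error variance is smaller than either B or V.
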
Whether this is implementable in practice is a different story; the derivation just shows the theoretical equivalence, and how LSMIXBC should ideally be done if Jk is the right answer.

diff --git a/previews/PR1129/DataAssimilation/MTEN/index.html b/previews/PR1129/DataAssimilation/MTEN/index.html
index 1a91b250d..2f339905d 100644

Moist Total Energy Norm (MTEN) diagnostic

MTEN shows the sensitivity of the forecast model to different observations withdrawn from the full analysis system. There are two ways of computing the MTEN diagnostic. One approach uses a special branch created in CY40 (see below) in which the MTEN diagnostic can be requested; it uses the HARMONIE ensemble system to perform a series of independent observation-denial runs. This means that the following settings are used in msms/harmonie.pm

    'ENSBDMBR' => [ 0 ],
     'ENSCTL'   => [ '000',  '001',  '002',  '003', '004', '005', '006', '007' ],
     'AIRCRAFT_OBS' => [ 0, 1, 1, 1, 1, 1, 1, 1],
     'BUOY_OBS'     => [ 1, 0, 1, 1, 1, 1, 1, 1],
 
     done
   done

See (Storto and Randriamampianina, 2010) for more details.

diff --git a/previews/PR1129/DataAssimilation/NWECHKEVO/index.html b/previews/PR1129/DataAssimilation/NWECHKEVO/index.html
index 4879a0e7f..9bced4d60 100644

NWECHKEVO

Introduction

The utility NWECHKEVO was introduced to make the generation of diagnostics for the study of spin-up effects in the dynamics more efficient, since the existing utility CHKEVO slowed the forecast run down to impractical times. NWECHKEVO produces timeseries of log(Ps), horizontal vorticity, horizontal divergence, vertical divergence, pressure departure and temperature for the first 180 timesteps of the integration, at timestep resolution. These timeseries are produced at selected points within the domain and, for the five upper-air variables (HVor, HDiv, VDiv, PD and T), at all levels.

Preparations. NAMCHK namelist

The user must select the list of points at which the diagnostics are to be generated. The coordinates are given as GPx and GPy grid-point coordinates, not geographical coordinates. These coordinates are then introduced in the namelist NAMCHK, as in the following example

NAMCHK=>{
   'NGPCHK' => '17,',
   'NXCHK(1:17)' => '263,335,447,525,606,390,420,540,644,333,509,329,388,480,266,259,271,',
   'NYCHK(1:17)' => '462,472,469,398,388,406,325,284,300,293,243,215,167,178,358,279,200,',
 NWECHKEVO:UA 13 001 003   0.79264193785264E-05  -0.15031046611816E-04   0.21385134119954E+03  -0.33856415073342E-04   0.42661347477312E-05
 NWECHKEVO:UA 13 001 004   0.21090675053822E-05   0.31713133370971E-05   0.21377935010403E+03  -0.40445121858208E-04  -0.54989449665528E-05
 NWECHKEVO:UA 13 001 005   0.30451493480920E-04  -0.18284403001908E-04   0.21545646796919E+03  -0.42130887042681E-04   0.14684047934687E-04
....

up to timestep 180 (hard-coded; this corresponds to the first 3 hours if the timestep is 1 minute)

Plotting

The results are easily plotted with any graphing utility (e.g. gnuplot).
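
A minimal sketch of one way to do this, assuming the NWECHKEVO lines end up in a forecast log file called NODE.001_01 and that, as the sample output above suggests, the third field is the timestep, the fourth the level and the seventh the temperature (adjust these assumptions to your own output):

    # extract the temperature timeseries for point 13, level 3, and plot it with gnuplot
    grep 'NWECHKEVO:UA 13 ' NODE.001_01 | awk '$4 == "003" {print $3+0, $7}' > t_point13_lev003.dat
    gnuplot -e "set term png; set output 't_point13_lev003.png'; set xlabel 'timestep'; set ylabel 'T [K]'; plot 't_point13_lev003.dat' using 1:2 with lines"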

diff --git a/previews/PR1129/DataAssimilation/ObservationOperators/index.html b/previews/PR1129/DataAssimilation/ObservationOperators/index.html
index 6ca1519e9..081d2405d 100644

Observation operators

This documentation summarises the observation operator in HARMONIE and the use of the HOP_DRIVER tool. The test harness, HOP_DRIVER, calls the observation operator and generates FG departures without calling any model code or initialising any model modules. Firstly, the IFS is used to dump a single-observation gom_plus to file from the 1st trajectory of an experiment. Dumping multiple observations would require a more complex and full-featured dump (good file format, multi-process parallel). For code refactoring HOP_DRIVER can be used to test changes to the observation operator of a particular observation type.

HARMONIE and HOP_DRIVER

The HOP_DRIVER program was first added to the CY42R2 code. The tool was initially implemented to test refactoring of the IFS observation operator code src/arpifs/op_obs/hop.F90. Instructions on how to prepare the code and run HOP_DRIVER using HARMONIE are outlined below. A presentation made at the [wiki:HirlamMeetings/ModelMeetings/ObOpWorkshop OOPS Observation Operator Workshop] may provide some useful background information.

Comments on the branch

  • Code changes were required in order to compile cy42r2bf.04 + mods (provided by MF/ECMWF) in the HARMONIE system: [14312], [14325], [14326], [14330], [14331], [14332], [14333], [14334].
  • Changes were made to makeup in order to compile HOP_DRIVER correctly: [14310], [14327], [14328], [14329], [14335], [14362], [14382], [14392].
  • Included in [14362] is a change to ODBSQLFLAGS which is set to ODBSQLFLAGS=-O3 -C -UCANARI -DECMWF $(ODBEXTRAFLAGS) in order to use ECMWF flavoured ODB used by HOP_DRIVER
  • On cca, GNU compilers 4.9 are not fully supported, i.e. GRIB-API and NetCDF had to be built locally using gcc/gfortran 4.9 on cca
  • An environment variable, HOPDIR, is used to define the location of necessary input data for HOP_DRIVER
  • An environment variable, HOPCOMPILER, is used by the HOP_driver script to define the compiler used. This is used to compare results.

HOPOBS: amsua

Currently there is only one observation type, AMSU-A (HOPOBS=amsua), available for testing with HOP_DRIVER. Alan Geer (ECMWF) has already carried out the refactoring of the HOP code related to AMSU-A observations. A single observation is provided in the ECMA and is used to test the refactoring of the HOP code. To carry out the testing of the amsua refactoring, set HOPOBS to amsua in ecf/config_exp.h, as sketched below.
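
A minimal sketch of the settings involved, using the variables named on this page (the values for HOPDIR and HOPCOMPILER below are placeholders, not defaults):

    # ecf/config_exp.h (sketch)
    HOPOBS=amsua                       # observation type to test with HOP_DRIVER (only amsua is available)
    # environment seen by the HOP_driver script (example values only)
    export HOPDIR=/path/to/hop_input   # location of the necessary input data for HOP_DRIVER
    export HOPCOMPILER=gnu             # compiler label used when comparing results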

reportype@hdr obstype@hdr sensor@hdr statid@hdr stalt@hdr date@hdr time@hdr degrees(lat) degrees(lon) report_status@hdr datum_status@body obsvalue@body varno@body vertco_type@body
100773' 4'832800!20140131215914-29.59060.3113112173.281193
100773' 4'832800!20140131215914-29.59060.3113112158.861193
100773' 4'832800!20140131215914-29.59060.311313227.401193
100773' 4'832800!20140131215914-29.59060.311313260.821193
100773' 4'832800!20140131215914-29.59060.311311256.901193
100773' 4'832800!20140131215914-29.59060.311311239.601193
100773' 4'832800!20140131215914-29.59060.3113112NULL1193
100773' 4'832800!20140131215914-29.59060.311313217.691193
100773' 4'832800!20140131215914-29.59060.311311209.391193
100773' 4'832800!20140131215914-29.59060.311311214.051193
100773' 4'832800!20140131215914-29.59060.311311223.021193
100773' 4'832800!20140131215914-29.59060.311311234.421193
100773' 4'832800!20140131215914-29.59060.311311245.141193
100773' 4'832800!20140131215914-29.59060.311311257.181193
100773' 4'832800!20140131215914-29.59060.3113112227.911193

HOP_DRIVER

Using HOP_DRIVER

With LHOP_RESULTS=.TRUE., HOP_DRIVER will write results to a file called hop_results${MYPROC} for comparison between online and offline results. (The results file is opened by src/arpifs/var/taskob.F90.) HOP_DRIVER results are written to hop_results${MYPROC} in src/arpifs/op_obs/hop.F90 (a comparison sketch is given after the excerpt below):

 :
  :
 IF(LHOP_RESULTS) THEN
 !$OMP CRITICAL
 ENDDO
 
  :
  :
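
One possible way to compare the two sets of results, assuming both the online run and the offline HOP_DRIVER run wrote hop_results files for four MPI tasks into the (placeholder) directories below:

    for p in 1 2 3 4; do
      # hop_results${MYPROC} files from the online (taskob) and the offline (HOP_DRIVER) run
      diff online_run/hop_results$p offline_run/hop_results$p || echo "task $p differs"
    done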
diff --git a/previews/PR1129/DataAssimilation/Screening/index.html b/previews/PR1129/DataAssimilation/Screening/index.html
index f0b80961e..b7f4b2376 100644

Screening

Introduction

Screening (configuration 002 of ARPEGE/IFS model) carries out quality control of observations.

A useful presentation (Martin Ridal) from the "Hirlam-B Training Week on HARMONIE system" training course is available here: MR_screenandminim.pdf. Most of the information on this page is based on his presentation.

Inputs

  • First guess (the same file with 5 different names):

    • ICMSHMIN1INIT
    • ICMSHMIN1IMIN
    • ICMRFMIN10000
    • ELSCFMIN1ALBC000
    • ELSCFMIN1ALBC
  • Input/output ODB directory structure

    • ${d_DB}/ECMA
    • ${d_DB}/ECMA.${base1}
  • Constants and statistics (MAY NEED TO BE UPDATED)

    • correl.dat
    • sigmab.dat
    • rszcoef_fmt
    • errgrib
    • rt_coef_atovs_newpred_ieee.dat
    • bcor_noaa.dat
    • chanspec_noaa.dat
    • rmtberr_noaa.dat
    • cstlim_noaa.dat
  • Namelist: See %screening in nam/harmonie_namelists.pm

Screening tasks

(Based on Martin Ridal's presentation).

  • Preliminary check of observations
    • Check of completeness of the reports
    • Check if station altitude is present
    • Check of the reporting practice for SYNOP & TEMP mass observations
  • Blacklisting: A blacklist is applied to discard observations of known poor quality and/or that cannot be properly handled by the data assimilation. A selection of variables for assimilation is done using the data selection part of the blacklist file and the information hard-coded in Arpege/Aladin (orographic rejection limit, land-sea rejection, ...). Decisions based on the blacklist are fed back to the CMA. Blacklisting is defined in src/bla/mf_blacklist.b
  • Background quality control: flags are assigned to observations – 1 => probably correct, 2 => probably incorrect, 3 => incorrect.
  • Vertical consistency of multilevel report:
    • The duplicated levels, in multi-level reports, are removed from the reports
    • If 4 consecutive layers are found to be of suspicious quality then these layers are rejected
  • Removal of duplicated reports
    • In case of co-located airep reports of the same observation types (time, position), some or all of the content of one of the reports is rejected
  • Redundancy check
    • performed for active reports that are co-located and originate from the same station
    • LAND SYNOP: the report closest to the centre of the screening time window with most active data is retained
    • SHIP SYNOP: redundant if the moving platforms are within a circle of 1° radius (src/arpifs/obs_preproc/sufglim.F90: RSHIDIS = 111000._JPRB)
    • TEMP and PILOT: same stations are considered at the same time in the redundancy check
    • A SYNOP mass observation is redundant if there are any TEMP geopotential height observations (made at the same time and at the same station) that are no more than 50 hPa above the SYNOP mass observation
  • Thinning: high-resolution data are thinned to limit correlated observation errors and to reduce the amount of data

Output

The quality control information will be put into the input ECMA ODB(s) and into a newly created CCMA ODB to be used by the 3DVAR minimization.

A valuable summary about screening decisions can be found in HM_Date_YYYYMMDDHH.html:

  • Look for “SCREENING STATISTICS” (see the sketch after this list) to get:
    • STATUS summary
    • EVENT summary
    • Number of variables, departures and missing departures
    • Diagnostic JO-table
    • CCMA ODB and updated ECMA ODB
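
A quick way to pull that summary out of the log (a sketch only; the cycle time 2012061003 is an arbitrary example, the file name follows the HM_Date_YYYYMMDDHH.html convention above):

    grep -A 40 "SCREENING STATISTICS" HM_Date_2012061003.html | less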

Screening Events listed under "EVENT SUMMARY OF REPORTS:"

Event  Description
  1    NO DATA IN THE REPORT
  2    ALL DATA REJECTED
  3    BAD REPORTING PRACTICE
  4    REJECTED DUE TO RDB FLAG
  5    ACTIVATED DUE TO RDB FLAG
  6    ACTIVATED BY WHITELIST
  7    HORIZONTAL POSITION OUT OF RANGE
  8    VERTICAL POSITION OUT OF RANGE
  9    TIME OUT OF RANGE
 10    REDUNDANT REPORT
 11    REPORT OVER LAND
 12    REPORT OVER SEA
 13    MISSING STATION ALTITUDE
 14    MODEL SUR. TOO FAR FROM STAT. ALT.
 15    REPORT REJECTED THROUGH THE NAMELIST
 16    FAILED QUALITY CONTROL
diff --git a/previews/PR1129/DataAssimilation/SingleObs/index.html b/previews/PR1129/DataAssimilation/SingleObs/index.html
index f8d96327e..8a9570cb0 100644

Single observation impact experiment

General

The results of a single observation impact experiment provide useful information about the observation operator and the error statistics. Among other things, it is a useful tool for diagnosing background error statistics. The procedure described below is the recommended one and has been tested on HARMONIE harmonie-43h21. The example below with the new system is for an AROME domain covering Denmark (DOMAIN=DKCOEXP). Three TEMP observation types have been implemented in scr/Create_single_obs as deviations from the background:

  • A temperature increase of 1K
  • A wind speed increase of 1 m/s
  • A specific humidity reduction to 90% of the background.

Illustrative example of single observation impact experiment on ECMWF

  1. Follow instructions to create an experiment e.g. at ECMWF.

  2. Edit ecf/config_exp.h as follows:

    • set ANASURF=none,
    • set SINGLEOBS=yes,
    • set LSMIXBC=no,
  3. Edit scr/include.ass as follows:

    • set USEOBSOUL=1,
  4. Do a checkout of bator_lectures_mod.F90

    ./Harmonie co src/odb/pandor/module/bator_lectures_mod.F90

    and change lines 296-300 to use the free format

    296 READ (NULOBI,*,iostat=iret) &
       297           & ival1,iotp,ioch,zval1,zval2,cval,ival2,ival3,zval3,inbw,ival1,ival1,&
       298           & (ztval(:),j=1,inbw)
       299 
       37 } else {
       38  $nprocx=1;
       39  $nprocy=1;
       40 }
  5. Launch the single observation impact experiment:

    ./Harmonie start DTG=2012061003 DTGEND=2012061006
  6. The resulting analysis file can be found as $SCRATCH/hm_home/<exp>/archive/2012/06/10/06/MXMIN1999+0000. You can now diagnose the 3D-VAR analysis increments of the sinob-experiment by taking the difference between the analysis MXMIN1999+0000 and the first guess, $SCRATCH/hm_home/<exp>/archive/2012/06/10/03/ICMSHHARM+0003. Plot horizontal and vertical cross-sections of temperature and other variables using your favorite software (EPyGrAM for example).

Note that you can change position of observation, observation error, variable to be observed etc. Investigate these options by taking a closer look at the script Create_single_obs.

Read more about radiance single observation experiments here. In ec:/smx/sinob_wiki_ml you will also find OBSOUL_amsua7, a file for generating a satellite radiance AMSU-A channel 7 single observation impact experiment.

diff --git a/previews/PR1129/DataAssimilation/StructureFunctions/index.html b/previews/PR1129/DataAssimilation/StructureFunctions/index.html
index bc6d63efd..48ef69a9f 100644

Derivation of Structure Functions

General

For each new model domain, in order to carry out upper-air data assimilation (3DVAR or 4DVAR) one needs to generate background error covariances (generally referred to as structure functions). The recommended procedure is a two-step approach. In step 1 you generate background error statistics by downscaling; this is needed since you do not yet have statistics for your domain setup with this forecast model version and these physics options, so you cannot run data assimilation (unless you use statistics from an old system, possibly derived from a slightly different domain and with a different model version, which is not recommended). In step 2 you then use the statistics derived in step 1 to generate the final background error statistics files by applying ensemble data assimilation within the HARMONIE-AROME modelling system.

In step 1 structure functions are generated from difference fields between ensemble members of HARMONIE-AROME forecasts, obtained by downscaling the ECMWF EDA ensemble forecasts. To alleviate spin-up issues, these phase 1 downscaled HARMONIE-AROME forecasts are run up to 6 hours, and differences are generated from these. Using the ECMWF LBC data, 6h HARMONIE ensemble forecasts are initiated from ECMWF 6h forecasts daily at 00 UTC and 12 UTC, with ECMWF forecasts as initial and lateral boundary conditions. To obtain stable statistics, it is recommended to run 4 ensemble members for two chosen one-month episodes. The episodes should sample different seasons, so it is recommended to run for one winter month and one summer month, for example June 2016 and January 2017; these periods are chosen so as to benefit from the latest upgrade to ECMWF's EDA system. Thereby both seasonal (January, July) and daily (00 UTC and 12 UTC) variations are sampled. After running the ensembles, the archived results (6h forecasts) are processed to generate structure functions by running a program called 'festat'. Festat is run automatically within the SMS system when an experiment approaches DTGEND, and the statistics are based on difference files generated by the intermediate program femars (software that generates binary GRIB files of forecast differences after each cycle) and stored on ecfs in ec:/$uid/harmonie/$exp/femars. This means that if you start by running a one-month experiment for January, the structure functions generated when you reach DTGEND will be for January. When you use the same experiment name and also launch an experiment for July, you will, when you reach DTGEND, have background error statistics based on both the January and July difference files (since both of those are now found in ec:/$uid/harmonie/$exp/femars). These combined winter/summer background error statistics files from phase one are the final product of step 1 and are the intermediate background error statistics files to plug into the HARMONIE-AROME data assimilation of step 2. It should be mentioned that the more advanced user can run festat off-line with any combination of January/July forecast difference files from ec:/$uid/harmonie/$exp/femars. That is described in more detail further below and is something you might want to do with forecast difference files generated from step 2, to produce monthly background error statistics files by combining them in different ways.

In step 2 we again run two one-month ensemble experiments, for the same January and July months, again utilizing ECMWF EDA forecasts as lateral boundary conditions. Again you use 4 ensemble members. The important difference compared to step 1 is that you now carry out ensemble data assimilation also within the HARMONIE-AROME framework. You use the background error statistics from phase 1 and perform the EDA within a data assimilation cycle. This has the important advantage that you significantly reduce the spin-up caused by the HARMONIE-AROME model adjusting to the ECMWF EDA initial states. Because of this, in step 2 the statistics can be derived from +3h forecast differences (rather than the +6h differences used in step 1).

Note that there are methods to circumvent step 1 and to technically run 3/4DVAR using structure functions derived for another HARMONIE model domain. Such methods include aspects such as horizontal truncation or extrapolation of horizontal spectra and possibly vertical interpolation between vertical level geometries. Since the recommended procedure is the two-step approach described above, these alternative methods are not described in detail. Furthermore, it should be noted that there are background error covariance related tuning coefficients, REDNMC and REDZONE. The setting of their values is not covered here. If you have a new domain you will use the default values 0.6 for REDNMC and 100 for REDZONE, which are considered appropriate for the derivation of structure functions. If you re-derive your statistics for an existing domain you will use the REDNMC and REDZONE values as assigned in scr/include.ass.

There are various existing tools for investigating your newly derived structure functions and at the end of this page there are some documentation of existing tools and how to use them.

The procedure for generating structure functions from an ensemble of forecasts is described below for an AROME setup with 2.5 km horizontal resolution and 65 vertical levels. The experiment is run for a one-month winter period followed by a one-month summer period on the ECMWF computing system. Forecast differences are derived twice a day (from the 00 UTC and 12 UTC forecasts) from combinations of the four ensemble members. Besides the scientific recommendation to cover many different weather situations, there is also a mathematical constraint that the number of forecast difference files provided to festat needs to be larger than the number of vertical levels used in the forecast model integration; a rough illustration is given after this paragraph. In the section below detailed instructions on how to generate the structure functions are given. The other sections deal with how to diagnose the structure functions, recent and ongoing work, and future development plans.
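
As a rough illustration (an added example, assuming differences are formed from all pairs of the four members, twice a day, over a 30-day month):

\[\binom{4}{2} \times 2 \times 30 = 6 \times 2 \times 30 = 360 \gg 65,\]

so the number of difference files comfortably exceeds the 65 vertical levels used here.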

For future enhancements to the handling and diagnostics of the B statistics, it is recommended to save all generated forecast difference files, as well as stabal.cv, stabal.cvt and stabal.bal and the generated .xy and .y files (.cvt, .xy and .y for diagnostic purposes):

Generating background error statistics (using 43h2.2)

The following instructions are valid for trunk and any 43h2.2 tags that have been created. These instructions will only work at ECMWF. If you do have a new domain (or are not sure) you should follow that route in step 1 below. New domain creation is described in ModelDomain which links to the useful Domain Creation Tool

STEP 1 Downscaling

  1. Create a new experiment on ECMWF:

    In case you do have an existing domain setup do:

    mkdir -p $HOME/hm_home/jbdownexp
       cd $HOME/hm_home/jbdownexp
       ~hlam/Harmonie setup -c JBDSC -r ~hlam/harmonie_release/git/tags/harmonie-43h2.2.1 -d DOMAIN # where domain is the    name of your domain

      In case you are creating structure functions for a new domain (or you are not sure):

      mkdir -p $HOME/hm_home/jbdownexp
       cd $HOME/hm_home/jbdownexp
        ecp stab_your_eda_exp.bal.gz ec:/smx/jbdata/. (with your own filename and directory)
       
      Also create a tar-file with all `*.xy`, `*.y`, `*.cv`, `*.bal` and `*.cvt` and put it on ecfs for future diagnostic purposes. These new files are your final background error statistics, to be diagnosed (compared with the STEP 1 ones perhaps) and inserted into your data assimilation by modifying `include.ass` (as in bullet 3 above) to point to your new files.

      Diagnosis of background error statistics

      1. Diagnosis of background error statistics is a rather complicated task. To get an idea of what the correlations and covariances should look like, take a look at the article: Berre, L., 2000: Estimation of synoptic and mesoscale forecast error covariances in a limited area model. Mon. Wea. Rev., 128, 644-667. Software for investigating and graphically illustrating different aspects of the background error statistics has been developed, and statistics generated for different domains have been investigated, using the AccordDaTools package. With this software you can also compare your newly generated background error statistics with those generated for other HARMONIE domains. This will give you an idea of whether your statistics seem reasonable. For diagnosing the newly derived background error statistics follow these instructions:

      2. Get the code and scripts:

        • Download and install AccordDaTools following instructions in the README
        • Don't forget to add the package tools directory to your PATH:
        • export PATH=/path/to/da_tools:$PATH
      3. Run Jb diagnostics script:

        • For example for a new domain using horizontal grid-spacing of 2500 m and (Harmonie) 65 vertical levels:
          jbdiagnose -b jb_data/stab_IRELAND25_064_480.bal -c jb_data/stab_IRELAND25_064_480.cv -g 2500 -l harmL65 -e jbdiag_IRELAND25_064
        • The output will be written to jbdiag_IRELAND25_064
      4. The AccordDaTools package also provides two tools for plotting the data produced by jbdiagnose: plotjbbal and plotjbdiag. plotjbbal plots Jb balances for different parameters. plotjbdiag produces spectral density (spdens) and vertical correlation (vercor) diagnostic plots for your structure functions. For example:

        • plotjbbal:

          plotjbbal -t stdv -p QQ -r jbdiag_ -e IRELAND25_064
        • plotjbdiag:

          plotjbdiag -l 50 -t vercor -p QQ -r jbdiag_ -e IRELAND25_064

      Run 3DVAR/4DVAR with the new background error statistics

      1. create hm_home/jb_da. Then cd $HOME/hm_home/jb_da.

      2. create experiment by typing

        ~hlam/Harmonie setup -r ~hlam/harmonie_release/git/tags/harmonie-43h2.2.1
      3. In scr/include.ass set JBDIR=ec:/$uid/jbdata (uid being your userid, in this example ec:/smx/jbdata), set f_JBCV to the name of your .cv file in ec:/$uid/jbdata (without .gz) and set f_JBBAL to the name of your .bal file in ec:/$uid/jbdata (without .gz); in this example, f_JBCV=stab_METCOOPD_65_20200601_360.cv and f_JBBAL=stab_METCOOPD_65_20200601_360.bal (see the sketch after this list). Add these three lines instead of the three lines in include.ass that follow right after the elif statement: elif [ "$DOMAIN" = METCOOP25D]; then. If the domain is other than METCOOP25D one has to look for the alternative name of the domain.

      4. From $HOME/hm_home/jb_da launch experiment by typing

        ~hlam/Harmonie start DTG=2021010100 DTGEND=2021010103
      5. The resulting analysis file can be found under $TEMP/hm_home/jb_da/archive/2021/01/01/03, where it will be called MXMIN1999+0000, and on ec:/$uid/harmonie/2021/01/01/03. To diagnose the 3D-VAR analysis increments of the jb_da-experiment, copy the files MXMIN1999+0000 (analysis) and ICMSHHARM+0003 (first guess) to $SCRATCH. The first guess (background) file can be found in $TEMP/hm_home/jb_da/archive/2021/01/01/00 and ec:/$uid/harmonie/jb_da/2021/01/01/00. Convert from FA file format to GRIB with the gl software ($SCRATCH/hm_home/jb_da/bin/gl) by typing ./gl -p MXMIN1999+0000 and ./gl -p ICMSHANAL+0000. Then plot the difference between the files, e.g. horizontal and vertical cross-sections of temperature and other variables, using your favourite software (EPyGrAM for example).

      6. Now you have managed to insert the newly generated background error statistics into the assimilation system, run a full-scale data assimilation and plot the analysis increments. The next natural step to further diagnose the background error statistics is to carry out a single observation impact experiment utilizing your newly generated background error statistics. Note the variables REDNMC and REDZONE in include.ass. REDNMC is the scaling factor for the background error statistics (default value 0.6/0.9 for METCOOP25D/NEW_DOMAIN). REDZONE describes how far from the lateral boundaries (in km) the observations need to be located to be assimilated (default value 150/100 for METCOOP25D/NEW_DOMAIN).
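
      A minimal sketch of the three scr/include.ass lines referred to in point 3 above, using the example file names from this page (adapt $uid and the file names to your own experiment):

        JBDIR=ec:/$uid/jbdata
        f_JBCV=stab_METCOOPD_65_20200601_360.cv
        f_JBBAL=stab_METCOOPD_65_20200601_360.bal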

      In-line Interpolation and Extrapolation of Jb-statistics

      In case you do not have existing background error statistics derived for your domain, there is a built-in technical possibility to use Jb files from another domain derived with the same number of vertical levels. From these host Jb files the background error statistics are then interpolated or extrapolated to the current domain configuration. The assumption is then (which is in general questionable) that the statistics derived on the host domain are also valid for the current domain. If the longest side of the host domain is shorter than the longest side of the current domain, an extrapolation of the background error covariance spectra is needed. Such extrapolation should be avoided over a wide range of wavenumbers; therefore it is recommended that the longest side of the host Jb-file domain is as long as or longer than the longest side of the current domain. The interpolation is invoked by setting JB_INTERPOL=yes and JB_REF_DOMAIN=$HOST_JB in ecf/config_exp.h, where $HOST_JB is for example METCOOP25B. These settings activate the running of the script jbconv.sh (in case no Jb files are present for the current domain), called from Fetch_assim_data.

      On-going work & future developments

      Recent and on-going work as well as plans for future developments:

      • Ongoing work regarding structure functions concerns investigations of the effects on the B statistics and on the data assimilation of the upper-level relaxation towards ECMWF at the upper boundary condition through LUNBC=.true. Longer-term research is directed towards flow-dependent background error statistics and a closer link between the data assimilation and the ensemble forecasting system. Plans for future work also include adopting the cy46 Festat.standalone, reading FA files rather than femars files. A newly developed stand-alone tool exists for interpolation of Jb statistics, also between different vertical levels (not recommended); it is not yet publicly available and documented. Finally, it should be mentioned that there are alternative methods to EDA for carrying out STEP 2 of the background error statistics derivation. Such alternatives are BRAND and BREND, and these have been tested and compared with EDA in various contexts, such as in reanalysis frameworks. The conclusion is that there are both pros and cons with BRAND as compared with EDA; the main conclusion is that both EDA and BRAND are hampered by the homogeneity and isotropy assumptions of the 3DVAR/4DVAR framework, so that differences are smaller than in hybrid DA frameworks. Therefore continued EDA/BRAND comparisons are carried out within hybrid ensemble/DA frameworks. Nevertheless we aim to include here instructions for optionally replacing the STEP 2 EDA in the procedure above with a STEP 2 BRAND. We also aim to introduce instructions for using extended complementary diagnosis tools for Jb statistics, based on the fediacov tool and associated plotting scripts. Such tools do exist, but are not yet publicly available and documented.

      References

    + also create a tar-file with all `*.xy`, `*.y`, `*.cv`, `*.bal` and `*.cvt` and put on ecfs for future diagnostical purposes) These new files are you final background error statistics to be diagnosed (compared with STEP 1 ones perhaps) and inserted to your data assimilation by modyfying `include.ass` (as in bullet 3 above) to point to your new files.

    Diagnosis of background error statistics

    1. Diagnosis of background error statistics is a rather complicated task. To get an idea of what the correlations and covariances should look like take a look in the article: Berre, L., 2000: Estimation of synoptic and meso scale forecast error covariances in a limited area model. Mon. Wea. Rev., 128, 644-667. Software for investigating and graphically illustrate different aspects of the background error statistics has been developed and statistics generated for different domains has been investigated using the AccordDaTools package. With this software you can also compare your newly generated background error statistics with the one generated for other HARMONIE domains. This will give you and idea if your statistics seems reasonable. For diagnosing the newly derived background error statistics follow these instructions:

    2. Get the code and scripts:

      • Download and install AccordDaTools following instructions in the README
      • Don't forget to add the package tools directory to your PATH:
      • export PATH=/path/to/da_tools:$PATH
    3. Run Jb diagnostics script:

      • For example for a new domain using horizontal grid-spacing of 2500 m and (Harmonie) 65 vertical levels:
        jbdiagnose -b jb_data/stab_IRELAND25_064_480.bal -c jb_data/stab_IRELAND25_064_480.cv -g 2500 -l harmL65 -e jbdiag_IRELAND25_064
      • The output will be made written to jbdiag_IRELAND25_064
    1. The AccordDaTools package also provides two tools for plotting the data produced by jbdiagnose, plotjbbal and plotjbdiag. plotjbbal plots Jb balances for different parameters. plotjbdiag produces spectral density (spdens) and vertical correlation (vercor) diagnostic plots for your structure funtions. For example:

      • plotjbbal:

        plotjbbal -t stdv -p QQ -r jbdiag_ -e IRELAND25_064
      • plotjbdiag:

        plotjbdiag -l 50 -t vercor -p QQ -r jbdiag_ -e IRELAND25_064

    Run 3DVAR/4DVAR with the new background error statistics

    1. create hm_home/jb_da. Then cd $HOME/hm_home/jb_da.

    2. create experiment by typing

      ~hlam/Harmonie setup -r ~hlam/harmonie_release/git/tags/harmonie-43h2.2.1
    3. In scr/include.ass set JBDIR=ec:/$uid/jbdata (uid being your userid, in this example 'ec:/smx/jbdata') and f_JBCV is name of your .cv file in ec:/$uid/jbdata (without .gz) and f_JBBAL is 'name of your .bal file in ec:/$uid/jbdata (without .gz) (in this example, f_JBCV=stab_METCOOPD_65_20200601_360.cv, stab_METCOOPD_65_20200601_360.bal). Add these three lines instead of the three lines in include.ass that follows right after the elif statement: elif [ "$DOMAIN" = METCOOP25D]; then. If domain is other than METCOOP25D one has to look for the alternative name of the domain.

    4. From $HOME/hm_home/jb_da launch experiment by typing

      ~hlam/Harmonie start DTG=2021010100 DTGEND=2021010103
    5. The resulting analysis file be found under $TEMP/hm_home/jb_da/archive/2021/01/01/03 and it will be called MXMIN1999+0000 and on and ec:/$uid/harmonie/2021/01/01/03. To diagnose the 3D-VAR analysis increments of the jb_da-experiment, copy the files MXMIN1999+0000 (analysis) and ICMSHHARM+0003 (fg) to $SCRATCH. The first guess (background) file can be found on $TEMP/hm_home/jb_da/archive/2021/01/01/00 and ec:/$uid/harmonie/jb_da/2021/01/01/00. Convert from FA-file format to GRIB with the gl-software ($SCRATCH/hm_home/jb_da/bin/gl) by typing ./gl -p MXMIN1999+0000 and ./gl -p ICMSHANAL+0000. Then plot the difference between files file with your favorite software. Plot horizontal and vertical cross-sections of temperature and other variables using your favourite software (epygram for example).

    6. Now you have managed to insert the newly generated background error statistics to the assimilation system and managed to carry out a full scale data assimilation system and plot the analysis increments. The next natural step to further diagnose the background error statistics is to carry out a single observation impact experiment, utilizing your newly generated background error statistics. Note the variables REDNMC and REDZONE in include.ass. REDNMC is the scaling factor for the background error statistics (default value 0.6/0.9) for METCOOP25D/NEW_DOMAIN). REDZONE described how far from the lateral boundaries (in km) the observations need to be located to be assimilated (default value 150/100) for METCOOP25D/NEW_DOMAIN.

    In-line Interpolation and Extrapolation of Jb-statistics

    In case you do not have existing background error statistics derived for your domain there is a built technical possibility to use Jb-files from another domain derived with the same number of vertical levels. From this host Jb-files background error statistics are then interpolated or extrapolated to the current domain configuration. The assumption is then (which is in general questionable) that the statistics derived derived on the host domain is as well valid for the current domain. If the longest side of the host domain is shorter than the longest side of the current domain an extrapolation of background error covariance spectra is needed. Such extrapolation should be avoided over a wide range of wavenumbers. Therefore it is recommended that the longest side of the host Jb-file is as long or longer than the longest side of the current domain.The interpolation is invoked by in ecf/config_exp.h set JB_INTERPOL=yeś and JB_REF_DOMAIN=$HOST_JB, where $HOST_JB is for example METCOOP25B. These settings will activate runnning of script jbconv.sh (in case no Jb files present for current domain), called from Fetch_assim_data.

    On-going work & future developments

    Recent and on-going work as well as plans for future developments:

    • Ongoing work regarding structure functions concerns investigations of the effects on the B statistics and on the data assimilation of the upper-level relaxation towards ECMWF at the upper boundary condition through LUNBC=.true. Longer-term research is directed towards flow-dependent background error statistics and a closer link between the data assimilation and the ensemble forecasting system. Plans for future work also include adapting to the cy46 Festat.standalone, reading FA files rather than femars files. There is a newly developed stand-alone tool for interpolation of Jb statistics, also between different vertical levels (not recommended), which is not yet publicly available and documented. Finally it should be mentioned that there are alternative methods to EDA for carrying out STEP 2 of the background error statistics derivation. Such alternatives are BRAND and BREND, and these have been tested and compared with EDA in various contexts, such as in reanalysis frameworks. The conclusion is that there are both pros and cons with BRAND compared with EDA. The main conclusion is that both EDA and BRAND are hampered by the homogeneity and isotropy assumptions of the 3DVAR/4DVAR framework, so that the differences are smaller than in hybrid DA frameworks. Therefore continued EDA/BRAND comparisons are carried out within hybrid ensemble/DA frameworks. Nevertheless, we aim to include instructions for optionally replacing the STEP 2 EDA in the procedure above with a STEP 2 BRAND. We also aim to introduce instructions for using extended complementary diagnosis tools for Jb statistics based on the fediacov tool and associated plotting scripts. Such tools exist, but they are not yet publicly available and documented.

    References

diff --git a/previews/PR1129/DataAssimilation/Surface/CANARI/index.html b/previews/PR1129/DataAssimilation/Surface/CANARI/index.html index cd890667c..2b4b5b4c1 100644 --- a/previews/PR1129/DataAssimilation/Surface/CANARI/index.html +++ b/previews/PR1129/DataAssimilation/Surface/CANARI/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

Surface Data Assimilation Scheme: Canari

Introduction

(by Alena.Trojakova)

CANARI stands for Code for the Analysis Necessary for ARPEGE for its Rejects and its Initialization. It is software (part of the IFS/ARPEGE source code) that produces an ARPEGE/ALADIN analysis based on the optimum interpolation method; the corresponding ARPEGE/ALADIN configuration number is 701. CANARI has two main components: quality control and the analysis. According to the type of observations the analysis can be:

  • 3D multivariate for U, V, T, Ps
  • 3D univariate for RH
  • 2D univariate for 2m/10m fields
  • soil parameters analysis is based on 2m increments

CANARI can handle the following 10 types of observations:

  • SYNOP: Ps, T2m, RH2m, 10m Wind, RR, Snow depth, SST
  • AIREP: P ( or Z), Wind, T
  • SATOB: P, Wind, T - from geostationary satellite imagery
  • DRIBU: Ps, T2m, 10m Wind, SST
  • TEMP: P, Wind, T, Q
  • PILOT: Wind with the corresponding Z, (sometimes 10m Wind)
  • SATEM: Q, T retrieved from radiances- surface

Applications

  • Diagpack - diagnostics of mesoscale features via a detailed analysis of the PBL using high-resolution surface observations with specific tunings:
REF_S_T2  = 3.0,
 REF_S_H2  = 0.20,
 REF_A_H2  = 40000.,
 REF_A_T2  = 40000.,
@@ -17,4 +17,4 @@
 export ODB_MERGEODB_DIRECT= ... optional direct ODB merge; if your ODB was not merged previously, use 1
  • Concerning the observation use, another file is necessary, but it is of no interest for CANARI (just a part of the variational analysis code that is not controlled by a logical key!). The file can be obtained on "tori" via gget var.misc.rszcoef_fmt.01.

    ln -s rszcoef_fmt var.misc.rszcoef_fmt.01
  • The climatological files

    ln  -s  climfile_${mm}  ICMSHANALCLIM
     ln  -s  climfile_${mm2} ICMSHANALCLI2
  • The namelist file

    ln -s namelist fort.4 
  • The ISBA files

    • file used to derive soil moisture from 2m increment

      ln -s POLYNOMES_ISBA fort.61              
    • OPTIONAL assimilated increment files used to smooth the fields (the ICMSHANALLISSEF file is created at the end of the analysis)

      ln -s increment_file ICMSHANALLISSE     
    • OPTIONAL The SST file - an interpolated NCEP SST analysis on the ARPEGE grid, stored in FA file format, used for relaxation towards an "up-dated" climatology

      ln -s SST_file ICMSHANALSST        
    • OPTIONAL The error statistics file; OI provides the variance of the analysis error, which can be used to improve the background error in the next cycle => an option to use "dynamic" statistics instead of fixed ones. The output file ICMSHANALSTA2 is produced with statistics for the current run

      ln -s statistics_file ICMSHANALSTA
    • OPTIONAL The incremental mode files (global option only); it is possible to read 3 input files to build a non-classical init; the combination is done on spectral fields only: G1=G0+A-G

      ln -s G0_file ICMSHANAINIT
       ln -s A_file ICMSHANALANIN
      ln -s G_file ICMSHANALFGIN
  • run CANARI

    MASTERODB -c701 -vmeteo -maladin -eANAL -t1. -ft0 -aeul

    • -c: configuration number (CANARI = 701)
    • -v: version of the code (always "meteo" for ARPEGE/ALADIN)
    • -m: LAM or global model ("aladin" or "arpege")
    • -e: experiment name (ANAL for instance)
    • -t: time-step length (does not matter for CANARI, usually "1.", avoid 0.)
    • -f: duration of the integration (t0 or h0 for CANARI)
    • -a: dynamical scheme (does not matter for CANARI; Eulerian = eul or semi-Lagrangian = sli, sli as usual)

    OUTPUTs

    • OPTIONAL The analysis file
      ICMSHANAL+0000
    • OPTIONALLY updated observational database
      • OPTIONAL The error statistics file
        ICMSHANALSTA2
      • OPTIONAL The increment file
        ICMSHANALLISSEF
      • The output listing - enables checking of various parameters, e.g. the number of observations of a given type (SYNOP, TEMP, ...), the number of used observation parameters (T2m, RH2m, T, geop., ...), some namelist variables, and various control prints (O-G and O-A statistics, ...), grid-point and spectral norms.

    NODE*

    A sample script is attached.
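    Since the attached sample script is not reproduced here, a minimal sketch of the linking and the CANARI run, using only the file names and options listed above, could look like this (it assumes the namelist, climate and ISBA files are already present in the working directory):

      # mandatory input files (names as described above)
      ln -s climfile_${mm}  ICMSHANALCLIM
      ln -s climfile_${mm2} ICMSHANALCLI2
      ln -s namelist fort.4
      ln -s POLYNOMES_ISBA fort.61      # soil moisture derived from 2m increments
      # run the optimum interpolation analysis (configuration 701)
      MASTERODB -c701 -vmeteo -maladin -eANAL -t1. -ft0 -aeul
      # main outputs: ICMSHANAL+0000 (analysis) and the NODE* listing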

    As part of the system training in Copenhagen in 2008, Roger prepared an introduction to CANARI, which can be found in HarmonieSystemTraining2008/Lecture/SurfaceAssimilation on hirlam.org

    References

    diff --git a/previews/PR1129/DataAssimilation/Surface/CANARI_EKF_SURFEX/index.html b/previews/PR1129/DataAssimilation/Surface/CANARI_EKF_SURFEX/index.html index aa7824b89..a03c88e26 100644 --- a/previews/PR1129/DataAssimilation/Surface/CANARI_EKF_SURFEX/index.html +++ b/previews/PR1129/DataAssimilation/Surface/CANARI_EKF_SURFEX/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Surface variables assimilated / read in EKF_MAIN

    From cycle 37 EKF is implemented in research/development mode. The following tiles and variables are modified:

    NATURE

    WG2/WG1/TG2/TG1

    The uppermost two levels of soil moisture and temperature in ISBA are assimilated: with CANARI/CANARI_OI_MAIN by an OI method, with CANARI_EKF_SURFEX by an Extended Kalman Filter (EKF).

    For 2012 it is planned to rewrite OI_MAIN/EKF_MAIN into a single binary, in order to be able to apply the work done for OI_MAIN in EKF_MAIN and thus reduce the maintenance costs.

    diff --git a/previews/PR1129/DataAssimilation/Surface/CANARI_OI_MAIN/index.html b/previews/PR1129/DataAssimilation/Surface/CANARI_OI_MAIN/index.html index 61926b02a..f54fc85b3 100644 --- a/previews/PR1129/DataAssimilation/Surface/CANARI_OI_MAIN/index.html +++ b/previews/PR1129/DataAssimilation/Surface/CANARI_OI_MAIN/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Surface variables assimilated / read in OI_main

    CANARI_OI_MAIN is the surface assimilation scheme which emulates what is done in CANARI for old_surface, but uses the external surface scheme SURFEX.

    The default surface model is SURFEX and the default surface assimilation scheme is CANARI_OI_MAIN.

    NATURE

    WG2/WG1/TG2/TG1

    The uppermost two levels of soil moisture and temperature in ISBA are assimilated: with CANARI/CANARI_OI_MAIN by an OI method, with CANARI_EKF_SURFEX by an Extended Kalman Filter (EKF).

    SNOW

    The snow analysis is performed in CANARI and is controlled by the key LAESNM, which is set to true by default in scr/RunCanari. When running with SURFEX it also needs to be true in scr/OI_main, as the SURFEX snow then needs to be updated by the analysis done in CANARI.

    SEA

    SST/SIC

    The only option for SST/SIC at the moment is to take it from the boundaries.

    • ecf/config_exp.h :SST=BOUNDARY

    If you are using boundaries from IFS, the task Interpol_sst will interpolate the SST from your boundary file, take into account that SST in the IFS files is not defined over land (as it is for HIRLAM), and use an extrapolation routine to propagate the SST into narrow fjords.

    There is an SST analysis built into CANARI, but it is not used by HARMONIE or METEO-FRANCE.

    WATER

    LAKE temperature

    Lake temperatures are updated in OI_main and are extrapolated from the land surface temperatures.

    TOWN

    ROAD temperature

    Only used when TEB is activated (key: LAROME). The increment for TG2 is added to ROAD layer 3.

    diff --git a/previews/PR1129/DataAssimilation/Surface/SurfaceAnalysis/index.html b/previews/PR1129/DataAssimilation/Surface/SurfaceAnalysis/index.html index 73443b40a..b4241a6f8 100644 --- a/previews/PR1129/DataAssimilation/Surface/SurfaceAnalysis/index.html +++ b/previews/PR1129/DataAssimilation/Surface/SurfaceAnalysis/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Surface Data Assimilation in HARMONIE

    Surface model: SURFACE = surfex / old_surface

    • surfex: SURFEX is used as the surface model (default, used in all Harmonie configurations). The surface fields are in a separate AROMOUT_.LLLL.lfi file in LFI format.
    • old_surface: the surface physics is modelled by routines integrated in the code. The surface fields are part of the atmospheric file (ICMSHXXXX+LLLL) in FA format.

    Surface analysis method: ANASURF = "CANARI_OI_MAIN" / "CANARI_EKF_SURFEX"

    • the horizontal interpolation of screen-level parameters is performed by CANARI in both cases
    • CANARI_OI_MAIN updates soil temperature, water and ice based on 2m analysis increments, using coefficients derived empirically for the ISBA 2/3-layer scheme
    • CANARI_EKF_SURFEX (experimental) updates soil parameters using the Extended Kalman Filter method.

    ANASURF_MODE = "before" / "after"/ "both" - surface analysis performed before/after/both before and after 3DVAR

    ANASURF_INLINE = "yes" /"no"

    • yes: call SODA for updating soil parameters inside CANARI (default and experimental)
    • no: soil parameters are updated after CANARI
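    As an illustration only, combining the option names and values listed above (whether they are set in ecf/config_exp.h or elsewhere depends on your configuration), one possible selection is:

      SURFACE=surfex               # SURFEX surface model (default)
      ANASURF=CANARI_OI_MAIN       # surface analysis method (default)
      ANASURF_MODE=before          # one of before / after / both
      ANASURF_INLINE=yes           # call SODA inside CANARI (default, experimental)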

    Some details

    The default surface model is SURFEX and the default surface assimilation scheme is CANARI_OI_MAIN. CANARI_EKF_SURFEX was first implemented in cy37 and will be undergoing tests in experimental and research mode before it can be used in operational setups.

    CANARI is used for the horizontal Optimum Interpolation that finds analysis increments in each grid point based on observations minus first guess. The SURFEX assimilation schemes use two different techniques to propagate this information into the ground. The two ways CANARI is used are separated by two namelist settings needed when running with SURFEX:

    • LAEICS=.FALSE.

    No initialization of the ground variables is done, as they are in the SURFEX file

    • LDIRCLSMOD=.TRUE.

    The 2 metre variables are taken directly from the input file because, without SURFEX, they are diagnosed from the surface and the lowest model level with the model-specific routine achmt.

    CANARI was designed before SURFEX was introduced, and some of the climate variables that normally exist in the input file for CANARI do not exist when using SURFEX. Therefore the task Addsurf is run before CANARI, adding the needed fields from the FA climate file (mMM).

    The screen-level analysis (e.g. T2m) used in blending/3DVAR/4DVAR is the same as for CANARI in the old_surface case.

    Variables updated in CANARI for old_surface and SURFEX

    src/arpifs/module/qactex.F90

    HARMONIE namelist settings:

    !  * LAET2M  : .T. 2 meter temperature analysis
     !  * LAEH2M  : .T. 2 meter humidity analysis
     !  * LAESNM  : .T. snow analysis
     !  * LAESST  : .F. SST analysis
    @@ -16,4 +16,4 @@
        (edit then nam/LISTE_NOIRE_DIAP to insert, e.g. at the last line, following
     
         1 SHIP        24  11 DBKR     03062012
    diff --git a/previews/PR1129/EPS/BDSTRATEGY/index.html b/previews/PR1129/EPS/BDSTRATEGY/index.html index 391095048..a44f5baa9 100644 --- a/previews/PR1129/EPS/BDSTRATEGY/index.html +++ b/previews/PR1129/EPS/BDSTRATEGY/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Boundary strategies for HarmonEPS: SLAF and EC ENS

    Presently there are two available options for choosing boundaries when running HarmonEPS: EC ENS or SLAF. In the branch harmonEPS-40h1.1 SLAF is set as the default.

    File | Settings for SLAF (default in branch harmonEPS-40h1.1) | Settings for EC ENS
    ecf/config_exp.h | BDSTRATEGY=simulate_operational | BDSTRATEGY=eps_ec
     | BDINT=1 (can be set to larger value) | BDINT=3 (or larger, hourly input is not possible)
    msms/harmonie.pm |  | Comment out SLAF settings: #SLAFLAG, #SLAFDIFF, #SLAFK
     | 'ENSBDMBR' => [ 0] | 'ENSBDMBR' => [ 0, 1..10] (or any other members from EC ENS you would like to use)

    More information about how to treat the settings in harmonie.pm can be found here. Note that BDSTRATEGY=eps_ec uses EC ENS data as stored in the GLAMEPS archive (as ECMWF does not store model levels in MARS). Only EC ENS at 00 and 12 UTC are in this archive, with 3h output, hence you need to use BDINT=3 for this option.
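    As a minimal sketch, the ecf/config_exp.h side of the table above is simply (pick one pair; the SLAF member settings and ENSBDMBR live in msms/harmonie.pm as noted):

      # SLAF boundaries (default in harmonEPS-40h1.1)
      BDSTRATEGY=simulate_operational
      BDINT=1                       # can be set to a larger value
      # EC ENS boundaries (alternative):
      # BDSTRATEGY=eps_ec
      # BDINT=3                     # 3-hourly output only, hourly input is not possible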

    diff --git a/previews/PR1129/EPS/Howto/index.html b/previews/PR1129/EPS/Howto/index.html index b35cf916e..452800cdb 100644 --- a/previews/PR1129/EPS/Howto/index.html +++ b/previews/PR1129/EPS/Howto/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    How to run an ensemble experiment

    Simple configuration

    Running an ensemble experiment is not very different from running a deterministic one. The basic instructions about setup are the same and will not be repeated here.

    What is different is that in ecf/config_exp.h one needs to pay attention to this particular section:

    # *** Ensemble mode general settings. ***
     # *** For member specific settings use msms/harmonie.pm ***
     ENSMSEL=                                # Ensemble member selection, comma separated list, and/or range(s):
                                             # m1,m2,m3-m4,m5-m6:step    mb-me == mb-me:1 == mb,mb+1,mb+2,...,me
    @@ -47,4 +47,4 @@
        'FirstHour' => sub { my $mbr = shift;
                             return $ENV{StartHour} % &Env('FCINT',$mbr);
                           }
         );

    ANAATMO is straightforward, only the control members need an exception from blending, so using a hash is most appropriate. Similarly for FCINT. For PHYSICS we have used an array and the fact that the array will be recycled. Thus member 0 will be the AROME control, while member 1 will be the ALARO control. The reason why we did not simply put a 2-element array [ 'arome','alaro'] to be repeated is that since the ECMWF perturbations come in +/- pairs, we don't want all the '+' perturbations to be always with the same physics (and the '-' perturbations with the other type). Therefore, we added a second pair with the order reversed, to alternate +/- perturbations between AROME and ALARO members. ENSCTL follows the same pattern as PHYSICS. Note the need for 3-digit numbers in ENSCTL, at present this is necessary to avoid parsing errors in the preparation step of mini-SMS.

    Note also how we have used ENSBDMBR. For both the AROME control (member 0) and ALARO control (member 1), we have used the EC EPS control member 0 to provide boundaries. The syntax 1..20 is a perl shorthand for the list 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20.

    Note added after changeset [12537]: The setting of ENSBDMBR created a race condition in the boundary extraction for runs at ECMWF. This is hopefully solved by the new definition for BDDIR, which makes use of the possibility of having a subroutine to compute the member specific settings. Another example where a subroutine came in handy was the setting of FirstHour.

    Further reading

    More specific instructions and information about known problems can be found here.

    diff --git a/previews/PR1129/EPS/SLAF/Get_pertdia.pl.pm/index.html b/previews/PR1129/EPS/SLAF/Get_pertdia.pl.pm/index.html index f50a47443..1cec789e8 100644 --- a/previews/PR1129/EPS/SLAF/Get_pertdia.pl.pm/index.html +++ b/previews/PR1129/EPS/SLAF/Get_pertdia.pl.pm/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -
    +
    diff --git a/previews/PR1129/EPS/SLAF/index.html b/previews/PR1129/EPS/SLAF/index.html index f90baeac8..e94a7773d 100644 --- a/previews/PR1129/EPS/SLAF/index.html +++ b/previews/PR1129/EPS/SLAF/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    SLAF in HarmonEPS

    Background

    SLAF stands for Scaled Lagged Average Forecasting (Ebisuzaki & Kalnay, 1991) and it is a technique used to easily generate perturbed boundary and initial conditions from a single deterministic model (HRES).

    The general idea of SLAF is that the perturbations are constructed from HRES forecasts valid at the same time but with different forecast lengths and initial times:

    \[IC_m = A_c + K_m * ( IFS_0 - IFS_N )\]

    \[BC_m = IFS_0 + K_m * ( IFS_0 - IFS_N )\]

    Where $IC_m$ is the initial condition for member $m$, $BC_m$ is the lateral boundary condition for member $m$, $A_c$ is the control analysis, $K_m$ a scaling factor, $IFS_0$ is the latest available IFS forecast and $N$ is the forecast length for an earlier forecast valid at the same time.

    This first attempt at using SLAF revealed an undesirable clustering between the members using positive and negative perturbations: depending on the sign of $K_m$ the members gather on either side of the mean. If HRES has an increasing bias over the forecast length, the same bias will be introduced through the perturbations. A cure for this problem is to use shorter forecast lengths and construct the perturbations from two consecutive forecasts 6 hours apart:

    \[IC_m = A_c + K_m * ( IFS_N - IFS_{N-6} )\]

    \[BC_m = IFS_0 + K_m * ( IFS_N - IFS_{N-6} )\]

    where $IFS_N$ is a forecast of length $N$ and $IFS_{N-6}$ is a 6 h shorter forecast, both valid at the same time as the analysis. With this construction most of the clustering is gone. THIS IS THE DEFAULT SETUP IN HarmonEPS cy40.
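    As a concrete illustration, take member 1 from the default settings listed further below ($N = 6$ h lag and $K_1 = 1.75$). Its perturbed initial condition is then

    \[IC_1 = A_c + 1.75 * ( IFS_6 - IFS_0 )\]

    where $IFS_6$ is the 6 h forecast started 6 h before the analysis time and $IFS_0$ the most recent forecast of length 0, both valid at the analysis time; the boundary perturbation is built in the same way.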

    From the equations it is clear that every lag used generates two perturbations, so if we use deterministic runs started 06, 12, 18, 24 and 30 hours before, we get 10 perturbed members plus the control, i.e. 11 members in total.

    The goal is to obtain a spread at the boundaries of the LAMEPS similar to that of pure downscaling, but with less communication time.

    Software to use the SLAF technique in HarmonEPS was introduced in the HarmonEPS branch in version 38h1.1 and first tested by Jose A. Garcia-Moya.

    The main advantage of SLAF is that it only requires the most recent runs of the deterministic model to be stored; in daily runs at home we only need access to the latest ECMWF (or any other global model) run, which avoids a lot of communication time compared with the pure downscaling technique.

    Summarizing, to run a HarmonEPS experiment using SLAF (default in cy40) you have to:

    1. Refer to HarmonEPS branch 38h1.1 (minimum). (Constant 6h lag as described above from cy40)

    2. In ecf/config_exp.h choose:

      • BDSTRATEGY=simulate_operational
      • ENSMSEL=0-10 (or whatever you want)
      • SLAFLAG=1
      • SLAFK=1
    3. In msms/harmonie.pm:

      • 'ENSBDMBR' => [ 0],
      • 'SLAFLAG' => [ 0, 6, 6, 12, 12, 18, 18, 24, 24, 30, 30],
      • 'SLAFDIFF' => [ 0, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6],
      • 'SLAFK' => ['0.0','1.75','-1.75','1.5','-1.5','1.2','-1.2','1.0','-1.0','0.9','-0.9'] (example used for constant 6h lag)

    where SLAFLAG represents the lag of every member of the ensemble and SLAFK the scaling factors (including the sign). In principle the user may set SLAFK freely, but it is more convenient to choose ± pairs in order to keep the perturbations symmetric around the control. Note that SLAFK needs to be tuned to get equally sized perturbations, please see below for details.

    Note that no further control of the balance among the perturbed variables (temperature, humidity and so on) is done, so you may have to check the spin-up if you are interested in the first hours of the ensemble.
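    Putting the ecf/config_exp.h settings from step 2 above together, a minimal fragment would look like this (the per-member SLAFLAG/SLAFDIFF/SLAFK lists are set in msms/harmonie.pm as shown in step 3):

      # ecf/config_exp.h: SLAF ensemble (values from the example above)
      BDSTRATEGY=simulate_operational
      ENSMSEL=0-10          # control + 10 perturbed members
      SLAFLAG=1
      SLAFK=1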

    Tuning of SLAF

    The correct size of SLAFK can be determined from the perturbation diagnostics done in PertAna (harmonie-40h1.1) or Pertdia (harmonie-40h1.2). Here xtool is used to calculate the differences between the boundary files of the control and the individual members. The output is collected in the HM_Date*.html files for harmonie-40h1.1 or the HM_Postprocessing*.html files for harmonie-40h1.2.

    Check perturbations for member 004 against 000
     SLAFLAG=18 SLAFK=1.4 SLAFDIFF=6
     ...
     Start check boundary 048
    @@ -23,4 +23,4 @@
     ...
     009  42  36       06        0.95     28.92    130.72    127.48   11 
     009  48  36       06        0.95     14.80    176.10    175.48   11 

    SLAFK can then be adjusted to achieve a uniform level of STDV for all members. Note that the response may differ between seasons and will vary between IFS versions. An example of SLAF diagnostics from MetCoOp can be seen in the figure below.

    Examples

    Below is an example for 2016052006 for the two different approaches of SLAF described above:

    diff --git a/previews/PR1129/EPS/SPP/index.html b/previews/PR1129/EPS/SPP/index.html index d5b5a9483..35b53aed2 100644 --- a/previews/PR1129/EPS/SPP/index.html +++ b/previews/PR1129/EPS/SPP/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    SPP in HarmonEPS

    SPP options in HARMONIE

    The Stochastically Perturbed Parameterizations scheme (SPP) introduces stochastic perturbations to values of chosen closure parameters representing efficiencies or rates of change in parameterized atmospheric (sub)processes. See here for more information. SPP is available since cy40h1.1.1.

    SPP is activated by setting SPP=yes in ecf/config_exp.h

    SPP uses the Stochastic Pattern Generator (SPG). The pattern characteristics are set by the following settings in config_exp.h:

     SDEV_SPP = 1.0           # Standard deviation of the pattern
      TAU_SPP = 43200          # Time scale (seconds)
      XLCOR_SPP = 200000       # Length scale (m)
      SPGQ_SPP = 0.5           # Controls small vs. large scales 
    @@ -65,4 +65,4 @@
        pattern   3 for CLDDPTHDP        using seed    980493159
     KGET_SEED_SPP: ICE_CLD_WGT             10008   1362729695
        pattern   4 for ICE_CLD_WGT      using seed   1362729695
    ...

    would give us

    Perturbation | raw pattern | scaled pattern
    PSIGQSAT | S001EZDIAG01 | S002EZDIAG01
    CLDDPTH | S003EZDIAG01 | S004EZDIAG01
    CLDDPTHDP | S005EZDIAG01 | S006EZDIAG01
    ICE_CLD_WGT | S007EZDIAG01 | S008EZDIAG01

    and so on

    SPPT pattern EZDIAG02 (same in all levels)

    SPP tendencies PtendU EZDIAG03

    SPP tendencies PtendV EZDIAG04

    SPP tendencies PtendT EZDIAG05

    SPP tendencies PtendQ EZDIAG06

    Suggestions for parameters to include in SPP:

    ParameterDescriptionDeterministic value cy43Suggested range of valuessuggestion for parameter to correlate withPerson responsible for implementing
    Terminal fall velocities of rain, snow and graupelSibbo
    RFRMIN(39)Depo_rate_graupelRFRMIN 39 and 40 should approximately respect log10C = -3.55 x + 3.89, see eq. 6.2 on p. 108 in the meso-NH documentation: [https://hirlam.org/trac/attachment/wiki/HarmonieSystemDocumentation/EPS/SPP/sciICE3doc_p3.pdf Doc]Pirkka
    RFRMIN(40)Depo_rate_snow)RFRMIN 39 and 40 should approximately respect log10C = -3.55 x + 3.89, see eq. 6.2 on p. 108 in the meso-NH documentation: [https://hirlam.org/trac/attachment/wiki/HarmonieSystemDocumentation/EPS/SPP/sciICE3doc_p3.pdf Doc]Pirkka
    RFRMIN(16)Distr_snow_cto be correlated with RFRMIN(17)
    RFRMIN(17)Distr_snow_xto be correlated with RFRMIN(16)

    Experiments

    List with cy43h22 experiments is here: [wiki:HarmonieSystemDocumentation/EPS/ExplistSPPcy43 List of experiments]

    A guide for running the tuning experiments is here: [wiki:HarmonieSystemDocumentation/EPS/HowtoSPPcy43 Guide]

    • 1Default/deterministic value of 1 and recommended range of 0.01-1 means the deterministic value is at the high end of the distribution.
    diff --git a/previews/PR1129/EPS/SPPImplementation/index.html b/previews/PR1129/EPS/SPPImplementation/index.html index f403bb263..9e492afbb 100644 --- a/previews/PR1129/EPS/SPPImplementation/index.html +++ b/previews/PR1129/EPS/SPPImplementation/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    The SPP implementation in IAL and HARMONIE

    The Stochastically Perturbed Parameterizations scheme (SPP) introduces stochastic perturbations to values of chosen closure parameters representing efficiencies or rates of change in parameterized atmospheric (sub)processes. See here for more information. See the main SPP documentation for selection of settings.

    Controlling routines

    The SPP data structure and logic are controlled by the following routines:

    Routine | Description
    src/arpifs/module/spp_mod.F90 | Defines the SPP scheme types TSPP_CONFIG_PAR and TSPP_CONFIG for the parameter config and the overall config respectively
    src/arpifs/module/spp_mod_type.F90 | Harmonie specific data types TSPP_CONFIG_TYPE, ATM_SPP_VARS, SFX_VARS, control and the methods CLEAR_SSP_TYPE, SET_SPP_TYPE, APPLY_SPP, APPLY_SPP_SURFEX, DIA_SPP, SET_ALL_ATM_SPP, SET_ALL_SFX_SPP, CLEAR_ALL_ATM_SPP, CLEAR_ALL_SFX_SPP
    src/surfex/SURFEX/modd_sfx_spp.F90 | SURFEX specific data types, control and methods CLEAR_SFX_SPP, SET_SFX_SPP, APPLY_SFX_SPP, CLEAR_ALL_SFX_SPP, SPP_MASK, SPP_DEMASK, PREP_SPP_SFX. Partly duplicates spp_mod_type.F90
    src/arpifs/namelist/namspp.nam.h | The SPP namelist
    src/arpifs/setup/get_spp_conf.F90 | Sets up defaults and reads the SPP namelist. Initialises the SPG parameters
    src/arpifs/phys_dmn/ini_spp.F90 | Initialises the pattern used for SPP
    src/arpifs/phys_dmn/evolve_spp.F90 | Control routine for pattern propagation
    src/mse/internals/aroset_spp.F90 | Initialises the SURFEX part of SPP

    Note that the control routines shared with IFS will be totally rewritten, and much neater, with the introduction of CY49T1. See e.g. spp_def_mod.F90, spp_gen_mod.F90

    SPG routines

    The pattern used for SPP within HARMONIE is SPG and the code for this is found under src/utilities/spg. For the propagation of the pattern we find the routine EVOLVE_ARP_SPG in src/arp/module/spectral_arp_mod.F90

    Applying the patterns

    In apl_arome.F90 the HARMONIE-specific data types are initialised with SET_ALL_ATM_SPP and SET_ALL_SFX_SPP. These routines group the different parameters and connect them to a pattern and to the correct diagnostic field EZDIAG if requested.

    Applying the patterns in the upper air part

    In the routine where a specific parameter is used, the pattern is applied by calling APPLY_SPP. This is done for each parameter according to the table below.

    Perturbation | Routine
    RADGR | src/arpifs/phys_dmn/apl_arome.F90
    RADSN | src/arpifs/phys_dmn/apl_arome.F90
    RFAC_TWOC | src/arpifs/phys_dmn/vdfexcuhl.F90
    RZC_H | src/arpifs/phys_dmn/vdfexcuhl.F90
    RZL_INF | src/arpifs/phys_dmn/vdfexcuhl.F90
    RZMFDRY | src/arpifs/phys_dmn/vdfhghtnhl.F90
    RZMBCLOSURE | src/arpifs/phys_dmn/vdfhghtnhl.F90
    CLDDPTHDP | src/arpifs/phys_dmn/vdfhghtnhl.F90
    RLWINHF | src/arpifs/phys_radi/recmwf.F90
    RSWINHF | src/arpifs/phys_radi/recmwf.F90
    PSIGQSAT | src/mpa/micro/internals/condensation.F90
    ICE_CLD_WGT | src/mpa/micro/internals/condensation.F90
    ICENU | src/mpa/micro/internals/rain_ice_old.F90
    KGN_ACON | src/mpa/micro/internals/rain_ice_old.F90
    KGN_SBGR | src/mpa/micro/internals/rain_ice_old.F90
    ALPHA | src/mpa/micro/internals/rain_ice_old.F90
    RZNUC | src/mpa/micro/internals/rain_ice_old.F90

    Applying the patterns in SURFEX

    As SURFEX should have no dependencies on external modules, the data is copied into the internal SURFEX SPP data structure in AROSET_SPP, called from ARO_GROUND_PARAM.

    For SURFEX the parameter table looks like

    Perturbation | Routine
    CV | src/surfex/SURFEX/coupling_isban.F90
    LAI | src/surfex/SURFEX/coupling_isban.F90
    RSMIN | src/surfex/SURFEX/coupling_isban.F90

    In SURFEX we also have to pack/unpack the data arrays so that only the active points of a specific tile or patch are used. This is done in the SPP_MASK and SPP_DEMASK routines found in src/surfex/SURFEX/modd_sfx_spp.F90 and called from src/surfex/SURFEX/coupling_surf_atmn.F90. At the time of writing, returning the diagnostics of the pattern does not work satisfactorily.

    The additional code changes done for SPP in SURFEX can be viewed here

    diff --git a/previews/PR1129/EPS/SPPT/index.html b/previews/PR1129/EPS/SPPT/index.html index 553fe6bad..dabac7aee 100644 --- a/previews/PR1129/EPS/SPPT/index.html +++ b/previews/PR1129/EPS/SPPT/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    SPPT

    ** Since CY46h1 SPPT is no longer supported in HarmonEPS **

    The SPPT configuration within HarmonEPS is being tested over the period 2016053000 to 2016060500 using the MetCoOp domain. It has been found that there are some problems with the default pattern generator and it has therefore been decided to use the Stochastic Pattern Generator (SPG).

    Below is a table of experiments which will be completed in order to find a suitable configuration of the SPG control parameters TAU (time correlation scale) and XLCOR (length correlation scale). The value of the standard deviation of the perturbation amplitudes (SDEV_SDT) is kept fixed at 0.20 as is the clipping ratio of the perturbations (XCLIP_RATIO_SDT=5.0). These values along with the default value for XLCOR come from suggested settings used by Mihaly Szucs.

    First of all, keeping the XLCOR parameter constant (set at the default value of 2000000), TAU will be varied between 1 h and 24 h as shown in the table. In the tables below TAU is given in seconds and XLCOR in metres.

    The experiments are started by typing ~hlam/Harmonie start DTG=2016053000 DTGEND=2016060500 BUILD=yes

    Experiment Name | Who | DTG | DTGEND | Version | Domain | TAU | XLCOR | Description and Comments | Status | Verification
    SPPT_only_40h111_2000km_1h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 3600 | 2000000 | XLCOR constant, TAU varying | Suspended | No
    SPPT_only_40h111_2000km_3h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 10800 | 2000000 | XLCOR constant, TAU varying | Crash | No
    SPPT_only_40h111_2000km_6h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 21600 | 2000000 | XLCOR constant, TAU varying | Complete | Yes
    SPPT_only_40h111_2000km_9h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 32400 | 2000000 | XLCOR constant, TAU varying | Complete | Yes
    SPPT_only_40h111_2000km_12h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 43200 | 2000000 | XLCOR constant, TAU varying | Complete | Yes
    SPPT_only_40h111_2000km_15h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 54000 | 2000000 | XLCOR constant, TAU varying | Complete | Yes
    SPPT_only_40h111_2000km_18h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 64800 | 2000000 | XLCOR constant, TAU varying | Complete | Yes
    SPPT_only_40h111_2000km_21h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 75600 | 2000000 | XLCOR constant, TAU varying | Complete | Yes
    SPPT_only_40h111_2000km_24h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 86400 | 2000000 | XLCOR constant, TAU varying | Complete | Yes

    Once these experiments have been completed, testing will continue with the time correlation scale kept constant while the spatial scale is varied. Below is a table of experiments to this effect.

    A default value of 8h will be used for TAU as per the suggested value from Mihaly Szucs.

    Experiment Name | Who | DTG | DTGEND | Version | Domain | TAU | XLCOR | Description and Comments | Status | Verification
    SPPT_only_40h111_100km_8h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 100000 | XLCOR varying, TAU constant | Complete | Yes
    SPPT_only_40h111_200km_8h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 200000 | XLCOR varying, TAU constant | Complete | Yes
    SPPT_only_40h111_400km_8h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 400000 | XLCOR varying, TAU constant | Complete | Yes
    SPPT_only_40h111_600km_8h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 600000 | XLCOR varying, TAU constant | Complete | Yes
    SPPT_only_40h111_800km_8h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 800000 | XLCOR varying, TAU constant | Complete | Yes
    SPPT_only_40h111_1000km_8h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 1000000 | XLCOR varying, TAU constant | Complete | Yes
    SPPT_only_40h111_1200km_8h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 1200000 | XLCOR varying, TAU constant | Complete | Yes
    SPPT_only_40h111_1500km_8h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 1500000 | XLCOR varying, TAU constant | Complete | Yes
    SPPT_only_40h111_1800km_8h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 1800000 | XLCOR varying, TAU constant | Complete | Yes

    The next step in the SPPT sensitivity analysis will be a set of experiments designed to test the impact of the SDEV parameter. Default values of 8h and 2000000 for TAU and XLCOR are used respectively.

    The XCLIPRATIOSDT parameter will also be adjusted as a function of the SDEVSDT value. Initially keeping the clipping at 1.0 (clipping value is XCLIPRATIOSDT * SDEVSDT), but also exploring other options.

    = Experiment Name == Who == DTG == DTGEND == Version == Domain == SDEV_SDT == XCLIPRATIOSDT == Description and Comments == Status == Verification =
    SPPTonly40h111sdev01Alan20160530002016060500harmonEPS40h1.1.1(17985)METCOOP25B0.110.0SDEV and XCLIP varyingCompleteYes
    SPPTonly40h111sdev02Janne20160530002016060500harmonEPS40h1.1.1(17985)METCOOP25B0.25.0SDEV and XCLIP varyingCompleteYes
    SPPTonly40h111sdev03Karoliina20160530002016060500harmonEPS40h1.1.1(17985)METCOOP25B0.33.3SDEV and XCLIP varyingCompleteYes
    SPPTonly40h111sdev04Alan20160530002016060500harmonEPS40h1.1.1(17985)METCOOP25B0.42.5SDEV and XCLIP varyingCompleteYes
    SPPTonly40h111sdev05Janne20160530002016060500harmonEPS40h1.1.1(17985)METCOOP25B0.52.0SDEV and XCLIP varyingCompleteYes
    SPPTonly40h111sdev06Karoliina20160530002016060500harmonEPS40h1.1.1(17985)METCOOP25B0.61.65SDEV and XCLIP varyingCompleteYes
    SPPTonly40h111sdev07Alan20160530002016060500harmonEPS40h1.1.1(17985)METCOOP25B0.71.4SDEV and XCLIP varyingCompleteYes
    SPPTonly40h111sdev08Janne20160530002016060500harmonEPS40h1.1.1(17985)METCOOP25B0.81.25SDEV and XCLIP varyingCompleteYes
    SPPTonly40h111sdev09Karoliina20160530002016060500harmonEPS40h1.1.1(17985)METCOOP25B0.91.1SDEV and XCLIP varyingCompleteYes
    SPPTonly40h111sdev10Alan20160530002016060500harmonEPS40h1.1.1(17985)METCOOP25B1.01.0SDEV and XCLIP varyingCompleteYes
    +

    SPPT

    ** Since CY46h1 SPPT is no longer supported in HarmonEPS **

    The SPPT configuration within HarmonEPS is being tested over the period 2016053000 to 2016060500 using the !MetCoOp domain. It has been found that there are some problems with the default pattern generator and thus it has been decided to use the Stochastic Pattern Generator (SPG).

    Below is a table of experiments which will be completed in order to find a suitable configuration of the SPG control parameters TAU (time correlation scale) and XLCOR (length correlation scale). The value of the standard deviation of the perturbation amplitudes (SDEV_SDT) is kept fixed at 0.20 as is the clipping ratio of the perturbations (XCLIP_RATIO_SDT=5.0). These values along with the default value for XLCOR come from suggested settings used by Mihaly Szucs.

    First of all, keeping the XLCOR parameter constant (set at the default value of 2000000), TAU will be varied between 1h and 24h as shown in the table. The value of TAU is in seconds in the table below. The value of XLCOR is in metres.

    The experiments are started by typing ~hlam/Harmonie start DTG=2016053000 DTGEND=2016060500 BUILD=yes

Experiment Name | Who | DTG | DTGEND | Version | Domain | TAU | XLCOR | Description and Comments | Status | Verification
SPPT_only_40h111_2000km_1h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 3600 | 2000000 | XLCOR constant, TAU varying | Suspended | No
SPPT_only_40h111_2000km_3h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 10800 | 2000000 | XLCOR constant, TAU varying | Crash | No
SPPT_only_40h111_2000km_6h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 21600 | 2000000 | XLCOR constant, TAU varying | Complete | Yes
SPPT_only_40h111_2000km_9h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 32400 | 2000000 | XLCOR constant, TAU varying | Complete | Yes
SPPT_only_40h111_2000km_12h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 43200 | 2000000 | XLCOR constant, TAU varying | Complete | Yes
SPPT_only_40h111_2000km_15h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 54000 | 2000000 | XLCOR constant, TAU varying | Complete | Yes
SPPT_only_40h111_2000km_18h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 64800 | 2000000 | XLCOR constant, TAU varying | Complete | Yes
SPPT_only_40h111_2000km_21h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 75600 | 2000000 | XLCOR constant, TAU varying | Complete | Yes
SPPT_only_40h111_2000km_24h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 86400 | 2000000 | XLCOR constant, TAU varying | Complete | Yes

    Once these experiments have been completed testing will commence on keeping the time correlation scale constant and the spatial scale will be varied. Below is a table of experiments to this effect.

    A default value of 8h will be used for TAU as per the suggested value from Mihaly Szucs.

Experiment Name | Who | DTG | DTGEND | Version | Domain | TAU | XLCOR | Description and Comments | Status | Verification
SPPT_only_40h111_100km_8h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 100000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_200km_8h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 200000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_400km_8h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 400000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_600km_8h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 600000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_800km_8h | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 800000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_1000km_8h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 1000000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_1200km_8h | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 1200000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_1500km_8h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 1500000 | XLCOR varying, TAU constant | Complete | Yes
SPPT_only_40h111_1800km_8h | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 28800 | 1800000 | XLCOR varying, TAU constant | Complete | Yes

    The next step in the SPPT sensitivity analysis will be a set of experiments designed to test the impact of the SDEV parameter. Default values of 8h and 2000000 for TAU and XLCOR are used respectively.

The XCLIP_RATIO_SDT parameter will also be adjusted as a function of the SDEV_SDT value. Initially the clipping is kept at 1.0 (the clipping value is XCLIP_RATIO_SDT * SDEV_SDT), but other options will also be explored.

Experiment Name | Who | DTG | DTGEND | Version | Domain | SDEV_SDT | XCLIP_RATIO_SDT | Description and Comments | Status | Verification
SPPT_only_40h111_sdev01 | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.1 | 10.0 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev02 | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.2 | 5.0 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev03 | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.3 | 3.3 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev04 | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.4 | 2.5 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev05 | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.5 | 2.0 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev06 | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.6 | 1.65 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev07 | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.7 | 1.4 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev08 | Janne | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.8 | 1.25 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev09 | Karoliina | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 0.9 | 1.1 | SDEV and XCLIP varying | Complete | Yes
SPPT_only_40h111_sdev10 | Alan | 2016053000 | 2016060500 | harmonEPS-40h1.1.1 (17985) | METCOOP25B | 1.0 | 1.0 | SDEV and XCLIP varying | Complete | Yes
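For orientation, the sketch below collects the perturbation parameters discussed above for a single experiment. The variable names follow this page; the exact names and where they are set (ecf/config_exp.h or elsewhere) depend on the HarmonEPS version, so treat this purely as an illustration.

 # Illustrative SPG/SPPT settings for one of the sensitivity experiments above
 export SDEV_SDT=0.20          # standard deviation of the perturbation amplitudes
 export XCLIP_RATIO_SDT=5.0    # clipping ratio of the perturbations
 export TAU=28800              # time correlation scale in seconds (8 h)
 export XLCOR=2000000          # length correlation scale in metres (2000 km)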
    diff --git a/previews/PR1129/EPS/Setup/index.html b/previews/PR1129/EPS/Setup/index.html index a474d7042..b6b99cecb 100644 --- a/previews/PR1129/EPS/Setup/index.html +++ b/previews/PR1129/EPS/Setup/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -
    +
    diff --git a/previews/PR1129/EPS/System/index.html b/previews/PR1129/EPS/System/index.html index c8251ecee..252c5e4a4 100644 --- a/previews/PR1129/EPS/System/index.html +++ b/previews/PR1129/EPS/System/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Ensemble mode in the Harmonie script system

    Overview

    • Purpose
    • Prerequisites
    • Option checking
    • EPS in the tdf file

    Purpose

    The purpose of this document is to give more details about how ensemble mode works in the Harmonie script system than can easily be found in other pages. It is meant for system people and developers who need to understand or extend the functionality of HarmonEPS. Such extensions could be e.g. implementation of new initial perturbation techniques.

    Prerequisites

    You should also read the Howto to get acquainted with what is already implemented.

Having read the prerequisite pages, you know that an ensemble experiment is not very different from a deterministic one; you only need to set a few ensemble-related variables (ENSMSEL, ENSINIPERT, etc.) in ecf/config_exp.h and then make some member-specific exceptions in the perl "module" msms/harmonie.pm. But there is more going on behind the scenes.

First of all, the ENSMSEL member selection variable exists for convenience; what is used in msms/harmonie.tdf and other scripts is an expanded version of it called ENSMSELX. In the script scr/Start this expansion is done by invoking the script scr/Ens_util.pl, which is also used to set a couple of other convenience variables:

    # Compute derived EPS quantities, needed in harmonie.tdf
     export ENSSIZE ENSMFIRST ENSMLAST
     ENSSIZE=`perl -S Ens_util.pl ENSSIZE`
     ENSMFIRST=`perl -S Ens_util.pl ENSMFIRST`
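 # Hypothetical illustration of the expansion (selection syntax assumed, not taken from Ens_util.pl):
 #   ENSMSEL="0-3,6" in ecf/config_exp.h would expand to ENSMSELX="0 1 2 3 6",
 #   from which ENSSIZE=5, ENSMFIRST=0 and ENSMLAST=6 would be derived.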
    @@ -46,4 +46,4 @@
      ), 

To activate the change we also need to modify scr/Get_namelist, the script that builds the namelist for us, so that it takes the member_$ENSMBR change into account.

     ...
      forecast|dfi|traj4d)
         NAMELIST_CONFIG="$DEFAULT dynamics $DYNAMICS $PHYSICS ${DYNAMICS}_${PHYSICS} $SURFACE $EXTRA_FORECAST_OPTIONS member_$ENSMBR"
 ...

Repeat this for all your members with the changes you would like to apply.
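As an illustration, such a per-member section follows the same form as the other entries in nam/harmonie_namelists.pm; the namelist group and switch below are placeholders only, not a recommended setting:

 %member_1=(
  NAMPHY=>{
   'LGWD' => '.FALSE.,',
  },
 );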

    diff --git a/previews/PR1129/ExperimentConfiguration/ConfigureYourExperiment/index.html b/previews/PR1129/ExperimentConfiguration/ConfigureYourExperiment/index.html index 0cdb75777..97437b26e 100644 --- a/previews/PR1129/ExperimentConfiguration/ConfigureYourExperiment/index.html +++ b/previews/PR1129/ExperimentConfiguration/ConfigureYourExperiment/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Experiment configuration

    Introduction

There are several levels of configuration available in HARMONIE. The highest level of configuration is done in ecf/config_exp.h, which contains the environment variables used to control the experiment. In the following, the different variables are described in the order they appear in ecf/config_exp.h.

    Host specific paths and environment variables for your system are defined in Env_system.

    Build options

    # **** Build and bin paths ****
     # Definitions about Build, should fit with hm_rev
     BUILD=${BUILD-yes}              # Turn on or off the compilation and binary build (yes|no)
     BUILD_WITH=${BUILD_WITH-makeup} # Which build system to use (makeup|cmake)

    BUILD is a switch for compiling HARMONIE code (yes|no) and BUILD_WITH controls which build system to use when compiling HARMONIE-AROME.
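For example, to switch compilation off (e.g. when reusing existing binaries) and to select the cmake build system for any later build, one could set the following in ecf/config_exp.h (an illustration only):

 BUILD=no
 BUILD_WITH=cmake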

    BINDIR=${BINDIR-$HM_DATA/bin}                 # Binary directory

BINDIR is the location where your HARMONIE binaries will be installed. You can use this to point to binaries outside of your experiment. A few other options for non-default configurations exist as well:

    COMPILE_ENKF=${COMPILE_ENKF-"no"}             # Compile LETKF code (yes|no)
    @@ -316,4 +316,4 @@
     MAIL_TESTBED=                           # testbed results summary
    • MAIL_ON_ABORT e-mail address to send a mail to if a task fails in ecFlow
    • MAIL_TESTBED e-mail address to send a mail to with a summary of the testbed results

    Testbed

    export TESTBED_LIST="AROME AROME_1D AROME_3DVAR \
                          AROME_BD_ARO AROME_BD_ARO_IO_SERV \
                          HarmonEPS HarmonEPS_IFSENS \
                     AROME_CLIMSIM"
• TESTBED_LIST contains the configurations that will be run in the testbed
    diff --git a/previews/PR1129/ExperimentConfiguration/How_to_use_hires_topography/index.html b/previews/PR1129/ExperimentConfiguration/How_to_use_hires_topography/index.html index f0cdb4457..4a3de2384 100644 --- a/previews/PR1129/ExperimentConfiguration/How_to_use_hires_topography/index.html +++ b/previews/PR1129/ExperimentConfiguration/How_to_use_hires_topography/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    How to Introduce New High-Resolution Topography into Harmonie

    Introduction

    This page describes how to set up and use an ultra-high resolution topographic data set for your Harmonie domain, instead of the current standard GTOPO30 data set.

    The data replacing GTOPO30 is likely to be much denser (by a factor of 100 or more), so it probably doesn’t make much sense for each centre to store a complete quasi-global set. It is much more practical for each centre to generate and store a local sub-set of the high-resolution topography to encompass just their own computational domains. First the principal process is described and in [#DoitallinsideHARMONIE here] the streamlined implementation, with the coarser GMTED2010 data, in the system is summarized.

    Background

The standard topographic data set currently used by Harmonie is the global GTOPO30. This is a “Digital Elevation Model” (DEM) with a horizontal resolution of 30 arc seconds (approx. 1km). As Harmonie model configurations start to use grid-sizes of 1km or smaller, the computational grid can have finer resolution than the topographic grid, and so topography becomes a new limiting factor in the full model resolution.

    It is possible now to overcome this limitation of the relatively coarse GTOPO30 topography by replacing it with a much finer-scale DEM. One such DEM representation of the earth’s surface has been available since Oct. 2011. This is version 2 of the DEM derived from the Aster instrument on board the Terra satellite, as part of the collaboration between Japan’s Ministry of Economy, Trade and Industry (METI), and NASA in the U.S. In the Aster dataset, surface elevations are reported at a horizontal resolution of approx. 30m. Thus the Aster data is about 900 times denser than GTOPO30, i.e., has about 30 times higher resolution in each horizontal dimension. The average error in the vertical elevation estimates is approx. 6-10m (I think of this as the typical height of a tree or a house – the kind of things that can confuse the satellite radiometer into reporting a false surface elevation).

    More information about the Aster DEM is available here .

    Even for “relatively” coarse Harmonie grids (perhaps even 2.5km meshes), the “slope”, “roughness”, “silhouette” and other physical attributes of the topography used by Harmonie can be provided much more accurately by Aster data than by GTOPO30.

    Obtaining ASTER high-resolution DEM data

    The data are publicly and freely available here. Since the resolution is so fine, the complete dataset is quite voluminous: the compressed file for each $1^o x 1^o$ (longitude-latitude) tile or “granule” at $50^o$ N is about 15MB if totally land-covered. To obtain the data you want:

    1. Draw a bounding rectangle around your domain of interest (even all of Europe!) at that “reverb” web-site;
2. Select “ASTER Global Digital Elevation Model V0002” from the list of data sets further down the same web-page;
    3. Click the “Search for Granules” box at the bottom of the page.

    From here you will be brought through a standard registration process familiar to anyone who has ever bought a train ticket online – the main difference being that the Aster data are free. Once registration is complete, you should receive an email after 1-2 days telling you that your data is ready, and how you can ftp it to your own computer.

I obtained 97 “granules” of such data (i.e., DEM files for an area approximately $1^o \times 1^o$), covering the islands of Ireland and the UK ($14^o$ longitude x $11^o$ latitude). The 57 “missing” granules are simply ocean regions that encompass no land at all. No granules are provided for any 1-degree tile that is completely over the open ocean, since sea-level elevation is assumed to be zero. Each granule is a separate zip file, varying in size up to about 15MB, depending on the fraction of land area in each 1-degree tile and the complexity of the topography itself. Granules are identified by the latitude and longitude of the southwest (lower-left) corner (or more precisely, of the geometric centre of the southwest corner pixel), which is given in the file-name.

    Processing the Raw Aster Data

Each “granule” of data is a zip file that unzips to 2 “tiff” files (ASTGTM2_*_dem.tif, containing the actual data, and ASTGTM2_*_num.tif, containing “quality assessment”), along with a generic README.pdf.

The Quality Assessment files (*num.tif) contain the number of “stereo scene pairs” used to determine elevation at each pixel (if positive) or the source of non-ASTER elevation data used to replace bad ASTER DEM values (if negative). Values less than 5 are associated with relatively larger errors in the elevation measurement. Larger values are associated with more accurate final estimates of surface elevation.

    This information can help to identify those regions where the Aster values may need to be merged or replaced with elevation data from some other source.

For the purposes of formatting the DEM data for Harmonie, the first step is to extract the (longitude, latitude, elevation) triplet for each “pixel” (or “grid-point”) from the ASTGTM2_*_dem.tif files.

    This can be done using software such as the Geospatial Data Abstraction Library (GDAL) open-source tools, available here. Once installed, these can be used to read, merge, and otherwise manipulate geoTIFF files.

The command I used to generate a simple text file containing longitude, latitude and elevation from an ASTGTM2_*_dem.tif file is, e.g.,

gdal_translate -of XYZ ASTGTM2_N59W006_dem.tif ASTGTM2_N59W006_xyz.txt

    This fills the ascii output file ASTGTM2_N59W006_xyz.txt with rows of (longitude, latitude, elevation) values, starting from the northwest (top-left) corner (-6, 60), and proceeding eastwards to (-5,60), then moving south to the next row, ending up at the southeast (bottom-right) corner (-5,59). Each output text file contains 3601 x 3601 data-points, i.e., 12,967,201 rows, and uses over 550 MB of storage. Note that the various granules overlap each other at the boundaries (the line of boundary data is included in each bounding file). Elevation is in units of metres.

    This is relatively straightforward and only needs to be done once for each file, even if there may be more efficient ways to do it.

    The command above can easily be scripted to process the full collection of *dem.tif files:

    for i in $( ls ASTGTM2_*_dem.tif ); do
       gdal_translate -of XYZ $i $i.xyz
     done

    To obtain some information about any particular tif file, use the gdalinfo command.
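For example, for the granule used above:

 gdalinfo ASTGTM2_N59W006_dem.tif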

    Next, the data from each separate $1^o x 1^o$ text file were combined into a single flat unformatted little-endian binary file consisting of surface elevation every 30m or so encompassing the entire Harmonie domain. For an Ireland/UK domain, bounded between latitudes 49 and 60 deg. N, and between longitudes 11 deg. W and 3 deg. E, this contains approx. 40,000 (latitude) x 50,000 (longitude) data points – or about 2 billion points altogether. (2 billion points stored in 2-byte integer format use about 4GB of storage). A domain like this can easily be extended to the west and north, where there is no land and where sea-level elevation is simply zero. In order to extend it to the south or east, however, where there is land, extra Aster granules would be required. See the attached standalone program (or [wiki:hirestopoggather_tiles.f]) for the details of what I did (crude but effective; nothing fancy and probably not the best way, but simple and it works). The file is written with the first element at the northwest corner, progressing eastwards, then south, with the last element at the southeast corner.
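As a quick check of these numbers: at the 1 arc-second spacing of the Aster data, the box from 49 to 60 deg. N and 11 deg. W to 3 deg. E corresponds to roughly 11 x 3600 ≈ 40,000 rows by 14 x 3600 ≈ 50,000 columns, i.e. about 2 x 10^9 points, and at 2 bytes per point about 4GB of storage, consistent with the figures above.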

    Errors and Data Gaps

I did not notice any gaps or "bad data" in the Aster topography over Ireland and the UK, but there do seem to be some gaps, negative pixels or positive "spikes" elsewhere, esp. over Scandinavia and other high latitudes. The "quality assessment" numbers in the ASTGTM2_*_num.tif files mentioned above can help to identify bad or dubious elevation values. Nicolas Bauer (FMI) has a procedure for detecting and correcting these.

    Formatting topographic data for use by Harmonie

    The main topographic data files used by Harmonie are in $HM_CLDATA/PGD, and are called gtopo30.hdr and gtopo30.dir . In principle, all that is required now is to replace these 2 files (containing GTOPO30 data) with equivalent files containing Aster data. There is no need to change the file names or to edit any Harmonie source code – just create new files with the old names, and containing Aster instead of GTOPO30 data.

    The gtopo30.hdr file is the “header” file, containing meta-data about the main data file (gtopo30.dir). The header file contains all the information needed by the MASTERODB executable to read in the real data from the gtopo30.dir file, and to use it appropriately within the rest of Harmonie. The original gtopo30.hdr file contains:

    GTOPO30 orography model, rewritten by V. Masson, CNRM, Meteo-France, 16/07/98
     nodata: -9999
    @@ -187,4 +187,4 @@
      AOSIP           > 001:013-00600-105@20051219_00:00+0000 000 A/S i+     0.000E+000    8.055E-003  548.990E-003   14.087E-003
      AOSJP           > 001:014-00600-105@20051219_00:00+0000 000 A/S j+     0.000E+000    8.297E-003  461.020E-003   14.306E-003
      AOSIM           > 001:015-00600-105@20051219_00:00+0000 000 A/S i-     0.000E+000    8.280E-003  521.020E-003   14.863E-003
 AOSJM           > 001:016-00600-105@20051219_00:00+0000 000 A/S i-     0.000E+000    8.454E-003  471.930E-003   15.079E-003

Presently, these derivations are done automatically, so from a technical point of view there is nothing for the user to worry about. However, further development and improvements will eventually be needed when high-resolution source data on topography are used.

    Conclusion

    In order to replace the (relatively) coarse-resolution GTOPO30 topography with higher-resolution data (e.g., from Aster), it is enough to generate replacements for the gtopo30.hdr and gtopo30.dir files in the $HM_CLDATA/PGD directory, as described in the upper part of this page.


    diff --git a/previews/PR1129/ExperimentConfiguration/ModelDomain/index.html b/previews/PR1129/ExperimentConfiguration/ModelDomain/index.html index 2033cfc5f..72b916c74 100644 --- a/previews/PR1129/ExperimentConfiguration/ModelDomain/index.html +++ b/previews/PR1129/ExperimentConfiguration/ModelDomain/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Model Domain

    Introduction

There are four projections available in HARMONIE: polar stereographic, Lambert, Mercator and rotated Mercator. The model itself chooses the best (least distortion) projection among the first three given your domain specifications. The rotated Mercator projection is selected through the variable LROTMER. Note that the polar stereographic projection is defined at $90^o$ N(S) whereas in GRIB1 it is defined at $60^o$ N(S).

    projections

    Polar stereographic, Lambert and Mercator projection.

    Rotated Mercator

    Rotated mercator projection

    Model domain settings

HARMONIE model domains are defined in scr/Harmonie_domains.pm. For each domain, the following variables related to the geometry and the resolution are required:

    • TSTEP is model timestep in seconds
    • NLON is number of points in $x$-direction.
    • NLAT is number of points in $y$-direction.
    • LONC is the longitude of domain centre in degrees.
    • LATC is the latitude of domain center in degrees.
    • LON0 is the reference longitude of the projection in degrees.
    • LAT0 is the reference latitude of the projection in degrees. If LAT0 is set to 90, the projection is polar stereographic. If LAT0 < 90, the projection is lambert unless LMRT=.TRUE.
    • GSIZE is the distance between each grid point in meters in both $x$- and $y$-direction.
    • EZONE is number of points over extension zone in both $x$- and $y$-direction. Default value 11.
    • LMRT switch for rotated Mercator projection. If LMRT=.TRUE. LAT0 should be zero.

NLON and NLAT should be of the form $5^a \cdot 3^b \cdot 2^c$, where $a$, $b$ and $c$ are integers $\geq 0$.
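As an illustration, a domain entry in scr/Harmonie_domains.pm is a perl hash built from the variables above. The sketch below is purely illustrative: the values do not describe a real domain and the exact syntax may differ slightly between versions.

 'MYTESTDOM' => {
    'TSTEP' => 60,
    'NLON'  => 240,
    'NLAT'  => 240,
    'LONC'  => 10.0,
    'LATC'  => 55.0,
    'LON0'  => 10.0,
    'LAT0'  => 55.0,
    'GSIZE' => 2500.,
    'EZONE' => 11,
    'LMRT'  => '.FALSE.',
 },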

The default area is the Denmark domain (DKCOEXP). The following values for the C+I zone and truncation are calculated in scr/Harmonie_domains.pm from the values above; a worked example follows the list below.

    • NDLUXG is number of points in x-direction without extension (E) zone.
    • NDGUXG is number of points in y-direction without extension (E) zone.
    • NMSMAX_LINE is truncation order in longitude. By default (NLON-2)/2.
    • NSMAX_LINE is truncation order in latitude. By default (NLAT-2)/2.
    • NMSMAX_QUAD is truncation order in longitude. By default (NLON-2)/3. It is used to create filtered orography with lower resolution.
    • NSMAX_QUAD is truncation order in latitude. By default (NLAT-2)/3. It is used to create filtered orography with lower resolution.
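For example, for a domain with NLON = NLAT = 800, the defaults give NMSMAX_LINE = NSMAX_LINE = (800 - 2)/2 = 399 and NMSMAX_QUAD = NSMAX_QUAD = (800 - 2)/3 = 266.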

    cie domain

    Domain creation tool

    To help with the design of a new domain, there is an interactive tool that lets you experiment with the grid parameters described above, and visualize the resulting domain immediately on a map, see figure below.

    At present, it only works for Lambert and polar stereographic projection, not rotated mercator.

    Creating a new domain

    If you are happy with your new domain created with the help of the domain creation tool you can add it to scr/Harmonie_domains.pm for your experiment, my_exp (assuming you have set up the experiment):

    cd $HOME/hm_home/my_exp
     PATH_TO_HARMONIE/config-sh/Harmonie co scr/Harmonie_domains.pm
     #
     # add domain information for new domain called MYNEWDOM in this file
    @@ -23,4 +23,4 @@
     OUTGEO%PROJLAT = 60.0
     OUTGEO%PROJLAT2 = 60.0
     OUTGEO%PROJLON = 0.0,
/

    Running gl using this namelist by

    gl -n namelist_file

will create a GRIB file with a constant orography which you can use for plotting.


    diff --git a/previews/PR1129/ExperimentConfiguration/Namelists/index.html b/previews/PR1129/ExperimentConfiguration/Namelists/index.html index 79f92c257..1522bd02b 100644 --- a/previews/PR1129/ExperimentConfiguration/Namelists/index.html +++ b/previews/PR1129/ExperimentConfiguration/Namelists/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Controlling the namelists in HARMONIE

    Introduction

IFS is largely driven by namelists and has thousands of options. For each configuration a number of namelists controlling different parts are read. To make the maintenance of the namelists manageable, and to assure consistency in terms of e.g. naming conventions for fields, packing accuracy, physics settings and parallel options, all namelists are generated as they are needed during the run. All the basic settings are defined in a perl dictionary: nam/harmonie_namelists.pm for IFS, and nam/surfex_namelists.pm and nam/surfex_selected_output.pm for SURFEX. In future versions the commented namelists will be included and maintained as part of the code. The IFS dictionary is structured in several sections:

harmonie_namelists.pm

    • Global settings

    • Technical settings

      • Host specific settings
      • MPP options
      • File settings
    • DYNAMICS SETTINGS

      • Main dynamics switches
      • Non-hydrostatic settings
      • Vertical finite element
      • DFI
    • Main physics options. NB! These may contain switches for dynamics as well

      • ALADIN
      • AROME
      • EDMFM switches, to be applied after AROME
      • Alaro
      • Old surface
      • SURFEX
      • DDH
    • E927 Interpolation settings

      • Main fullpos settings
      • E927
      • E927 nh
      • SURFEX initial file generation
      • Aladin e927
      • ALARO e927
      • Arome e927
    • General postprocessing switches

      • Default fullpos settings
      • NH postprocessing
      • Switches for postprocessing with surfex
      • Special cases for arome
    • Assimilation

      • Canari
        • Arome canari
      • Varbc_rad
      • Varbc_coldstart
      • Screening
        • Arome screening
        • Alaro screening
      • Minimization
        • Alaro minimization
      • 4DVAR
        • 4DVAR minimization
        • 4DVAR screening
        • 4DVAR trajectory
    • Climate generation

      • Climate generations (e923)
    • Misc

      • General namelist settings for Tangent-Linear and Adjoint tests
      • Extra Adjoint test options
      • Oulan
      • Bator

The final namelists are built through the rules given in scr/Get_namelist and are generated by nam/gen_namelists.pl. Note that in several cases environment variables are still parsed in the scripts.
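For example, a forecast namelist is typically produced from within a task script like this (the same pattern appears, commented out, near the end of this page):

 NAMELIST=$WRK/$WDIR/namelist_forecast
 Get_namelist forecast $NAMELIST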

    surfex_namelists.pm

All possible SURFEX namelist settings are documented at the SURFEX web site. Use the search text area in the upper right corner to search for a specific namelist or namelist option. Please keep in mind that the SURFEX web site documents the latest SURFEX version, i.e. SURFEXv8, while in cy40h SURFEXv7.3 is used. Therefore, some of the settings may be different or not available.

    PGD

    PGD represents in general the preparation of physiography data. The default PGD settings are listed here. Some modifications can be done for specific model configurations and will be specified as e.g. alaro_pgd or arome_pgd.

    # 2 layer ISBA scheme
     %isba_2L=(
      NAM_ISBA=>{
       CISBA          => '"2-L",',
    @@ -292,4 +292,4 @@
     #NAMELIST=$WRK/$WDIR/namelist_forecast
     #Get_namelist forecast $NAMELIST
     NAMELIST=$HM_LIB/nam/namelist_forecast_with_a_unique_name

For namelists not present in the dictionary you just copy them to your local nam directory.

    There is also a description on how to generate new namelist dictionaries here.

    diff --git a/previews/PR1129/ExperimentConfiguration/PlatformConfiguration/index.html b/previews/PR1129/ExperimentConfiguration/PlatformConfiguration/index.html index 4f1f88084..360bb7b55 100644 --- a/previews/PR1129/ExperimentConfiguration/PlatformConfiguration/index.html +++ b/previews/PR1129/ExperimentConfiguration/PlatformConfiguration/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Platform Configuration

    Overview

    This wiki page outlines the configuration files required by HARMONIE for successful compilation and running of the system.

    Basic requirements

All experiments require a valid host to "setup" an experiment using the Harmonie script. Recall from the quick start instructions that in order to set up a new experiment on your platform, called YOURHOST, using HARMONIE downloaded to PATH_TO_HARMONIE, one must issue the following command:

    cd hm_home/my_exp
     PATH_TO_HARMONIE/config-sh/Harmonie setup -r PATH_TO_HARMONIE -h YOURHOST

    hm_home/my_exp contains:

Env_submit -> config-sh/submit.YOURHOST           ## YOURHOST task submission settings
 Env_system -> config-sh/config.YOURHOST           ## YOURHOST platform specific settings
     ./config-sh/hm_rev                                ## contains PATH_TO_HARMONIE
    @@ -12,4 +12,4 @@
     ./config-sh/config.YOURHOST                       ## YOURHOST task submission settings
     ./suites/harmonie.pm                              ## perl module to define ensemble settings
     ./ecf/config_exp.h                                ## your experiment definition (scientific type options)
./scr/include.ass                                 ## assimilation specific settings

    But, what if your host configuration is not available in the HARMONIE system? Host specific configuration files in PATH_TO_HARMONIE/config-sh must be available for your host and configuration files for the compilation of the code must be available. This documentation attempts to describe what is required.

    Host config files

    Env_system -> config-sh/config.YOURHOST

The config.YOURHOST file defines host specific variables such as some input directory locations. If your YOURHOST is not already included in HARMONIE it may be worth looking at the config.* files in config-sh/ to see what other people have done. The table below outlines the variables set in config-sh/config.YOURHOST and what they do:

Variable name | Description
COMPCENTRE | controls special ECMWF solutions (such as MARS) where required. Set to LOCAL if you are unsure
HARMONIE_CONFIG | defines the config file used by Makeup compilation
MAKEUP_BUILD_DIR | location where Makeup compiles the HARMONIE code
MAKE_OWN_PRECOMPILED | yes => install pre-compiled code in $PRECOMPILED
PRECOMPILED | location of (optional) pre-compiled HARMONIE code
E923_DATA_PATH | location of input data for E923, climate generation
PGD_DATA_PATH | location of input data for PGD, surfex climate generation
ECOSG_DATA_PATH | location of input data for ECOCLIMAP2G
GMTED2010_DATA_PATH | location of HRES DEM
SOILGRID_DATA_PATH | location of SOILGRID data
HM_SAT_CONST | location of constants for satellite assimilation
RTTOV_COEFDIR | location of RTTOV coefficients
HM_DATA | location of top working directory for the experiment
HM_LIB | location of src/scripts and compiled code
TASK_LIMIT | Maximum number of jobs submitted by ECFLOW
RSYNC_EXCLUDE | used to exclude .git* sub-directories from copy of source code for compilation
DR_HOOK_IGNORE_SIGNALS | environment variable used by Dr Hook to ignore certain "signals"
HOST0 | define primary host name
HOSTN | define other host name(s)
HOST_INSTALL | 0 => install on HOST0, 0:...:N => install on HOST0,...,HOSTN
MAKE | make command may need to be explicitly defined. Set to make for most platforms
MKDIR | mkdir command (default: mkdir -p)
JOBOUTDIR | where ECFLOW writes its log files
ECCODES_DEFINITION_PATH | location of local ecCodes definition files
BUFR_TABLES | location of local BUFR tables
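A minimal sketch of how a few of these variables might be set in config-sh/config.YOURHOST (paths and values purely illustrative):

 export COMPCENTRE=LOCAL
 export HM_DATA=$HOME/hm_data/$EXP     # hypothetical top working directory
 export HM_LIB=$HM_DATA/lib            # hypothetical location of scripts and compiled code
 export TASK_LIMIT=20
 export MAKE=make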

    Env_submit -> config-sh/submit.YOURHOST

    The Env_submit file uses perl to tell the HARMONIE scheduler how to execute programs - which programs should be run on multiple processors and define batch submissions if required.

perl | description
%backg_job | defines variables for jobs run in the background on HOST0
%scalar_job | defines variables for single processor batch jobs
%par_job | defines variables for multi-processor batch jobs
@backg_list | list of tasks to be submitted as a background job
@scalar_list | list of tasks to be submitted as a scalar job
@par_list | list of tasks to be submitted as a parallel job
default | "wildcard" task name to define the default type of job for unlisted tasks
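Schematically, and only as a sketch (the task names and option keys below are placeholders, not taken from a real submit.* file), an Env_submit ties these pieces together like this:

 @par_list    = ( 'Forecast' );       # tasks submitted as multi-processor batch jobs
 @scalar_list = ( 'Obsmon' );         # tasks submitted as single-processor batch jobs
 @backg_list  = ( 'default' );        # remaining tasks run in the background on HOST0
 %par_job     = ( 'placeholder_option' => 'placeholder_value' );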

    Host summary

YOURHOST | Host type | batch | Contact | Notes
AEMET.cirrus.gnu
AEMET.nimbus.ifort.mpi
bi | | | | SMHI
centos8
cirrus
debian11
ecgb | | | | switched off
ecgb-cca | ECMWF HPC with MPI dual host | slurm/PBS | | switched off
ECMWF.atos | ECMWF Atos HPC with MPI | slurm | |
fedora33
fedora34
KNMI.bullx_b720 | KNMI Atos HPC with MPI | slurm | Bert van Ulft |
LinuxPC | General Linux PC no MPI | none | |
LinuxPC-MPI | General Linux PC with MPI | none | |
LinuxPC-MPI-KNMI | KNMI Linux workstation (Fedora) | none | |
LinuxPC-MPI-ubuntu | Ubuntu Linux PC with MPI | none | |
LinuxPC-serial
METIE.LinuxPC | METIE CentOS 6 PC with MPI | none | Eoin Whelan |
METIE.LinuxPC8
METIE.LinuxRH7gnu | METIE Redhat 7 server with MPI | none | Eoin Whelan |
METIE.LinuxRH7gnu-dev
METIE.reaserve8
METIE.reaserve8musc
nebula
nebula-gnu
opensuse
SMHI.Linda4 | | | | SMHI
SMHI.Linda5 | | | | SMHI
stratus
teho
ubuntu18
ubuntu20
ubuntu20_nompi
voima

    Compilation config files

    Makeup

    config files required for compilation of code using Makeup ...

    More information on Makeup is available here: Build with Makeup

    Obsmon

    For config files required for compilation of obsmon check util/obsmon/config

    +./scr/include.ass ## assimilation specific settings

    But, what if your host configuration is not available in the HARMONIE system? Host specific configuration files in PATH_TO_HARMONIE/config-sh must be available for your host and configuration files for the compilation of the code must be available. This documentation attempts to describe what is required.

    Host config files

    Env_system -> config-sh/config.YOURHOST

    The config.YOURHOST file defines host specific variables such as some input directory locations. If your YOURHOST is not already included in HARMONIE it may be work looking at config.* files in config-sh/ to see what other people have done. The table below outlines variables set in config-sh/config-sh.YOURHOST and what the variables do:

    Variable nameDescription
    COMPCENTREcontrols special ECMWF solutions (such as MARS) where required. Set to LOCAL if you are unsure
    HARMONIE_CONFIGdefines the config file used by Makeup compilation
    MAKEUP_BUILD_DIRlocation of where Makeup compiles the HARMONIE code
    MAKE_OWN_PRECOMPILEDyes=>install pre-compiled code in $PRECOMPILED
    PRECOMPILEDlocation of (optional) pre-compiled HARMONIE code
    E923_DATA_PATHlocation of input data for E923, climate generation
    PGD_DATA_PATHlocation of input data for PGD, surfex climate generation
    ECOSG_DATA_PATHlocation of input data for ECOCLIMAP2G
    GMTED2010_DATA_PATHlocation of HRES DEM
    SOILGRID_DATA_PATHlocation of SOILGRID data
    HM_SAT_CONSTlocation of constants for satellite assimilation
    RTTOV_COEFDIRlocation of RTTOV coefficients
    HM_DATAlocation of top working directory for the experiment
    HM_LIBlocation of src/scripts and compiled code
    TASK_LIMITMaximum number of jobs submitted by ECFLOW
    RSYNC_EXCLUDEused to exclude .git* sub-directories from copy of source code for compilation
    DR_HOOK_IGNORE_SIGNALSenvironment variable used by Dr Hook to ignore certain "signals"
    HOST0define primary host name
    HOSTNdefine other host name(s)
    HOST_INSTALL0=> install on HOST0, 0:...:N => install on HOST0,...,HOSTN
    MAKEmake command may need to be explicity defined. Set to make for most platforms
    MKDIRmkdir command (default: mkdir -p)
    JOBOUTDIRwhere ECFLOW writes its log files
    ECCODES_DEFINITION_PATHlocation of local ecCodes definition files
    BUFR_TABLESlocation of local BUFR tables

    Env_submit -> config-sh/submit.YOURHOST

The Env_submit file uses Perl to tell the HARMONIE scheduler how to execute programs: which programs should be run on multiple processors, and how batch submissions should be defined if required.

• %backg_job: defines variables for jobs run in the background on HOST0
• %scalar_job: defines variables for single-processor batch jobs
• %par_job: defines variables for multi-processor batch jobs
• @backg_list: list of tasks to be submitted as background jobs
• @scalar_list: list of tasks to be submitted as scalar jobs
• @par_list: list of tasks to be submitted as parallel jobs
• default: "wildcard" task name used to define the default type of job for unlisted tasks

    Host summary

For each YOURHOST the host type, batch system, contact and notes are listed where known:

• AEMET.cirrus.gnu
• AEMET.nimbus.ifort.mpi
• bi: SMHI
• centos8
• cirrus
• debian11
• ecgb: switched off
• ecgb-cca: ECMWF HPC with MPI, dual host; batch: slurm/PBS; switched off
• ECMWF.atos: ECMWF Atos HPC with MPI; batch: slurm
• fedora33
• fedora34
• KNMI.bullx_b720: KNMI Atos HPC with MPI; batch: slurm; contact: Bert van Ulft
• LinuxPC: General Linux PC, no MPI; batch: none
• LinuxPC-MPI: General Linux PC with MPI; batch: none
• LinuxPC-MPI-KNMI: KNMI Linux workstation (Fedora); batch: none
• LinuxPC-MPI-ubuntu: Ubuntu Linux PC with MPI; batch: none
• LinuxPC-serial
• METIE.LinuxPC: METIE CentOS 6 PC with MPI; batch: none; contact: Eoin Whelan
• METIE.LinuxPC8
• METIE.LinuxRH7gnu: METIE Redhat 7 server with MPI; batch: none; contact: Eoin Whelan
• METIE.LinuxRH7gnu-dev
• METIE.reaserve8
• METIE.reaserve8musc
• nebula
• nebula-gnu
• opensuse
• SMHI.Linda4: SMHI
• SMHI.Linda5: SMHI
• stratus
• teho
• ubuntu18
• ubuntu20
• ubuntu20_nompi
• voima

    Compilation config files

    Makeup

    config files required for compilation of code using Makeup ...

    More information on Makeup is available here: Build with Makeup

    Obsmon

    For config files required for compilation of obsmon check util/obsmon/config


    Use of Observation

    Background Information

    Observation type

    The observation types used by Harmonie (upper-air) data assimilation are defined in scr/include.ass.

    SYNOP

    By default all SYNOP observation types (including SHIP) are used.

    export SYNOP_OBS=1             # All synop

To blacklist SYNOP observations, add a line of the form "ODB observation type / ASCII type / ODB code type / ODB variable number / station identifier / date to blacklist from" to nam/LISTE_NOIRE_DIAP. For example, to blacklist 10 m winds from Valentia Automatic SYNOP (03953) from the 10th of November 2012, add the following line to LISTE_NOIRE_DIAP:

     1 SYNOP       14  41 03953    10112012

    (Note: please don't add Valentia to your blacklist - the observations from there are pretty good!)

    For further information on ODB observation types, code types, variable numbers etc see the ECMWF ODB governance page here: http://apps.ecmwf.int/odbgov/obstype/

    SHIP

    See information provided above on SYNOP observations.

    BUOY

    By default all BUOY observation types are used.

    export BUOY_OBS=1              # Buoy

To blacklist BUOY observations, add a line of the form "ODB observation type / ASCII type / ODB code type / ODB variable number / station identifier / date to blacklist from" to nam/LISTE_NOIRE_DIAP. For example, to blacklist surface temperatures from BUOY M5 (62094) from the 10th of November 2012, add the following line to LISTE_NOIRE_DIAP:

     4 BUOY        165  11 62094    10112012

(Note: please don't add M5 to your blacklist - the observations from there are pretty good too!)

    For further information on ODB observation types, code types, variable numbers etc see the ECMWF ODB governance page here: http://apps.ecmwf.int/odbgov/obstype/

    AIRCRAFT

    By default all AIRCRAFT observation types (including AMDAR, AIREP, ACARS) are used.

    export AIRCRAFT_OBS=1          # AMDAR, AIREP, ACARS

    Below are lines added by Xiaohua to the DMI dka37 LISTE_NOIRE_DIAP file to exclude problematic aircraft observations:

    2 AMDAR 144 2 EU0028 08292013
     2 AMDAR 144 2 EU0092 01042013
     2 AMDAR 144 2 EU0079 01052013
     2 AMDAR 144 2 EU0097 01052013
...
     export PAOB_OBS=0              # PAOB not defined everywhere
     export SCATT_OBS=0             # Scatterometer data not defined everywhere
     export LIMB_OBS=0              # LIMB observations, GPS Radio Occultations
export RADAR_OBS=0             # Radar

    HARMONIE Vertical Model Level Definitions

    HARMONIE vertical coordinate

The HARMONIE model, like that of HIRLAM, is constructed for a general pressure-based and terrain-following vertical coordinate $\eta(p,p_s)$, where

\[\eta(0,p_s) = 0\]

    and

    \[\eta(p_s,p_s) = 1\]

    The formulation corresponds to the ECMWF hybrid system. The model is formulated for a spherical coordinate system ($\lambda$, $\theta$), but in the code two metric coefficients $(h_x,h_y)$ have been introduced. This is done to prepare the model for any orthogonal coordinate system or map projection with axes (x,y).

    To represent the vertical variation of the dependent variables (U, V, T and Q), the atmosphere is divided into "nlev" layers. These layers are defined by the pressures at the interfaces between them (the `half-levels'). From the general expression

    \[p_{k+1/2} = A_{k+1/2} (n) + B_{k+1/2}(n) * p_s(x,y)\]

    for $k=0,1,...,nlev$

the vertical surfaces for the half-levels are defined. Pure pressure surfaces are obtained for $B=0$ and pure $\sigma$ surfaces for $A=0$. The `full-level' pressure associated with each model level (the middle of two half-levels) is then determined accordingly.
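
As a worked illustration of the expression above (the coefficient values are invented for the example, not taken from any actual VLEV definition), a half-level with $A_{k+1/2} = 5000$ Pa and $B_{k+1/2} = 0.8$ lies, for a surface pressure of 1000 hPa, at

\[p_{k+1/2} = 5000 + 0.8 \times 100000 = 85000 \;\mathrm{Pa} = 850 \;\mathrm{hPa}\]

i.e. it follows the terrain, but less closely than a pure $\sigma$ surface would.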

    Definition of model levels in HARMONIE

The script scr/Vertical_levels.pl contains the definitions of vertical levels that have been used in the HIRLAM community for research and/or operational purposes. Currently the default model setup uses the 65-level structure derived by Per Unden, SMHI. Model level definitions for commonly used vertical structures in HARMONIE are listed below.

    • FourtyLevel: HIRLAM_40 model levels (same as Hirlam 6.2.1, Nov 2003 - HIRLAM 7.0, 2006 )
    • SixtyLevel: HIRLAM-60 model levels (same as Hirlam 7.1, March 2007 - 2012 )
• MF_60: MF-60 model levels (same as Meteo France AROME since 2010 )
    • SixtyfiveLevel: 65 model levels (same as Hirlam 7.4, March 2012 - )
    • other levels: Prague87, MF70, 40 (ALADIN-40), ECMWF_60.

Note that VLEV is the name of the set of A/B coefficients defining your levels, set in ecf/config_exp.h. There is, for example, more than one definition for 60 levels. To print the levels just run scr/Vertical_levels.pl

    Usage: scr/Vertical_levels.pl [VLEV PRINT_OPTION] where:

    • VLEV: name of your level definition
    • PRINT_OPTION=AHALF: print A coefficients for VLEV
    • PRINT_OPTION=BHALF: print B coefficients for VLEV
    • PRINT_OPTION=NLEV: print number of levels for VLEV
    • PRINT_OPTION=NRFP3S: print NRFP3S namelist values for VLEV

    See here for ECMWF level definitions.

When performing a HARMONIE experiment, users can select the vertical levels by changing VLEV in ecf/config_exp.h. If a non-standard number of levels is to be used, the script scr/Vertical_levels.pl needs to be edited to add the layer definition.

    Define new eta levels

A brief description and some code on how to create new eta levels can be found here.

    There is also an interactive tool that can help you in creating a new set of levels.

    The method is based on a program by Pierre Bénard, Meteo France, that is described in this gmapdoc article.

    Relevant corresponding data set for different vertical structure

HARMONIE 3D-VAR and 4D-VAR upper-air data assimilation needs background error structure functions for each given vertical layer structure. Note that the structure function data included in the reference HARMONIE repository const/jb_data are only valid for the reference configuration. Users who run 3D-VAR/4D-VAR are strongly recommended to derive proper structure function data from their own data archive, following the instructions in the Harmonie wiki, to avoid improper use of the structure functions.


    How to use DDH in HARMONIE-AROME

    1. To use DDH, set USEDDH="yes" in ecf/config_exp.h

2. Set TFLAG to minute (min) if you wish to have output every minute. Note that the conversion of FA to GRIB format does not work automatically with this setting.

    3. Setting up the namelist (nam/harmonie_namelists.pm). Below is an example where point data for Dublin is extracted.

      %ddh=(
         NAMDDH => {
         'LFLEXDIA' => '.TRUE.,', # Must be TRUE
         'BDEDDH(1,01)' => '4.,', # 4 means a point
    ...
         'LHDMCI' => '.FALSE.,',
         'LHDENT' => '.FALSE.,',
         },
);
      4. Description of the variables

• LHDGLB: type of domain: global domain
• LHDZON: type of domain: zonal bands
• LHDDOP: type of domain: limited and isolated point
• LHDPRG: write global diagnostics on listing
• LHDPRZ: write zonal bands diagnostics on listing
• LHDPRD: write limited domains diagnostics on listing
• LHDEFG: write global diagnostics on file
• LHDEFZ: write zonal bands diagnostics on file
• LHDEFD: write limited domain diagnostics on file
• LHDHKS: budgets of mass, energy, momentum, RH, soil
• LHDMCI: budget of kinetic momentum
• LHDENT: budget of entropy

    Forecast

scr/Forecast is the script which initiates the actual forecast run (ALADIN/AROME/ALARO, depending on FLAG and PHFLAG).

    • Input parameters: none.
• Data: Boundary files (ELSCF*-files). Initial file (fc_start). If data assimilation is used, fc_start is the analysis file. In the case of dynamical adaptation, fc_start is the first boundary file. In the case of AROME, a Surfex initial file (SURFXINI.lfi) is also needed (see scr/Prep_ini_surfex).
• Namelists: namelist templates nam/namelist_fcst${FLAG}_default are fetched based on FLAG and PHFLAG. The templates are completed in scr/Forecast based on the choices of NPROCX, NPROCY (see config-sh/submit.*), TFLAG, OUTINT, BDINT and REDUCELFI. In the case of AROME, the namelists controlling the SURFEX scheme, nam/TEST.des and nam/EXSEG1.nam, are also needed.
    • Executables: as defined by MODEL.
    • Output: Forecast files (spectral files ICMSHALAD+*). In case of AROME, Surfex files containing the surface data (AROMOUT_*.lfi).

    Forecast namelists

The switches in the HARMONIE system (in ecf/config_exp.h) provide only limited control over the different aspects of the model. If the user wants more detailed control of specific schemes etc., the various namelist options have to be modified.

In general, the different namelist options are documented in the source code modules (e.g. src/arp/module/*.F90). Information on some of the choices is listed below.

    NH-dynamics/advection/time stepping:

• A detailed overview of such options has been given by Vivoda (2008).

    Upper air physics switches

    • Switches related to different schemes of ALADIN/ALARO physics, src/arp/module/yomphy.F90.
    • Switches related to physics schemes in AROME src/arp/module/yomarphy.F90.
    • Switches to tune different aspects of physics, src/arp/module/yomphy0.F90, src/arp/module/yomphy1.F90, src/arp/module/yomphy2.F90 and src/arp/module/yomphy3.F90
    • Switches related to HIRLAM physics, src/arp/module/yhloption.F90 and src/arp/setup/suhloption.F90.

    Initialization switch

    • Initialization is controlled by namelist NAMINI/NEINI, src/arp/module/yomini.F90.

    Horizontal diffusion switches

• Horizontal diffusion is controlled by namelist NAMDYN/RDAMP*, src/arp/module/yomdyn.F90#L55. The larger the coefficient, the less diffusion.

    MPP switches

• The number of processors in HARMONIE is given in config-sh/submit.*. These values are transferred into src/arp/module/yomct0.F90#L276 and src/arp/module/yommp.F90.

    Surface SURFEX switches

    • The SURFEX scheme is controlled through namelist settings in nam/surfex_namelists.pm. The different options are described here.

    Archiving

Archiving has a two-layer structure. Firstly, all the needed analysis, forecast and field extract files are stored in the ARCHIVE directory by scr/Archive_fc. This is where the postprocessing step expects to find the files.

At ECMWF all the requested files are stored to ECFS, in the directory ECFSLOC, by the script scr/Archive_ECMWF.


    Forecast Settings

    This page gives some details and advice on appropriate settings for the HARMONIE-AROME forecast

    Microphysics

    ICE-T

Switch ICE-T on by setting LICET=.TRUE. in harmonie_namelists.pm under &NAMPARAR in %arome. When using ICE-T (LICET), LOCND2 should be set to True, and LMODICEDEP preferably to False. LICET will override LKOGAN, so by default LKOGAN=F. Documentation: (Engdahl et al., 2020)
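
A minimal sketch of the corresponding fragment follows; merge these keys into the existing &NAMPARAR block of %arome in nam/harmonie_namelists.pm rather than adding a new hash (the enclosing structure is shown only for context):

 %arome=(
  NAMPARAR => {
   'LICET'      => '.TRUE.,',   # switch ICE-T on
   'LOCND2'     => '.TRUE.,',   # recommended together with ICE-T
   'LMODICEDEP' => '.FALSE.,',  # preferably off when ICE-T is used
  },
 );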

    Description: ICE-T is a modified cloud microphysics scheme that builds upon ICE3 and OCN2D, with elements from the Thompson scheme from WRF. ICE-T was developed in cy40h1.1 for the purpose of better representation of supercooled liquid water, and downstream forecasts of atmospheric icing. The changes include stricter conditions for ice nucleation, less efficient collection of liquid water by snow and graupel, and variable rain size distribution depending on the source of the rain. (Rain originating from melting snow or graupel have larger drops, than rain originating from warm processes.)

    Shallow Convection

LSHALLOWMF activates (.TRUE.) or de-activates (.FALSE.) the DUAL (dry and moist) mass flux shallow convection parameterisation. Note that with LSHALLOWMF=.FALSE. the mass flux activity as a source term for TKE in the turbulence scheme (energy cascade) will also be eliminated. The moist updraft transport contribution to the cloud scheme is also eliminated with LSHALLOWMF=.FALSE.. For details of the convection scheme and its links to the cloud and turbulence schemes, see https://doi.org/10.5194/gmd-15-1513-2022.

The scale-aware convection scheme is activated by setting LSCAWAREMF=.TRUE.. This reduces the dry and moist (if present) mass flux using a hyperbolic tangent function scaled with the dry boundary layer height $h$ for the dry updraft and the sub-cloud height plus cloud layer depth $h+h_c$ for the moist updraft:

    \[f = \tanh\left(1.86 \frac{\Delta x}{h+h_c}\right)\]
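
For example, with $\Delta x = 750$ m and an assumed $h+h_c = 1500$ m (an illustrative value, not a model setting), the reduction factor is

\[f = \tanh\left(1.86 \times \frac{750}{1500}\right) = \tanh(0.93) \approx 0.73\]

so, reading the scheme as multiplying the mass flux by $f$, the parameterised flux is reduced to roughly 73% of its unscaled value at this resolution.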

    NOTE: this option can only be used when LSHALLOWMF=.TRUE..

    To support the model when it is trying to build up convection itself, the setting LWTHRESH=.TRUE. can be used. Depending on the gridsize, a vertical velocity threshold is defined. If the absolute value of the vertical velocity in a grid column exceeds this threshold the shallow convection is shut down.

The LWTHRESH option has been updated so that the vertical velocity is now only diagnosed in the lowest 6 km; this prevents high vertical velocities not related to convection from being used.

The LWTHRESHMOIST option works similarly to LWTHRESH, but only the parameterised moist convection is shut down when the threshold is met; the dry convection is not affected by this option (but could be affected by LSCAWAREMF).

    Note

The LWTHRESH and LWTHRESHMOIST options can only be active when LSHALLOWMF=.TRUE..

    Turbulence scheme

    HARATU

HARATU (HArmonie with RAcmo TUrbulence scheme) is the default (HARATU=yes in config_exp.h) turbulence scheme in HARMONIE-AROME, originally developed for RACMO (Regional Atmospheric Climate MOdel). The length scale of this turbulence scheme is described by (Lenderink and Holtslag, 2004). Note that HARATU has only been tested in combination with LSHALLOWMF=.TRUE. and CMF_UPDRAFT='DUAL'. The latter convection scheme provides input to the HARATU turbulence scheme to represent the important energy cascade (from large to small scales), see https://doi.org/10.5194/gmd-15-1513-2022.


    High Resolution Modelling

    This page gives some details and advice on settings for running HARMONIE-AROME at sub-kilometre resolutions. A number of workshops have been held on this topic; more information and presentations may be found on hirlam.org.

A default configuration is to be made available via scr/Harmonie_configurations.pm. Below are some details on the various options.

    Practicalities

    Since experiments with higher resolutions may take a longer time than usual, it can be worthwhile to increase the PATIENCE beyond the default. For example, add the following to Env_submit

    if ( $ENV{SIMULATION_TYPE} eq 'nwp' ) {
      $job_list{'Forecast'}{'ENV'}              = $submit_type.'-v PATIENCE=28800' ;
     }

    Grid choice

    Within ecf/config_exp.h, it is recommended to use quadratic or cubic spectral truncations:

    GRID_TYPE=QUADRATIC           # Type of grid (LINEAR|QUADRATIC|CUBIC)

    These choices are cheaper and more stable. No problematic reduction in accuracy has been reported, and quadratic grids are used operationally at a number of centres.

For 750 m resolution and a quadratic grid, TSTEP=30 should work well. For linear truncation, 20 s will be needed.

    Boundaries

    Use of IFS boundaries has been found to be satisfactory.

With higher sub-kilometric resolutions, smaller domains are inevitable. To deal with domain spin-up issues with precipitation, it may be worth coupling hydrometeors from the boundary files. Within scr/MARS_get_bd and scr/gl_bd there is an option USE_IFS_CLOUD_COND, which adds cloud and hydrometeors to the boundary files. This can be added and exported in ecf/config_exp.h if needed.

    The coupling is then activated in nam/harmonie_namelists.pm under NAMGFL, e.g. for cloud ice:

      'YI_NL%NCOUPLING' => '1,',
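
If the other hydrometeors are to be coupled as well, the corresponding NAMGFL entries can be added in the same way. The sketch below assumes the usual AROME GFL field names for cloud liquid, rain, snow and graupel (YL_NL, YR_NL, YS_NL, YG_NL); check the NAMGFL block of your namelist for the exact set of fields available:

  'YL_NL%NCOUPLING' => '1,',   # cloud liquid water (assumed field name)
  'YR_NL%NCOUPLING' => '1,',   # rain (assumed field name)
  'YS_NL%NCOUPLING' => '1,',   # snow (assumed field name)
  'YG_NL%NCOUPLING' => '1,',   # graupel (assumed field name)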

    Diffusion and controlling noise

    LGWADV

    By default, LGWADV=FALSE and LRDBBC=TRUE. These can be found in NAMDYNA.

    It has been found that changing the values can remove noise visible in certain situations in MSLP fields; i.e. set LGWADV=TRUE and LRDBBC=FALSE. These are the values used at Météo France and ALADIN centres using the predictor-corrector time-stepping, rather than semi-implicit SETTLS (default in HARMONIE).

Update: the new defaults in 43h2.1.1 are LGWADV=TRUE and LRDBBC=FALSE.

    VESL

    Another option to control noise is the off-centering parameter VESL, found in NAMDYN, which is 0 by default in operational 2.5km configurations. A non-zero value such as 0.1 will smooth small-scale noise.

    For some (currently unknown) reason, the LGWADV option described above cannot be used with a non-zero value of VESL; forecasts crash quickly. Given the potential of VESL to stabilise higher-resolution runs, it is probably a preferable option, i.e.

     NAMDYN=>{
       'VESL' => '0.1,',
   ...
       'RDAMPVD' => '20.,',
       'RDAMPVOR' => '20.,',
     },

With a quadratic or cubic grid and non-zero VESL, these defaults have been found to be adequate. Without VESL, higher levels of diffusion through lower RDAMP* values of 10 or even 1 are necessary.

    SLHD

Experiments at Météo France suggest not using SLHD on hydrometeors; cf. the ASM 2020 presentation by Yann Seity.

    In ecf/config_exp.h

    LGRADSP=yes                             # Apply Wedi/Hortal vorticity dealiasing (yes|no)
LUNBC=yes                               # Apply upper nested boundary condition (yes|no)

    Sample configurations

    Coming soon...


    Near Real Time Aerosols

    The model can be configured to use near-real-time aerosols from CAMS. This is done by setting USEAERO=camsnrt in ecf/config_exp.h, which leads to retrieval of boundary files containing aerosol mass mixing ratio fields from CAMS. Other values of USEAERO are related to use and generation of climatological (2D) aerosol. Please see scr/forecastmodelsettings.sh for some further details.

    • Namelist NAMNRTAER contains switches related to n.r.t. aerosols in cloud microphysics under src/mpa.
• Namelist NAMPHY contains the src/arpifs definitions LAEROSEA, LAEROLAN, LAEROSOO, LAERODES, LAEROVOL, LAEROSUL, LAEROMMR and LAERONRT. LAERONRT is set true when n.r.t. aerosols are used. The others are related to climatological aerosols and are set false when n.r.t. aerosols are used.
    • Aerosol fields in YAERO_NL are defined in namelist NAMGFL. Variable NAERO defines the number of available n.r.t. aerosol species (14).
    • Namelist NAERAD contains definition of NAER=1/0 to use or not to use climatological aerosol for radiation. When LAERONRT is set true, NAER is set to 0.

    NAMNRTAER namelist

The switches and some parameters can be set in NAMNRTAER (in nam/harmonie_namelists.pm); a minimal sketch is given after the list below.

• LCAMS_NRT: switches on the use of CAMS aerosols in HARMONIE-AROME; the mass mixing ratio fields must be present in the first guess and the boundary conditions. The number and names of those fields are specified in the namelist NAMGFL.
    • SSMINLO: Supersaturation at sfc level. (default 0.05%). The supersaturation activates the condensation nuclei (CN) to obtain CCN.
    • SSMINUP: Supersaturation over SSHEIGHT height (default 0.08%).
• SSHEIGHT: Height over which the minimum SS is SSMINUP (default 100 m).
    • SSMAX: Maximum supersaturation (default 1.0%).
    • SSFACVV: Factor for dependence of SS with vertical velocity (0.0-1.0).
    • SSFACSS: Factor for dependence of SS with coarse sea salt (0.0-1.0).
• CCNMIN: Minimum number concentration of Cloud Condensation Nuclei (CCN) inside the cloud. It is taken as 10E6 m-3 (10 cm-3). Other values can be considered, but probably not over 50 cm-3.
• CLDROPMIN: Minimum CDNC inside the cloud. It is practically the same as CCNMIN. Other values can be considered, but probably not over 50 cm-3.
    • IFNMINSIZE: Minimum radius of aerosol ice nucleating particles (default 0.01 micrometer).
    • LMOCA_NRT: In case of getting the aerosol fields from MOCAGE (still not in use).
    • LAEIFN: To activate Ice nuclei (mainly dust and hydrophobic organic matter and Black carbon).
    • LAERDRDEP: Activates the aerosol dry deposition. (FALSE by default).
    • LAECCN2CLDR: By default LAECCN2CLDR=FALSE, that is CDNC=CCN.
    • LAERSSEM: switch for sea salt emission (FALSE by default).
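
As referenced above, a minimal sketch of how these switches might appear in nam/harmonie_namelists.pm. The enclosing hash name (%nrtaer) is illustrative, and the values simply repeat the defaults quoted in the list:

 %nrtaer=(
  NAMNRTAER => {
   'LCAMS_NRT' => '.TRUE.,',   # use CAMS n.r.t. aerosol mass mixing ratios
   'SSMINLO'   => '0.05,',     # supersaturation at the surface level (%)
   'SSMINUP'   => '0.08,',     # supersaturation above SSHEIGHT (%)
   'SSHEIGHT'  => '100.,',     # height (m) above which SSMINUP applies
   'SSMAX'     => '1.0,',      # maximum supersaturation (%)
  },
 );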

    Documentation of OCND2 modification of ICE3/ICE4 microphysics in AROME

    Introduction

This option was implemented in the ICE3/ICE4 microphysics in 2014 in order to improve the performance of the HARMONIE-AROME model configuration in winter over the Arctic/sub-Arctic region. The errors corrected were mainly missing low clouds in moderately cold conditions, an excess of ice clouds in severely cold weather and an excess of cirrus clouds.

    Implementation in CY46 - switching on the parameterisation

To use the parameterisation, go to nam/harmonie_namelists.pm and set LOCND2=.TRUE. in the NAMPARAR namelist.
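
A minimal sketch of the corresponding namelist fragment (merge the key into the existing NAMPARAR block of %arome rather than adding a new hash; CMICRO is included only to illustrate the choice between the two code versions described below):

 %arome=(
  NAMPARAR => {
   'LOCND2' => '.TRUE.,',    # activate the OCND2 modifications
   'CMICRO' => "'ICE3',",    # 'ICE3' -> rain_ice.F90, 'OLD3' -> rain_ice_old.F90
  },
 );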

    About the Code

In CY46, there are two coding versions of ICE3/ICE4 - rain_ice_old.F90 and rain_ice.F90. The variable CMICRO determines which is used, OLD3 for rain_ice_old.F90 and ICE3 for rain_ice.F90. The structure of the code differs between these versions, and since the time-stepping procedure is different, the result differs too. But the content of the modification for OCND2 is the same in both versions. The modifications can be found by searching for

    IF(OCND2) THEN \
         --- new code ---\
ENDIF

The main OCND2 modifications are:

    1. Tuning factors for reducing the rate of deposition/evaporation of snow and graupel. See code block “1.2 COMPUTE SOME CONSTANT PARAMETERS” in rain_ice_old.F90 or in ice4_slow.F90. The tuning factors are then used later in rain_ice_old.F90, see code block “3.4.3 compute the deposition on rs: RVDEPS" for snow and in “3.4.6 compute the deposition on rg: RVDEPG” for graupel. In the rain_ice.F90 framework it is all done in the routine ice4_slow.F90. More information about the tuning parameters is included later in this documentation.
    2. Mask to limit computation: Set by tuning parameters in the code block “1.2 COMPUTE SOME CONSTANT PARAMETERS“ in rain_ice_old.F90 or in aro_rain_ice.F90 within the rain_ice.F90 framework. For OCND2=FALSE the limits are hard-coded.
    3. The cloud ice crystal concentration: Modified with OCND2, see code block “3.1.1 compute the cloud ice concentration” in rain_ice_old or ice4_nucleation.F90 within the rain_ice.F90 framework.
    4. Turn large cloud ice crystals into snow: See code block “3.4.5 B:” in rain_ice_old.F90 or ice4_fast_si.F90 within the rain_ice.F90 framework.
5. Omit collisions between snow and graupel: the effect in nature is very small, so omitting them speeds up the computation a little. See code block “6.2.5” in rain_ice_old.F90 or ice4_fast_rg.F90 respectively.
    6. Sub grid-scale calculation of deposition/evaporation of cloud ice. See code block “7.2 Bergeron-Findeisen effect: RCBERI” in rain_ice_old.F90 or ice4_fast_si.F90 for the rain_ice.F90 set up.

    There is also an important difference in condensation.F90: With OCND2, only liquid cloud water is handled within the statistical cloud scheme, not both ice and water as is the case with OCND2=F. With OCND2=F, the total cloud cover is calculated directly from the statistical cloud scheme. With OCND2=T, the total cloud cover is calculated as a sum of a liquid part, which is basically just the cloud cover from the statistical cloud scheme and an ice part which is based on the relative humidity with respect to ice and on the content of solid water species.

    There are two new routines for OCND2:

    1. icecloud.F90 is used for the sub grid-scale handling of relative humidity with respect to ice and thus for ice clouds. It is called from condensation.F90.
    2. ice4_fast_si.F90 is only used by the newer rain_ice.F90 routine. As already mentioned, it deals with deposition/evaporation of cloud ice.

    Tuning parameters

    The tuning parameters used specifically for OCND2 can be divided into three categories:

Parameters that only have an effect if OCND2 is set to TRUE and that are used for SPP (April 2023):

• RFRMIN(21): Tuning factor for ice clouds, such as cirrus. A larger value means a larger effect of the presence of solid water and thus more ice clouds. (The value is somewhat dependent on what kind of measurement one compares with, and how thin a cirrus cloud should be to be counted as a cloud. A range of 0.5 to 3 should be enough.)

Parameters that only have an effect if OCND2 is set to TRUE but are currently (April 2023) not used in SPP:

• RFRMIN(12): Threshold supersaturation with respect to ice in the supersaturated part of the grid box for treatment in the microphysics computation. A larger value gives more supersaturation and a somewhat faster computation. Values that are too large are physically unrealistic, but there seems to be no consensus about the best value.
• RFRMIN(13): Threshold mixing ratio for the different non-vapour water species treated in the microphysics computation. Larger values result in faster computation, but possibly important processes, active when only small mixing ratios of water species are present, may be missed.
• RFRMIN(15): Ice crystal diameter (m) for conversion from cloud ice to snow. Larger values lead to more ice and less snow.
• RFRMIN(27): Experimental! Minimum temperature (K) used for the Meyers ice number concentration. Larger values give less ice for temperatures below RFRMIN(27).
• RFRMIN(39): Speed factor for the deposition/evaporation rate of graupel. Larger values give faster deposition/evaporation.
• RFRMIN(40): Speed factor for the deposition/evaporation rate of snow. Larger values give faster deposition/evaporation.

Parameters that have an effect even when OCND2 is not used, but were designed for OCND2:

• RFRMIN(1), RFRMIN(2), RFRMIN(3) and RFRMIN(4): Different thresholds for snow, ice, graupel and graupel again, respectively, leading to conversion of supercooled rain into graupel. A higher value gives more supercooled rain, but may be less physically realistic.
• RFRMIN(7): Tuning factor for the collisions between rain and snow. Higher values give less supercooled rain and more snow. Zero means that those collisions are disregarded (probably OK).

Full list of RFRMIN variables (included here for completeness; not all are OCND2-related):

• RFRMIN(1) (default 1.0E-5): A higher value means more supercooled rain and somewhat less graupel.
• RFRMIN(2) (default 1.0E-8): As RFRMIN(1).
• RFRMIN(3) (default 3.0E-7): As RFRMIN(1).
• RFRMIN(4) (default 3.0E-7): As RFRMIN(1).
• RFRMIN(5) (default 1.0E-7): A higher value means less graupel and more snow. Experimental.
• RFRMIN(6) (default 0.15): A higher value means more graupel and less snow. Experimental.
• RFRMIN(7) (default 0.): A higher value means less supercooled rain and somewhat more snow.
• RFRMIN(8) (default 1.): > 1 increases the melting of graupel, < 1 decreases it. Experimental.
• RFRMIN(9) (default 1.): > 1 means increased IN concentration, < 1 decreased.
• RFRMIN(10) (default 10.): > 10 means faster Kogan autoconversion, < 10 slower; only active for LKOGAN=T. This originates from the fact that the formula was based on an LES model with a higher horizontal resolution. It is easy to show that with a coarser resolution and an inhomogeneous cloud liquid field one has to add a compensating factor in order to retain the original mean autoconversion. Tests show that a lower value, e.g. 3, would be better and more in line with what ECMWF is using. The value 10 is, to some extent, a way of decreasing fog, but now we have a lot of other ways to reduce fog.
• RFRMIN(11) (default 1.): Setting e.g. 0.01 means that the subgrid-scale fraction of cloud water is used, with a minimum cloud fraction of 0.01. Only active for LKOGAN=T.
• RFRMIN(12) (default 0.): The level of supersaturation in the ice-supersaturated part of the grid box needed to be treated in the ice microphysics. (Greg Thompson recommends a higher value, 0.05-0.25; in MetCoOp 0.05 is used.) A higher value means faster computations, but also that any ice deposition in clear sky is neglected for ice supersaturation between zero and RFRMIN(12). Only used with OCND2.
• RFRMIN(13) (default 1.0E-15): The mixing ratio of any water species needed for it to be treated in the ice microphysics. The value 1.0E-15 is taken from old HIRLAM. Only used with OCND2.
• RFRMIN(14) (default 120.): Time scale for conversion of large ice crystals to snow. Only used with LMODICEDEP (experimental).
• RFRMIN(15) (default 1.0E-4): Diameter for conversion of ice crystals into snow. A larger value gives more ice and less snow.
• RFRMIN(16) (default 0.): “C” parameter for the size distribution of snow (constant for the number concentration, N=Cλ^x). Only active if non-zero. Experimental.
• RFRMIN(17) (default 0.): “x” parameter for the size distribution of snow (slope for the number concentration, N=Cλ^x). Only active if RFRMIN(16) is non-zero. Experimental.
• RFRMIN(18) (default 0.): With RFRMIN(18)=1, snow and graupel melting is based on the wet-bulb temperature instead of the temperature, which leads to slower melting. Experimental.
• RFRMIN(19) (default 0.): Threshold cloud thickness for the StCu/Cu transition [m]. Only active for the EDMF scheme and if non-zero, but very small effect.
• RFRMIN(20) (default 0.): Threshold cloud thickness used in the shallow/deep decision [m]. Only active for the EDMF scheme and if non-zero; a higher value gives more shallow convection and less deep model-resolved convection.
• RFRMIN(21) (default 1.): Tuning parameter for ice clouds. A larger value gives more cirrus and other ice clouds.
• RFRMIN(22) (default 1.): Tuning parameter for CDNC at the lowest model level. A lower value gives lower CDNC. RFRMIN(22)=0.5 means CDNC = old CDNC x 0.5.
• RFRMIN(23) (default 0.5): Tuning parameter only active with LHGT_QS. The lower limit for reduction of VSIGQSAT.
• RFRMIN(24) (default 1.5): Tuning parameter only active with LHGT_QS. The upper limit for increase of VSIGQSAT.
• RFRMIN(25) (default 30.): Tuning parameter only active with LHGT_QS. The level thickness for which VSIGQSAT is unchanged with LHGT_QS.
• RFRMIN(26) (default 0.): If > 0.01, it replaces the default CDNC everywhere. So RFRMIN(26)=50E6 (beware that it is in m-3!) gives CDNC = 50 cm-3 at the reference level (1000 hPa) and RFRMIN(26) x pressure / reference pressure elsewhere.
• RFRMIN(27) (default 0.): Minimum assumed temperature for the Meyers IN concentration (K). Gives less IN concentration for temperatures below the value set. Experimental!
• RFRMIN(28) (default 0.): Currently not used.
• RFRMIN(29) (default 0.): If > 0. and RFRMIN(22) > 0, it gives the upper limit in metres for which the reduction of CDNC has an effect. A linear decrease from the lowest level to RFRMIN(29) metres is assumed.
• RFRMIN(30) (default 1.): If not unity, CDNC is reduced/increased over sea by a factor RFRMIN(30) for the lowest model level, linearly reaching "no change" at RFRMIN(29) m height. If RFRMIN(29) is unset, RFRMIN(30) only affects the lowest model level.
• RFRMIN(31:38) (default 0.): Currently not used.
• RFRMIN(39) (default 0.25): Reduction factor for deposition/evaporation of graupel. Only used when OCND2=T and LMODICEDEP=F.
• RFRMIN(40) (default 0.15): Reduction factor for deposition/evaporation of snow. Only used when OCND2=T and LMODICEDEP=F.
    +ENDIF

    The main OCDN2 modifications are

    1. Tuning factors for reducing the rate of deposition/evaporation of snow and graupel. See code block “1.2 COMPUTE SOME CONSTANT PARAMETERS” in rain_ice_old.F90 or in ice4_slow.F90. The tuning factors are then used later in rain_ice_old.F90, see code block “3.4.3 compute the deposition on rs: RVDEPS" for snow and in “3.4.6 compute the deposition on rg: RVDEPG” for graupel. In the rain_ice.F90 framework it is all done in the routine ice4_slow.F90. More information about the tuning parameters is included later in this documentation.
    2. Mask to limit computation: Set by tuning parameters in the code block “1.2 COMPUTE SOME CONSTANT PARAMETERS“ in rain_ice_old.F90 or in aro_rain_ice.F90 within the rain_ice.F90 framework. For OCND2=FALSE the limits are hard-coded.
    3. The cloud ice crystal concentration: Modified with OCND2, see code block “3.1.1 compute the cloud ice concentration” in rain_ice_old or ice4_nucleation.F90 within the rain_ice.F90 framework.
    4. Turn large cloud ice crystals into snow: See code block “3.4.5 B:” in rain_ice_old.F90 or ice4_fast_si.F90 within the rain_ice.F90 framework.
5. Omit collisions between snow and graupel, since the effect in nature is very small; omitting them also speeds up the computation a little. See code block “6.2.5” in rain_ice_old.F90 or ice4_fast_rg.F90, respectively.
    6. Sub grid-scale calculation of deposition/evaporation of cloud ice. See code block “7.2 Bergeron-Findeisen effect: RCBERI” in rain_ice_old.F90 or ice4_fast_si.F90 for the rain_ice.F90 set up.

There is also an important difference in condensation.F90: with OCND2, only liquid cloud water is handled within the statistical cloud scheme, not both ice and water as is the case with OCND2=F. With OCND2=F, the total cloud cover is calculated directly from the statistical cloud scheme. With OCND2=T, the total cloud cover is calculated as the sum of a liquid part, which is basically just the cloud cover from the statistical cloud scheme, and an ice part, which is based on the relative humidity with respect to ice and on the content of solid water species.

    There are two new routines for OCND2:

    1. icecloud.F90 is used for the sub grid-scale handling of relative humidity with respect to ice and thus for ice clouds. It is called from condensation.F90.
    2. ice4_fast_si.F90 is only used by the newer rain_ice.F90 routine. As already mentioned, it deals with deposition/evaporation of cloud ice.

    Tuning parameters

    The tuning parameters used specifically for OCND2 can be divided into three categories:

Parameters that only have an effect if OCND2 is set to TRUE and that are used in SPP (April 2023):

Variable | Description
RFRMIN(21) | Tuning factor for ice clouds, such as cirrus. A larger value means a larger effect of the presence of solid water and thus more ice clouds. (The value is somewhat dependent on what kind of measurement one compares with, and on how thin a cirrus cloud should be to count as a cloud. A range of 0.5 to 3 should be enough.)

Parameters that only have an effect if OCND2 is set to TRUE but are currently (April 2023) not used in SPP:

Variable | Description
RFRMIN(12) | Threshold supersaturation with respect to ice in the supersaturated part of the grid box for treatment in the microphysics computation. A larger value gives more supersaturation and a somewhat faster computation. Values that are too large are physically unrealistic, but there seems to be no consensus about the best value.
RFRMIN(13) | Threshold mixing ratio for the different non-vapour water species treated in the microphysics computation. Larger values result in faster computation, but possibly important processes, when only small mixing ratios of water species are present, may be missed.
RFRMIN(15) | Ice crystal diameter (m) for conversion from cloud ice to snow. Larger values lead to more ice and less snow.
RFRMIN(27) | Experimental! Minimum temperature (K) used for the Meyers ice number concentration. Larger values give less ice for temperatures below RFRMIN(27).
RFRMIN(39) | Speed factor for the deposition/evaporation rate of graupel. Larger values give faster deposition/evaporation.
RFRMIN(40) | Speed factor for the deposition/evaporation rate of snow. Larger values give faster deposition/evaporation.

Parameters that have an effect even when OCND2 is not used, but that were designed for OCND2:

Variable | Description
RFRMIN(1), RFRMIN(2), RFRMIN(3) and RFRMIN(4) | Different thresholds for snow, ice, graupel and graupel again, respectively, leading to conversion of super-cooled rain into graupel. A higher value gives more super-cooled rain, but may be less physically realistic.
RFRMIN(7) | Tuning factor for the collisions between rain and snow. Higher values give less super-cooled rain and more snow. Zero means that those collisions are disregarded (probably OK).
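
In practice, these parameters are changed through the model namelist. A minimal, illustrative sketch, assuming the RFRMIN array is set via the usual NAMPARAR microphysics namelist block; the chosen values are examples only (0.05 is the MetCoOp value mentioned in the table below, and 2.0 is a hypothetical ice cloud tuning within the 0.5 to 3 range given above):

    &NAMPARAR
      RFRMIN(12) = 0.05,  ! threshold ice supersaturation treated by the microphysics
      RFRMIN(21) = 2.0,   ! more ice cloud than the default of 1.
    /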

Full list of RFRMIN variables (included here for completeness, not all OCND2-related)

Variable | Value | Description
RFRMIN(1) | 1.0E-5 | A higher value means more supercooled rain and somewhat less graupel.
RFRMIN(2) | 1.0E-8 | ""
RFRMIN(3) | 3.0E-7 | ""
RFRMIN(4) | 3.0E-7 | ""
RFRMIN(5) | 1.0E-7 | A higher value means less graupel and more snow. Experimental.
RFRMIN(6) | 0.15 | A higher value means more graupel and less snow. Experimental.
RFRMIN(7) | 0. | A higher value means less supercooled rain and somewhat more snow.
RFRMIN(8) | 1. | > 1 increases the melting of graupel, < 1 decreases it. Experimental.
RFRMIN(9) | 1. | > 1 means increased IN concentration, < 1 decreased.
RFRMIN(10) | 10. | > 10 means faster Kogan autoconversion, < 10 slower; only active for LKOGAN=T. This originates from the fact that the formula was based on an LES model with a higher horizontal resolution. It is easy to show that with a coarser resolution and an inhomogeneous cloud liquid field one has to add a compensating factor in order to retain the original mean autoconversion. Tests show that a lower value, e.g. 3, would be better and more in line with what ECMWF is using. The value 10 is, to some extent, a way of decreasing fog, but there are now several other ways to reduce fog.
RFRMIN(11) | 1. | Setting e.g. 0.01 means that the subgrid-scale fraction of cloud water is used. Minimum cloud fraction = 0.01. Only active for LKOGAN=T.
RFRMIN(12) | 0. | The level of supersaturation in the ice-supersaturated part of the grid box that needs to be treated in the ice microphysics. (Greg Thompson recommends a higher value, 0.05-0.25; in MetCoOp 0.05 is used.) A higher value means faster computations, but also that any ice deposition in clear sky is neglected for ice supersaturations between zero and RFRMIN(12). Only used with OCND2.
RFRMIN(13) | 1.0E-15 | The mixing ratio of any water species that needs to be treated in the ice microphysics. The value 1.0E-15 is taken from old HIRLAM. Only used with OCND2.
RFRMIN(14) | 120. | Time scale for the conversion of large ice crystals to snow. Only used with LMODICEDEP (experimental).
RFRMIN(15) | 1.0E-4 | Diameter for the conversion of ice crystals into snow. A larger value gives more ice and less snow.
RFRMIN(16) | 0. | “C” parameter for the size distribution of snow (constant for number concentration, N=Cλ^x). Only active if non-zero. Experimental.
RFRMIN(17) | 0. | “x” parameter for the size distribution of snow (slope for number concentration, N=Cλ^x). Only active if RFRMIN(16) is non-zero. Experimental.
RFRMIN(18) | 0. | With RFRMIN(18)=1, snow and graupel melting is based on the wet bulb temperature instead of the temperature, which leads to slower melting. Experimental.
RFRMIN(19) | 0. | Threshold cloud thickness for the StCu/Cu transition [m]. Only active for the EDMF scheme and if non-zero, but very small effect.
RFRMIN(20) | 0. | Threshold cloud thickness used in the shallow/deep decision [m]. Only active for the EDMF scheme and if non-zero; a higher value gives more shallow convection and less deep model-resolved convection.
RFRMIN(21) | 1. | Tuning parameter for ice clouds. A larger value gives more cirrus and other ice clouds.
RFRMIN(22) | 1. | Tuning parameter for CDNC at the lowest model level. A lower value gives lower CDNC. RFRMIN(22)=0.5 means CDNC = old CDNC x 0.5.
RFRMIN(23) | 0.5 | Tuning parameter only active with LHGT_QS. The lower limit for the reduction of VSIGQSAT.
RFRMIN(24) | 1.5 | Tuning parameter only active with LHGT_QS. The upper limit for the increase of VSIGQSAT.
RFRMIN(25) | 30. | Tuning parameter only active with LHGT_QS. The level thickness for which VSIGQSAT is unchanged with LHGT_QS.
RFRMIN(26) | 0. | If > 0.01, it replaces the default CDNC everywhere. So RFRMIN(26)=50E6 (beware that it is in m-3!) gives CDNC = 50 cm-3 at the reference level (1000 hPa) and RFRMIN(26) x pressure / reference pressure elsewhere.
RFRMIN(27) | 0. | Minimum assumed temperature with respect to the Meyers IN concentration (K). Gives a lower IN concentration for temperatures below the value set. Experimental!
RFRMIN(28) | 0. | Currently not used.
RFRMIN(29) | 0. | If > 0 and RFRMIN(22) > 0, it gives the upper limit in metres up to which the reduction of CDNC has an effect. A linear decrease from the lowest level to RFRMIN(29) metres is assumed.
RFRMIN(30) | 1. | If not unity, CDNC is reduced/increased over sea with a factor RFRMIN(30) at the lowest model level, linearly reaching "no change" at RFRMIN(29) m height. If RFRMIN(29) is unset, RFRMIN(30) only affects the lowest model level.
RFRMIN(31:38) | 0. | Currently not used.
RFRMIN(39) | 0.25 | Reduction factor for deposition/evaporation of graupel. Only used when OCND2=T and LMODICEDEP=F.
RFRMIN(40) | 0.15 | Reduction factor for deposition/evaporation of snow. Only used when OCND2=T and LMODICEDEP=F.
    diff --git a/previews/PR1129/ForecastModel/Outputlist/index.html b/previews/PR1129/ForecastModel/Outputlist/index.html index 5e475ad2a..ab40e7666 100644 --- a/previews/PR1129/ForecastModel/Outputlist/index.html +++ b/previews/PR1129/ForecastModel/Outputlist/index.html @@ -3,8 +3,8 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Parameter list and GRIB definitions

    HARMONIE system output

The HARMONIE system writes its primary output, in FA format, to the upper air history files ICMSHHARM+llll and the SURFEX history files ICMSHHARM+llll.sfx, where HARM is the four-character experiment identifier set in the configuration file config_exp.h and llll is normally the forecast length in hours. The files are designed to be complete snapshots of the respective model state for a particular point in time. In addition, more model output, including post-processing/diagnostic fields such as model diagnostics or pressure level diagnostics, can be written out during the forecast model integration, also in FA format, as PFHARMDOMAIN+llll. The FA files can be considered internal format files. All of them can be converted to GRIB files during the run for external usage. The name convention is as follows (a worked example is given after the list):

    • Forecast upper air history files: ICMSHHARM+llll -> fcYYYYMMDDHH+lll_grib (GRIB1) or fcYYYYMMDDHH+lll_grib2 (GRIB2)
    • Forecast Surfex history files: ICMSHHARM+llll.sfx -> fcYYYYMMDDHH+lll_grib_sfx (GRIB1 only)
    • Forecast Surfex selected output: ICMSSELE+llll.sfx -> fcYYYYMMDDHH+lll_grib_sfxs (GRIB1 only)
    • Postprocess files: PFHARMDOMAIN+llll.hfp -> fcYYYYMMDDHH+lllgrib_fp (GRIB1) or fcYYYYMMDDHH+lllgrib2_fp (GRIB2)
    • Analysis upper air history files: ICMSHANAL+0000 -> anYYYYMMDDHH+000grib (GRIB1) or anYYYYMMDDHH+000grib2 (GRIB2) (1)
    • Analysis SURFEX history files: ICMSHANAL+0000.sfx -> sa2019041600+000grib_sfx (only GRIB1 for the time being)
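
As a worked illustration of the patterns above (hypothetical experiment identifier HARM, cycle starting 2019-04-16 00 UTC, 3 h lead time): the upper air history file ICMSHHARM+0003 becomes fc2019041600+003_grib, the SURFEX history file ICMSHHARM+0003.sfx becomes fc2019041600+003_grib_sfx, and the fullpos file PFHARMDOMAIN+0003.hfp becomes fc2019041600+003grib_fp.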

    GRIB1 table 2 version in HARMONIE

To avoid conflicts with archived HIRLAM data, HARMONIE uses version 253 of table 2. The table is based on the standard WMO version 3 of table 2, and positions 000-127 are kept the same as in the WMO table. Note that accumulated and instantaneous versions of the same parameter differ only by the time range indicator. It is thus not sufficient to specify parameter, type and level when you refer to an accumulated parameter; the time range indicator has to be included as well.
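
With the ecCodes command line tools this means matching on the time range indicator in addition to the usual keys. A minimal sketch (the input name follows the generic pattern above; 181/105/0 is rain at the surface according to the tables below, and time range indicator 4 denotes an accumulation):

    # keep only the accumulated rain field, not the instantaneous field with the same parameter, type and level
    grib_copy -w indicatorOfParameter=181,indicatorOfTypeOfLevel=105,level=0,timeRangeIndicator=4 \
      fcYYYYMMDDHH+lll_grib rain_acc.grib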

The translation of SURFEX files to GRIB1 is still incomplete and contains several WMO violations. This is not changed in the current release but will be revised later. However, the upper air history file also includes the most common surface parameters and should be sufficient for most users.

The current table 2 version 253 definition files for gribapi can be found in `util/glgrib_api/definitions/`. These local definition files assume centre=233 (Dublin) and should be copied to your own GRIB-API installation. You are strongly recommended to set your own originating centre code for operational usage of the data.
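
The centre of already converted files can, for example, be changed with grib_set; a hedged sketch in which the centre value 94 and the output name are purely illustrative and should be replaced by your own centre code and naming:

    grib_set -s centre=94 fcYYYYMMDDHH+lll_grib fcYYYYMMDDHH+lll_grib.recentred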

    GRIB2 in HARMONIE

The possibility to convert to GRIB2 has been introduced in release-43h2. So far the conversion is restricted to atmospheric history and fullpos files only. To get the output in GRIB2 set ARCHIVE_FORMAT=GRIB2 in ecf/config_exp.h. Please note that even if ARCHIVE_FORMAT=GRIB2 is selected, SURFEX files will still be converted to GRIB1 (for the time being). To convert from GRIB1 to GRIB2 using grib_filter we have to tell ecCodes how to translate the parameters. This is done by using the internal HARMONIE tables and setting

    export ECCODES_DEFINITION_PATH=$SOME_PATH_TO_GL/gl/definitions:$SOME_PATH_TO_ECCODES/share/eccodes/definitions

Note that there are a few parameters that are not translated to GRIB2, and these have to be excluded explicitly.
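
A minimal sketch of such a conversion with grib_filter is given below; the rules file and output names are illustrative, and ECCODES_DEFINITION_PATH must be set as shown above so that the HARMONIE parameter mappings are found:

    cat > to_grib2.filter << 'EOF'
    # re-encode every GRIB1 message as GRIB2 using the loaded definitions
    set edition = 2;
    write;
    EOF
    grib_filter -o fcYYYYMMDDHH+lll_grib2 to_grib2.filter fcYYYYMMDDHH+lll_grib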

    List of parameters

    header abbreviations in the tables:

abbr. | description | see table
lvT | levelType | level types
iOP | indicatorOfParameter | indicator of parameter
d | discipline |
pC | parameterCategory |
pN | parameterNumber |
lev | level |
sT | stepType | time range indicator

    3D model state variables on model levels (1-NLEV), levelType=hybrid

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    SNNNHUMI.SPECIFIqhushy510101inskg/kgSpecific humidity
    SNNNLIQUID_WATERcwat_condclwhy7601831inskg/kgSpecific cloud liquid water content
    SNNNSOLID_WATERciwc_condclihy5801841inskg/kgSpecific cloud ice water content
    SNNNSNOWsnow_cond#hy18401861inskg/kgSpecific snow water content
    SNNNRAINrain_cond#hy18101851inskg/kgSpecific rain water content
    SNNNGRAUPELgrpl_cond#hy20101321inskg/kgSpecific graupel
    SNNNTKEtketkehy200019111insJ/kgTurbulent Kinetic Energy
    SNNNCLOUD_FRACTItccclthy71061921ins0-1Total cloud cover
    SNNNPRESS.DEPARTpdep#hy2120381insPaPressure departure
    SNNNTEMPERATUREttahy110001insKTemperature
    SNNNVERTIC.DIVERvdiv#hy213021921inss-1Vertical Divergence
    SNNNWIND.U.PHYSuuahy330221insm/su-component of wind
    SNNNWIND.V.PHYSvvahy340231insm/sv-component of wind

    2D Surface, prognostic/diagnostic near-surface and soil variables, levelType=heightAboveGround

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    SURFPRESSIONprespshag10300insPaSurface pressure
    SURFTEMPERATUREtts_radhag110000insKSurface temperature
    CLSTEMPERATUREttashag110002insKTemperature at 2m
    CLSMAXI.TEMPERATtmaxtasmaxhag150002maxKMaximum temperature (FREQ_RESET_TEMP)
    CLSMINI.TEMPERATtmintasminhag160002minKMinimum temperature (FREQ_RESET_TEMP)
    CLSVENT.ZONALuuashag3302210insm/su-component of wind at 10m, relative to model coordinates
    CLSVENT.MERIDIENvvashag3402310insm/sv-component of wind at 10m, relative to model coordinates
    CLSHUMI.SPECIFIQqhusshag510102inskg/kgSpecific humidity at 2m
    CLSHUMI.RELATIVErhurshag52011922ins0-1Relative humidity at 2m
    SURFRESERV.NEIGEsdwesnwhag6501600inskg/m2Snow depth water equivalent
    CLPMHAUT.MOD.XFUmldzmlahag6701930insmHeight (in meters) of the PBL out of the model
    SURFNEBUL.TOTALEtccclt_inshag71061920ins0-1Total cloud cover
    SURFNEBUL.CONVECcccclc_inshag72061930ins0-1Convective cloud cover
    SURFNEBUL.BASSElcccll_inshag73061940ins0-1Low cloud cover
    SURFNEBUL.MOYENNmccclm_inshag74061950ins0-1Medium cloud cover
    SURFNEBUL.HAUTEhccclh_inshag75061960ins0-1High cloud cover
    SURFRAYT.SOLAIREswavr#hag1160470insW/m2Instantaneous surface solar radiation (SW down global) Parameter identifier was 116, again is???
    SURFRAYT.TERRESTlwavr#hag1150540insW/m2Instantaneous longwave radiation flux
    SURFCAPE.MOD.XFUcapecapehag1600760insJ/kgModel output CAPE (not calculated by AROME physics)
    SURFDIAGHAILxhail#hag161012030ins0-1AROME hail diagnostic, LXXDIAGH = .TRUE.
    CLSU.RAF.MOD.XFUugstugshag162022310maxm/sU-momentum of gusts from the model. LXXGST = .TRUE. in NAMXFU. gives gust between current and previous output time step (FREQ_RESET_GUST)
    CLSV.RAF.MOD.XFUvgstvgshag163022410maxm/sV-momentum of gusts from the model. LXXGST = .TRUE. in NAMXFU. gives gust between current and previous output time step (FREQ_RESET_GUST)
    SURFINSPLUIErain#hag18101650inskg/m2Instantaneous rain
    SURFINSNEIGEsnow#hag18401530inskg/m2Instantaneous snow
    SURFINSGRAUPELgrpl#hag20101750inskg/m2Instantaneous graupel
    CLSMINI.HUMI.RELrmn2m#hag2410112min0-1Minimum relative moisture at 2m over 3h
    CLSMAXI.HUMI.RELrmx2m#hag2420112max0-1Maximum relative moisture at 2m over 3h
    CLSRAFALES.POSfgwsgsmaxhag228022210maxm/sGust wind speed

    2D Surface, accumulated near-surface and soil variables

Note that all of these are coded with stepType=accum (a worked example of using the accumulated fields is given after the table)

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    S065RAYT SOL CLcssw#hy130041165accJ/m2SW net clear sky rad
    S065RAYT THER CLcslw#hy13105665accJ/m2LW net clear sky rad
    SURFACCGRAUPELgrplprgrplhag20101750acckg/m2Accumulated graupel
    SURFACCNEIGEsnowprsnhag18401530acckg/m2Accumulated snowfall
    SURFACCPLUIErainprrainhag18101650acckg/m2Accumulated rain
    SURFDIR NORM IRRdneridshag1403630accJ/m2Direct normal exposure
    SURFGBL NORM IRRgnerigshag19404130accJ/m2Global normal exposure
    SURFFLU.CHA.SENSshfhfsshag12200110accJ/m2Sensible heat flux
    SURFFLU.LAT.MEVAlhehfls_evahag132011930accJ/m2Latent heat flux through evaporation
    SURFFLU.LAT.MSUBlhsubhfls_sblhag244012020accJ/kgLatent Heat Sublimation
    SURFFLU.MEVAP.EAwevapevspsblhag2450160acckg/m2Water evaporation
    SURFFLU.MSUBL.NEsnsubsbl_snowhag24601620acckg/m2Snow sublimation
    SURFFLU.RAY.SOLAnswrsrsnshag1110490accJ/m2Net shortwave radiation flux (surface)
    SURFFLU.RAY.THERnlwrsrlnshag1120550accJ/m2Net longwave radiation flux (surface)
    SURFRAYT DIR SURswavrrsdsdirhag1160470accJ/m2Shortwave radiation flux
    SURFRAYT SOLA DEgradrsdshag1170430accJ/m2Global radiation flux
    SURFRAYT THER DElwavrrldshag1150540accJ/m2Longwave radiation flux
    SURFTENS.TURB.MEvflxtauvhag125021990accN/m2Momentum flux, v-component
    SURFTENS.TURB.ZOuflxtauuhag124021980accN/m2Momentum flux, u-component
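
Because these fields are accumulated from the start of the forecast (time range indicator 4, with P1 = 0 in practice), amounts over an interval are obtained by differencing two output times. A worked example under that assumption: the precipitation that fell between +3 h and +6 h is SURFACCPLUIE(+6 h) minus SURFACCPLUIE(+3 h); likewise, an accumulated flux in J/m2 divided by the interval length in seconds gives the mean flux in W/m2 over that interval.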

    2D TOA, diagnostic and accumulated variables, levelType=nominalTop

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    SOMMFLU.RAY.SOLAnswrtrsntnt1130490accJ/m2Net shortwave radiation flux(atmosph.top)
    SOMMFLU.RAY.THERnlwrtrlntnt1140550accJ/m2Net longwave radiation flux(atmosph.top)
    SOMMRAYT.SOLAIREnswrt#nt1130490insW/m2Net shortwave radiation flux(atmosph.top)
    SOMMRAYT.TERRESTnlwrt#nt1140550insW/m2Net longwave radiation flux(atmosph.top)
    SOMMRAYT SOL CLcsswrsntcsnt13004110accJ/m2TOA Net shortwave clear sky radiation(atmosph.top)
    SOMMRAYT THER CLcslwrlntcsnt1310560accJ/m2TOA Net longwave clear sky radiation(atmosph.top)
    TOPRAYT DIR SOMswavrrsdtnt1160470accJ/m2TOA Accumulated SW down radiation Parameter identifier was 117
    SOMMTBOZCLEARbtozcs#nt170-1-1-10-KBrightness temperature OZ clear
    SOMMTBOZCLOUDbtozcl#nt171-1-1-10-KBrightness temperature OZ cloud
    SOMMTBIRCLEARbtircs#nt172-1-1-10-KBrightness temperature IR clear
    SOMMTBIRCLOUDbtircl#nt173-1-1-10-KBrightness temperature IR cloud
    SOMMTBWVCLEARbtwvcs#nt174-1-1-10-KBrightness temperature WV clear
    SOMMTBWVCLOUDbtwvcl#nt175-1-1-10-KBrightness temperature WV cloud

    2D Surface, Postprocessed variables (fullpos)

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    SURFCAPE.POS.F00capecapehag1600760insJ/kgConvective available potential energy (CAPE)
    SURFCIEN.POS.F00cincinhag1650770insJ/kgConvective inhibition (CIN)
    SURFLIFTCONDLEVlcl#ac1670360insmLifting condensation level (LCL)
    SURFFREECONVLEVlfc#lfc1680360insmLevel of free convection (LFC)
    SURFEQUILIBRLEVlnb#lnb1690360insmLevel of neutral buoyancy (LNB)

    2D Surface, constant near-surface and soil variables

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    SPECSURFGEOPOTENzphis_shag60340insm2/s2Geopotential relative to mean sea level. "... contains a GRID POINT orography which is the interpolation of the departure orography"
    SURFIND.TERREMERlsmlsmhag812000ins0-1Land-sea mask
    SURFAEROS.SEAaers#hag2510131920inskg/kgSurface aerosol sea (Marine aerosols, locally defined GRIB)
    SURFAEROS.LANDaerl#hag2520131930inskg/kgSurface aerosol land (Continental aerosols, locally defined GRIB)
    SURFAEROS.SOOTaerc#hag2530131940inskg/kgSurface carbon aerosol (Carbone aerosols, locally defined GRIB)
    SURFAEROS.DESERTaerd#hag2540131950inskg/kgSurface aerosol desert (Desert aerosols, locally defined GRIB)
    SURFAEROS.VOLCAN##hag197-1-1-1-1Surface aerosol volcan (Stratospheric ash, to be locally defined GRIB)
    SURFAEROS.SULFAT##hag198-1-1-1-1Surface aerosol sulfate (Stratospheric sulfate, to be locally defined GRIB)
    SURFA.OF.OZONEao#hag2480141920inskg/kgA Ozone, First ozone profile (A), locally defined GRIB
    SURFB.OF.OZONEbo#hag2490141930inskg/kgB Ozone, Second ozone profile (B), locally defined GRIB
    SURFC.OF.OZONEco#hag2500141940inskg/kgC Ozone, Third ozone profile (C), locally defined GRIB
    PROFTEMPERATUREslt#dbl8523180insKSoil Temperature
    PROFRESERV.EAUsm#dbl8623200inskg/m2Deep Soil Wetness
    PROFPROP.RMAX.EAswv#dbl23823250inskg/m2Climate relaxed deep soil wetness
    PROFRESERV.GLACEwsoice#dbl19323220inskg/m2Deep soil ice

    2D variables on special surfaces

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    KT273ISOT_ALTITh#isot803627315insmAltitude of 0-degree isotherm
    KT263ISOT_ALTITh#isot803626315insmAltitude of -10-degree isotherm
    SURFISOTPW0.MALTh#isot0wb80360insmAltitude of iso-tprimw=0
    SURFTOT.WAT.VAPOwvintprwea5401640inskg/m2Total column integral water vapour
    WFPOWERINSwfpower_inswfpower_insea21102390insMWWind power production, instantaneous (LWINDFARM=.TRUE. in NAMPHY)
    WFPOWERACCwfpower_accwfpower_accea21102390accMJWind power production, accumulated (LWINDFARM=.TRUE. in NAMPHY)

    Postprocessed variables on different surface types

Through the postprocessing software fullpos, HARMONIE offers a number of variables postprocessed on different surface types. For the current choice of variables, surfaces and levels please see scr/Select_postp.pl.

    State variables and diagnostics on pressure levels, leveltype=isobaricInhPa

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    PNNNNNWIND.U.PHYuuapl33022NNNNNinsm/su-component of wind
    PNNNNNWIND.V.PHYvvapl34023NNNNNinsm/sv-component of wind
    PNNNNNTEMPERATURttapl11000NNNNNinsKTemperature
    PNNNNNHUMI.SPECIqhuspl51010NNNNNinskg/kgSpecific humidity
    PNNNNNLIQUID_WATcwat_condclwpl760183NNNNNinskg/kgSpecific cloud liquid water content
    PNNNNNSOLID_WATEciwc_condclipl580184NNNNNinskg/kgSpecific cloud ice water content
    PNNNNNCLOUD_FRACtcc#pl7106192NNNNNins0-1Total cloud cover
    PNNNNNSNOWsnow_cond#pl1840186NNNNNinskg/kgSpecific snow water content
    PNNNNNRAINrain_cond#pl1810185NNNNNinskg/kgSpecific rain water content
    PNNNNNGRAUPELgrpl_cond#pl2010132NNNNNinskg/kgSpecific graupel
    PNNNNNGEOPOTENTIzphipl6034NNNNNinsm2/s2Geopotential
    PNNNNNHUMI_RELATrhurpl5201192NNNNNins0-1Relative humidity
    PNNNNNTHETA_PRIMpaptthetaEpl14003NNNNNinsKPseudo-adiabatic potential temperature
    PNNNNNTHETA_VIRTvptmp#pl1760015NNNNNinsKVirtual potential temperature
    PNNNNNVERT.VELOCwwapl40029NNNNNinsm/sGeometrical vertical velocity
    PNNNNNPOT_VORTICpvpvpl40214NNNNNinsK m2/kg/sPotential vorticity
    PNNNNNABS_VORTICabsv#pl410210NNNNNinss-1Absolute vorticity
    PNNNNNDIVERGENCEd#pl440213NNNNNinss-1Relative divergence
    • NNNNN is in Pascals.
    • From FullPos documentation: "Warning: fields on pressure levels bigger or equal to 1000 hPa are written out with truncated names; for example, temperature at 1000 hPa is P00000TEMPERATURE while P00500TEMPERATURE could be as well the temperature at 5 hPa or the temperature at 1005 hPa!"

    State variables and diagnostics on height levels, levelType=heightAboveGround

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    HNNNNNWIND.U.PHYuuahag33022NNNNNinsm/su-component of wind
    HNNNNNWIND.V.PHYvvahag34023NNNNNinsm/sv-component of wind
    HNNNNNTEMPERATURttahag11000NNNNNinsKTemperature
    HNNNNNLIQUID_WATcwat_condclwhag760183NNNNNinskg/kgSpecific cloud liquid water content
    HNNNNNSOLID_WATEciwc_condclihag580184NNNNNinskg/kgSpecific cloud ice water content
    HNNNNNCLOUD_FRACtccclthag7106192NNNNNins0-1Total cloud cover
    HNNNNNSNOWsnow_cond#hag1840186NNNNNinskg/kgSpecific snow water content
    HNNNNNRAINrain_cond#hag1810185NNNNNinskg/kgSpecific rain water content
    HNNNNNGRAUPELgrpl_cond#hag2010132NNNNNinskg/kgSpecific graupel
    HNNNNNHUMI_RELATrhurhag5201192NNNNNins0-1Relative humidity
    HNNNNNPRESSUREpresphag1030NNNNNinsPaPressure
    • NNNNN is in meters.

    State variables and diagnostics on PV levels, GRIB1 level type 117, levelType=potentialVorticity

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    VNNNGEOPOTENTIELz#pv6034NNNinsm2/s2Geopotential
    VNNNTEMPERATUREt#pv11000NNNinsKTemperature
    VNNNPRESSUREpres#pv1030NNNinsPaPressure
    VNNNHUMI_RELATIVr#pv5201192NNNins0-1Relative humidity
    VNNNHUMI.SPECIFIq#pv51010NNNinskg/kgSpecific humidity
    VNNNWIND.U.PHYSu#pv33022NNNinsm/su-component of wind
    VNNNWIND.V.PHYSv#pv34023NNNinsm/sv-component of wind
    VNNNVITESSE_VERTomega#pv39028NNNinsPa/sPressure vertical velocity (DYNAMICS=h)
    VNNNVERT.VELOCITw#pv40029NNNinsm/sGeometrical vertical velocity (DYNAMICS=nh)
    VNNNTEMPE_POTENTpt#pv13002NNNinsKPotential temperature
    VNNNABS_VORTICITabsv#pv410210NNNinss-1Absolute vorticity
    VNNNDIVERGENCEd#pv440213NNNinss-1Relative divergence
    VNNNTHETAPRIMWpapt#pv14003NNNinsKPseudo-adiabatic potential temperature
    • "pv" stream is not available by default
    • NNN is in deci-PVU (1PVU = 1x10-6 K m2 kg-1 s-1) in FA files. PV levels must be in SI units in namelists
• GRIB1 levels are in milli-PVU. Currently gl does not convert deci-PVU (FA) to milli-PVU (GRIB1); see the worked example below.
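
As a worked example of these unit conventions: the dynamical tropopause at 2 PVU corresponds to 2.0E-6 K m2 kg-1 s-1 in the namelist (SI units), to NNN = 020 in the FA names (deci-PVU), e.g. V020TEMPERATURE, and to a GRIB1 level value of 2000 (milli-PVU), subject to the conversion caveat above.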

    State variables and diagnostics on Theta levels, GRIB1 level type 113, levelType=theta

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    TNNNGEOPOTENTIELz#th6034NNNinsm2/s2Geopotential
    TNNNTEMPERATUREt#th11000NNNinsKTemperature
    TNNNPRESSUREpres#th1030NNNinsPaPressure
    TNNNHUMI_RELATIVr#th5201192NNNins0-1Relative humidity
    TNNNHUMI.SPECIFIq#th51010NNNinskg/kgSpecific humidity
    TNNNWIND.U.PHYSu#th33022NNNinsm/su-component of wind
    TNNNWIND.V.PHYSv#th34023NNNinsm/sv-component of wind
    TNNNVITESSE_VERTomega#th39028NNNinsPa/sPressure vertical velocity (DYNAMICS=h)
    TNNNVERT.VELOCITw#th40029NNNinsm/sGeometrical vertical velocity (DYNAMICS=nh)
    TNNNABS_VORTICITabsv#th410210NNNinss-1Absolute vorticity
    TNNNPOT_VORTICITpv#th40214NNNinsK m2/kg/sPotential vorticity
    TNNNDIVERGENCEd#th440213NNNinss-1Relative divergence
    • "th" stream is not available by default
    • NNN is in Kelvin.

    FA fields without any default GRIB1 translation

Some very special fields are left without any default translation. Please see the gl documentation on how to add your own translation.

FA name | Unit | Comment
CUF1PRESSURE | | Coupling error field.
THETAPWP_FLUX | K m-4 s-1 | Instantaneous thetaprimwprim surface flux
CLPMOCON.MOD.XFU | kg kg-1 s-1 | MOCON model output
ATMONEBUL.TOTALE | | Accumulated total cloud cover.
ATMONEBUL.CONVEC | | Accumulated convective cloud cover.
ATMONEBUL.BASSE | | Accumulated low cloud cover.
ATMONEBUL.MOYENN | | Accumulated medium cloud cover.
ATMONEBUL.HAUTE | | Accumulated high cloud cover.
SURFCFU.Q.TURBUL | | Accumulated contribution of turbulence to Q.
SURFCFU.CT.TURBUL | | Accumulated contribution of turbulence to CpT
SUNSHI. DURATION | | Sunshine duration.
SURFFL.U TURBUL | | Contribution of turbulence to U.
SURFFL.V TURBUL | | Contribution of turbulence to V.
SURFFL.Q TURBUL | | Contribution of turbulence to Q.
SURFFL.CT TURBUL | | Contribution of turbulence to CpT
SNNNSRC | | Second order flux.

    Variables postprocessed by gl

The following fields can be generated by gl from a history file and are thus not necessarily available as FA fields in Harmonie's FA output. When calculating these post-processed fields, make sure the required fields to derive them are in the input files! For details, check util/gl/grb/postprocess.f90 and the routines called therein.

    Single level fields

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
MSLPRESSUREprespslhas10300insPaMSLP. gl calculates MSLP independent of AROME/FullPos
    #tdtdhag170062insKDew point temperature
    #vis#hag2001900insmVisibility
    #wdir#ttt31020lllinsDeg. trueWind direction. gl calculates based on u[33,ttt,lll] and v[34,ttt,lll] wind components
    #ws#ttt32021lllinsm/sWind speed. gl calculates based on u[33,ttt,lll] and v[34,ttt,lll] wind components
TOT.WATER.PRECIPtpprhag610180acckg/m2Total precipitation, gl calculates TP[61,105,0] = rain[181,105,0] + snow[184,105,0] + graupel[201,105,0] + hail[204,105,0]
TOT.SOLID.PRECIPtpsolidprsolidhag185012000acckg/m2Total solid precipitation, gl calculates [185,105,0] = snow[184,105,0] + graupel[201,105,0] + hail[204,105,0]
    #mldzmlahag6701930insmMixed layer depth/boundary layer height
    #tcc#hag71061922ins0-1Fog, cloud fraction of lowest model level
    #icei#hag1350ins-Icing index
    #atmiceg#hy??01205insm/sIcing index, Atmospheric ice growth rate
    #icei2#hag/?134011940ins-Icing index version 2
    #psct#hag/ct?1360400insKPseudo satellite image, cloud top temperature (infrared)
    #pstb#hag137041980insKPseudo satellite image, water vapour brightness temperature
    #pstbc#hag138041990insKPseudo satellite image, water vapour br. temp. + correction for clouds
    #pscw#hag139042000ins-Pseudo satellite image, cloud water reflectivity (visible)
    #prtp#hag14401190inscodePrecipitation type, 0:drizzle, 1:rain, 2:sleet, 3:snow, 4:freezing drizzle, 5:freezing rain, 6:graupel, 7:hail
    #fg#ttt2280222lllmaxm/sGust wind speed, calculated from ugst & vgst on corresponding level & levelType
    #hti#hag1480171930ins-Helicopter Triggered lightning Index
    #transmit#hag149061990ins-Transmittance
    #cat#hag145019220ins-|%CAT (clear air turbulence) index
    #bvf#hag1590192020inss-1Brunt Vaisala frequency

    Integrated quantities

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    TOT.NEB.ICEciwc_vintcliviea5801700inskg/m2Vertical integral of cloud ice
    TOT.NEB.WATcwat_vintclqviea7601690inskg/m2Vertical integral of cloud liquid water
    #msca#ea133061970ins0-1Mask of significant cloud amount
    #cape#hag1600760insJ/kgConvective Available Potential Energy, comes in two flavours, cape_version=1|2, where the second is compatible with the ECMWF version
#cin#hag1650770insJ/kgConvective inhibition, comes in two flavours, cape_version=1|2, where the second is compatible with the ECMWF version
    #rain_vintclrviea18101450inskg/m2Vertical integral of rain
    #snow_vintclsviea18401460inskg/m2Vertical integral of snow
#grpl_vintclgviea20101740inskg/m2Vertical integral of graupel
    #cb#ea18606110insmCloud base
    #ct#ea18706120insmCloud top
    #cb38#hag?189061983insmCloud base >3/8
    #lgt#ea2090171920insflash/hLightning intensity
    #lmxws#ea/mw?1420360insmLevel of max wind speed
    #maxucol#ea1640220insm/sMax u-component in column
    #maxvcol#ea1770230insm/sMax v-component in column
    #lmxice#ea143011990insmIcing index, Level of max icing
    #mxicegr#ea141012040ins1Icing index, Max icing growth index (0-4)
    #blice#ea14601960insmIcing index, Bottom level of icing
    #tlice#ea14701950insmIcing index, Top level of icing
    #cat_maxlev#ea1500192010insmLevel of max CAT index
    #cat_max#ea1510191970ins-Max CAT index
    #cat_b#ea1520191980insmBottom level of CAT
    #cat_t#ea1530191990insmTop level of CAT

    GRIB encoding information

    Time units, WMO code table 4

    The following time units are used to encode GRIB edition 1 data

Code | Unit
0 | Minute
1 | Hour
13 | 15 minutes
14 | 30 minutes

    Time range indicator, WMO code TABLE 5

Code | abbr | Definition
0 | ins | Forecast product valid for reference time + P1 (P1 > 0), or uninitialized analysis product for reference time (P1 = 0)
2 | min/max | Product with a valid time ranging between reference time + P1 and reference time + P2. Used for min/max values
3 | avg | Average (reference time + P1 to reference time + P2)
4 | acc | Accumulation (reference time + P1 to reference time + P2), product considered valid at reference time + P2

Note that fields that are available as both instantaneous and accumulated values, e.g. rain, have the same parameter values and can only be distinguished by the time range indicator.
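
A quick way to check which variant a file contains is to list the relevant keys for every message, e.g.:

    grib_ls -p indicatorOfParameter,indicatorOfTypeOfLevel,level,timeRangeIndicator,shortName fcYYYYMMDDHH+lll_grib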

    Level types, WMO Code table 3

level type | name | abbr | WMO/HIRLAM type definition | Units | notes
001 | surface | sfc | Ground or water surface | | WMO
002 | cloudBase | cb | Cloud base level | | WMO
003 | cloudTop | ct | Level of cloud tops | | WMO
004 | isothermZero | isot0 | Level of 0°C isotherm | | WMO
005 | adiabaticCondensation | ac | Level of adiabatic condensation lifted from the surface | | WMO
006 | maxWind | mw | Maximum wind level | | WMO
007 | tropopause | tp | Tropopause | | WMO
008 | nominalTop | nt | Top of atmosphere | | WMO
020 | isothermal | isot | Isothermal level | Temperature in 1/100 K | WMO
100 | isobaricInhPa | pl | Isobaric level | hPa | WMO
102 | meanSea | ms | At mean sea level | |
103 | heightAboveSea | has | Specified altitude above mean sea level | Altitude in m | WMO
105 | heightAboveGround | hag | Specified height above ground | Altitude in m | WMO
107 | sigma | | Sigma level | Sigma value in 1/10000 | WMO
109 | hybrid | hy | Hybrid level | | WMO
112 | depthBelowLandLayer | dbl | | |
113 | theta | th | Isentropic (theta) level | Potential temperature in K | WMO
117 | potentialVorticity | pv | Potential vorticity surface | 10-9 K m2 kg-1 s-1 | WMO
192 | isothermZeroWetBulb | isot0wb | | |
200 | entireAtmosphere | ea | Entire atmosphere (considered as a single layer) | | WMO, vertically integrated
| levelFreeConvection | lfc | as heightAboveGround in GRIB1 | |
| levelNeutralBuoyancy | lnb | as heightAboveGround in GRIB1 | |
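
These GRIB1 level type codes can also be used directly when filtering converted output with the ecCodes tools; an illustrative sketch using 109 (hybrid) from the table above:

    # list all messages stored on hybrid (model) levels
    grib_ls -w indicatorOfTypeOfLevel=109 fcYYYYMMDDHH+lll_grib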

    Harmonie GRIB1 code table 2 version 253 - Indicator of parameter

Below is the indicator of parameter code table for the Harmonie model. It is based on the WMO code table 2 version 3 with local parameters added. Parameter indicators 128-254 are reserved for originating centre use; parameter indicators 000-127 should not be altered. In HARMONIE, radiation fluxes are assumed positive downwards (against the recommendation of the WMO).

    ParDescriptionSI Units
    000Reservedn/a
    001PressurePa
    002Pressure reduced to MSLPa
    003Pressure tendencyPa s-1
    004Potential vorticityK m2 kg-1 s-1
    005ICAO Standard Atmosphere reference heightm
    006Geopotentialm2 s-2
    007Geopotential heightgpm
    008Geometrical heightm
    009Standard deviation of heightm
    010Total ozoneDobson
    011TemperatureK
    012Virtual temperatureK
    013Potential temperatureK
    014Pseudo-adiabatic potential temperatureK
    015Maximum temperatureK
    016Minimum temperatureK
    017Dew-point temperatureK
    018Dew-point depression (or deficit)K
    019Lapse rateK m-1
    020Visibilitym
    021Radar spectra (1)-
    022Radar spectra (2)-
    023Radar spectra (3)-
    024Parcel lifted index (to 500 hPa)K
    025Temperature anomalyK
    026Pressure anomalyPa
    027Geopotential height anomalygpm
    028Wave spectra (1)-
    029Wave spectra (2)-
    030Wave spectra (3)-
    031Wind directionDegree true
    032Wind speedm s-1
    033u-component of windm s-1
    034v-component of windm s-1
    035Stream functionm2 s-1
    036Velocity potentialm2 s-1
    037Montgomery stream functionm2 s-1
    038Sigma coordinate vertical velocitys-1
    039Vertical velocityPa s-1
    040Vertical velocitym s-1
    041Absolute vorticitys-1
    042Absolute divergences-1
    043Relative vorticitys-1
    044Relative divergences-1
    045Vertical u-component shears-1
    046Vertical v-component shears-1
    047Direction of currentDegree true
    048Speed of currentm s-1
    049u-component of currentm s-1
    050v-component of currentm s-1
    051Specific humiditykg kg-1
    052Relative humidity%
    053Humidity mixing ratiokg kg-1
    054Precipitable waterkg m-2
    055Vapor pressurePa
    056Saturation deficitPa
    057Evaporationkg m-2
    058Cloud icekg m-2
    059Precipitation ratekg m-2 s-1
    060Thunderstorm probability%
    061Total precipitationkg m-2
    062Large scale precipitationkg m-2
    063Convective precipitationkg m-2
    064Snowfall rate water equivalentkg m-2 s-1
    065Water equivalent of accumulated snow depthkg m-2
    066Snow depthm
    067Mixed layer depthm
    068Transient thermocline depthm
    069Main thermocline depthm
    070Main thermocline anomalym
    071Total cloud cover%
    072Convective cloud cover%
    073Low cloud cover%
    074Medium cloud cover%
    075High cloud cover%
    076Cloud waterkg m-2
    077Best lifted index (to 500 hPa)K
    078Convective snowkg m-2
    079Large scale snowkg m-2
    080Water temperatureK
    081Land cover (1 = land, 0 = sea)Proportion
    082Deviation of sea level from meanm
    083Surface roughnessm
    084Albedo%
    085Soil temperatureK
    086Soil moisture contentkg m-2
    087Vegetation%
    088Salinitykg kg-1
    089Densitykg m-3
    090Water run-offkg m-2
    091Ice cover (1 = ice, 0 = no ice)Proportion
    092Ice thicknessm
    093Direction of ice driftDegree true
    094Speed of ice driftm s-1
    095u-component of ice driftm s-1
    096v-component of ice driftm s-1
    097Ice growth ratem s-1
    098Ice divergences-1
    099Snow meltkg m-2
    100Significant height of combined wind waves and swellm
    101Direction of wind wavesDegree true
    102Significant height of wind wavesm
    103Mean period of wind wavess
    104Direction of swell wavesDegree true
    105Significant height of swell wavesm
    106Mean period of swell wavess
    107Primary wave directionDegree true
    108Primary wave mean periods
    109Secondary wave directionDegree true
    110Secondary wave mean periods
    111Net short-wave radiation flux (surface)W m-2
    112Net long-wave radiation flux (surface)W m-2
    113Net short-wave radiation flux (top of atmosphere)W m-2
    114Net long-wave radiation flux (top of atmosphere)W m-2
    115Long-wave radiation fluxW m-2
    116Short-wave radiation fluxW m-2
    117Global radiation fluxW m-2
    118Brightness temperatureK
    119Radiance (with respect to wave number)W m-1 sr-1
    120Radiance (with respect to wave length)W m-3 sr-1
    121Latent heat fluxW m-2
    122Sensible heat fluxW m-2
    123Boundary layer dissipationW m-2
    124Momentum flux, u-componentN m-2
    125Momentum flux, v-componentN m-2
    126Wind mixing energyJ
    127Image data-
    128Analysed RMS of PHI (CANARI)m2 s-2
    129Forecasted RMS of PHI (CANARI)m2 s-2
    130SW net clear sky radW m-2
    131LW net clear sky radW m-2
    132Latent heat flux through evaporationW m-2
    133Mask of significant cloud amount0-1
    134Icing index version 2-
    135Icing indexCode table
    136Pseudo satellite image, cloud top temperature (infrared)K
    137Pseudo satellite image, water vapour brightness temperatureK
    138Pseudo satellite image, water vapour br. temp. + correction for cloudsK
    139Pseudo satellite image, cloud water reflectivity (visible)?
    140Direct normal irradianceJ m-2
    141Max icing growth index-
    142Level of max wind speedm
    143Level of max icingm
144Precipitation typeCode table
    145CAT index- / %
    146Bottom level of icingm
    147Top level of icingm
148Helicopter Triggered lightning Index-
    149Transmittance-
    150Level of max CAT indexm
    151Max CAT index-
    152Bottom level of CATm
    153Top level of CATm
    154Max Wind speedm s-1
    155Available#
    156Available#
    157Available#
    158Surface downward moon radiationW m-2
159Brunt Vaisala frequencys-1
    160CAPEJ kg-1
    161AROME hail diagnostic%
    162U-momentum of gusts out of the modelm s-1
    163V-momentum of gusts out of the modelm s-1
    164Max u-component in columnm s-1
    165Convective inhibition (CIN)J kg-1
    166MOCON out of the modelkg/kg s-1
    167Lifting condensation level (LCL)m
    168Level of free convection (LFC)m
169Level of neutral buoyancy (LNB)m
    170Brightness temperature OZ clearK
    171Brightness temperature OZ cloudK
    172Brightness temperature IR clearK
    173Brightness temperature IR cloudK
    174Brightness temperature WV clearK
    175Brightness temperature WV cloudK
    176Virtual potential temperatureK
    177Max v-component in columnm s-1
    178Available#
    179Available#
    180Available#
    181Rainkg m-2
    182Stratiform Rainkg m-2
    183Convective Rainkg m-2
    184Snowkg m-2
    185Total solid precipitationkg m-2
    186Cloud basem
    187Cloud topm
    188Fraction of urban landProportion
    189Cloud base >3/8m
    190Snow AlbedoProportion
    191Snow densitykg/m3
    192Water on canopykg/m2
    193Soil icekg/m2
    194Global normal exposureJ/m2
    195Gravity wave stress U-compN/m2
    196Gravity wave stress V-compN/m2
    197Available#
    198Available#
    199Vegetation type-
    200TKEm2 s-2
    201Graupelkg m-2
    202Stratiform Graupelkg m-2
    203Convective Graupelkg m-2
    204Hailkg m-2
    205Stratiform Hailkg m-2
    206Convective Hailkg m-2
    207Available#
    208Available#
    209Lightningflash h-1
    210Simulated reflectivitydBz
    211Wind power productionMW or MJ
    212Pressure departurePa
    213Vertical divergences-1
    214UD_OMEGAms-1?
    215DD_OMEGAms-1?
    216UDMESHFRAC-
    217DDMESHFRAC-
    218PSHICONVCL-
    219Surface albedo for non snow covered areasProportion
    220Standard deviation of orography * gm2 s-2
    221Anisotropy coeff of topography-
    222Direction of main axis of topographyrad
    223Roughness length of bare surface * gm2 s-2
    224Roughness length for vegetation * gm2 s-2
    225Fraction of clay within soilProportion
    226Fraction of sand within soilProportion
    227Maximum proportion of vegetationProportion
    228Gust wind speedm s-1
    229Albedo of bare groundProportion
    230Albedo of vegetationProportion
    231Stomatal minimum resistances/m
    232Leaf area indexm2/m2
    233Thetaprimwprim surface fluxKm/s
    234Dominant vegetation index-
    235Surface emissivity-
    236Maximum soil depthm
    237Soil depthm
    238Soil wetnesskg/m2
    239Thermal roughness length * gm2 s-2
240Resistance to evapotranspirations/m
    241Minimum relative moisture at 2 meters%
    242Maximum relative moisture at 2 meters%
    243Duration of total precipitationss
    244Latent Heat SublimationW/m2
    245Water evaporationkg/m2
    246Snow sublimationkg/m2
    247Snow history???
    248A OZONEkg kg-1
    249B OZONEkg kg-1
    250C OZONEkg kg-1
    251Surface aerosol seakg kg-1
    252Surface aerosol landkg kg-1
    253Surface aerosol sootkg kg-1
    254Surface aerosol desertkg kg-1
    255Missing valuen/a

    SURFEX output Harmonie GRIB1 code table 2 version 001

    Levels are used in the conversion of SURFEX output to GRIB to indicate tile/patch/type/level:

level | description
300 | Extra, yet unknown SURFEX variables
301 | Fraction of each vegetation type on PATCH 1
302 | Fraction of each vegetation type on PATCH 2
303 | Fraction of each vegetation type, cy43 (ECOCLIMAP-SG)
600 | Physiography fields?
720 | Sea ice
730 | Sea ice (TICE_LL)
755 | Precip
760 | Sea
770 | In addition to FLake (or instead of it)
780 | FLake
790 | Patch (*_P fields)
800 | ISBA
810 | Gridpoint average
820 | Surface boundary multi layer fields
830 | ISBA - patch 1 (X001*, open land)
840 | ISBA - patch 2 (X002*, forest)
950 | Town energy balance model (TEB)

    A small selection of fields available in the SURFEX output files is shown below.

    FA nameshortNameNCnamelvTiOPlevsTunitsdescription
    FRAC_SEA#sftofhag32300ins0-1Fraction of sea
    FRAC_WATER#sftlafhag33300ins0-1Fraction of water
    FRAC_NATURE#sftnfhag34300ins0-1Fraction of nature
    FRAC_TOWN#sfturfhag35300ins0-1Fraction of town
    COVER001#lsm10insLAND SEA MASK
    COVER002-COVER243##002-2430insECOCLIMAP I cover types
    COVER255##2550insECOCLIMAP I MY_COVER type
    COVER301-COVER573##001-254 & 001-0190insECOCLIMAP II cover types
    ZS#oroghag80insmOro hgt.
    SST#tosms110insKSST
    SIC#siconcams910ins0-1SIC
    T2M_SEA#tas_seahag11760insKT2m sea
    Q2M_SEA#huss_seahag51760inskg kg-1Q2m sea
    MER10M_SEA#vas_seahag34760insm s-1V10m sea
    ZON10M_SEA#uas_seahag33760insm s-1U10m sea
    T2M_WAT#tas_waterhag11772insKT2m water
    Q2M_WAT#huss_waterhag51770inskg kg-1Q2m water
    MER10M_WAT#vas_waterhag34770insm s-1V10m water
    ZON10M_WAT#uas_waterhag33770insm s-1U10m water
    DSNTISBA#sndhag660insmSnow depth
    WSNTISBA#snwhag130inskg m-2Total snow reservoir
    T2M_ISBA#tas_naturehag11802insKT2m isba
    Q2M_ISBA#huss_naturehag51802inskg kg-1Q2m isba
    X001T2M_P#tashag11832insKT2m of patch 1
    X002T2M_P#tashag11842insKT2m of patch 2
    T2M_TEB#tas_townhag11950insKT2m town
    T2MMAX_TEB#tasmax_townhag15950maxKMax Temp for town
    T2MMIN_TEB#tasmin_townhag16950minKMin Temp for town
    TGL#tg_LLLhag11800+insKTemperature of soil layer L(isba)
    WGL#wsa_LLLhag86800+insm3 m-3Liquid volumetric water content of soil layer L
    WGIL#isa_LLLhag193800+insm3 m-3Frozen volumetric water content of soil layer L
    WR#wrhag12800inskg m-2Liquid water retained by foliage (isba)
    DGL#dsoil_LLLhag23300insmSoil depth of soil layer L
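
The level codes above are what one matches on when extracting SURFEX fields from the converted GRIB1 files. A hedged sketch using the T2M_SEA entry from the table (indicator of parameter 11 on level 760, the sea tile); the output file name is illustrative:

    # extract the 2 m temperature over the sea tile from a SURFEX GRIB1 file
    grib_copy -w indicatorOfParameter=11,level=760 fcYYYYMMDDHH+lll_grib_sfx t2m_sea.grib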

    Harmonie GRIB1 code table 2 version 210

    Used for aerosol fields

    GRIB

    NetCDF

    +

    Parameter list and GRIB definitions

    HARMONIE system output

The HARMONIE system writes its primary output, in FA format, to the upper air history files ICMSHHARM+llll and the SURFEX history files ICMSHHARM+llll.sfx, where HARM is the four-character experiment identifier set in the configuration file config_exp.h and llll is normally the forecast length in hours. The files are designed to be complete snapshots of the respective model state for a particular point in time. In addition, more model output, including post-processing/diagnostic fields such as model diagnostics or pressure level diagnostics, can be written out during the forecast model integration, also in FA format, as PFHARMDOMAIN+llll. The FA files can be considered internal format files. All of them can be converted to GRIB files during the run for external usage. The name convention is as follows:

    GRIB1 table 2 version in HARMONIE

To avoid conflicts with archived HIRLAM data, HARMONIE uses version 253 of table 2. The table is based on the standard WMO version 3 of table 2, and positions 000-127 are kept the same as in the WMO table. Note that accumulated and instantaneous versions of the same parameter differ only by the time range indicator. It is thus not sufficient to specify parameter, type and level when you refer to an accumulated parameter; the time range indicator has to be included as well.

The translation of SURFEX files to GRIB1 is still incomplete and contains several WMO violations. This is not changed in the current release but will be revised later. However, the upper air history file also includes the most common surface parameters and should be sufficient for most users.

The current table 2 version 253 definition files for gribapi can be found in `util/glgrib_api/definitions/`. These local definition files assume centre=233 (Dublin) and should be copied to your own GRIB-API installation. You are strongly recommended to set your own originating centre code for operational usage of the data.

    GRIB2 in HARMONIE

The possibility to convert to GRIB2 has been introduced in release-43h2. So far the conversion is restricted to atmospheric history and fullpos files only. To get the output in GRIB2 set ARCHIVE_FORMAT=GRIB2 in ecf/config_exp.h. Please note that even if ARCHIVE_FORMAT=GRIB2 is selected, SURFEX files will still be converted to GRIB1 (for the time being). To convert from GRIB1 to GRIB2 using grib_filter we have to tell ecCodes how to translate the parameters. This is done by using the internal HARMONIE tables and setting

    export ECCODES_DEFINITION_PATH=$SOME_PATH_TO_GL/gl/definitions:$SOME_PATH_TO_ECCODES/share/eccodes/definitions

Note that there are a few parameters that are not translated to GRIB2, and these have to be excluded explicitly.

    List of parameters

    header abbreviations in the tables:

abbr. | description | see table
lvT | levelType | level types
iOP | indicatorOfParameter | indicator of parameter
d | discipline |
pC | parameterCategory |
pN | parameterNumber |
lev | level |
sT | stepType | time range indicator

    3D model state variables on model levels (1-NLEV), levelType=hybrid

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    SNNNHUMI.SPECIFIqhushy510101inskg/kgSpecific humidity
    SNNNLIQUID_WATERcwat_condclwhy7601831inskg/kgSpecific cloud liquid water content
    SNNNSOLID_WATERciwc_condclihy5801841inskg/kgSpecific cloud ice water content
    SNNNSNOWsnow_cond#hy18401861inskg/kgSpecific snow water content
    SNNNRAINrain_cond#hy18101851inskg/kgSpecific rain water content
    SNNNGRAUPELgrpl_cond#hy20101321inskg/kgSpecific graupel
    SNNNTKEtketkehy200019111insJ/kgTurbulent Kinetic Energy
    SNNNCLOUD_FRACTItccclthy71061921ins0-1Total cloud cover
    SNNNPRESS.DEPARTpdep#hy2120381insPaPressure departure
    SNNNTEMPERATUREttahy110001insKTemperature
    SNNNVERTIC.DIVERvdiv#hy213021921inss-1Vertical Divergence
    SNNNWIND.U.PHYSuuahy330221insm/su-component of wind
    SNNNWIND.V.PHYSvvahy340231insm/sv-component of wind

    2D Surface, prognostic/diagnostic near-surface and soil variables, levelType=heightAboveGround

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    SURFPRESSIONprespshag10300insPaSurface pressure
    SURFTEMPERATUREtts_radhag110000insKSurface temperature
    CLSTEMPERATUREttashag110002insKTemperature at 2m
    CLSMAXI.TEMPERATtmaxtasmaxhag150002maxKMaximum temperature (FREQ_RESET_TEMP)
    CLSMINI.TEMPERATtmintasminhag160002minKMinimum temperature (FREQ_RESET_TEMP)
    CLSVENT.ZONALuuashag3302210insm/su-component of wind at 10m, relative to model coordinates
    CLSVENT.MERIDIENvvashag3402310insm/sv-component of wind at 10m, relative to model coordinates
    CLSHUMI.SPECIFIQqhusshag510102inskg/kgSpecific humidity at 2m
    CLSHUMI.RELATIVErhurshag52011922ins0-1Relative humidity at 2m
    SURFRESERV.NEIGEsdwesnwhag6501600inskg/m2Snow depth water equivalent
    CLPMHAUT.MOD.XFUmldzmlahag6701930insmHeight (in meters) of the PBL out of the model
    SURFNEBUL.TOTALEtccclt_inshag71061920ins0-1Total cloud cover
    SURFNEBUL.CONVECcccclc_inshag72061930ins0-1Convective cloud cover
    SURFNEBUL.BASSElcccll_inshag73061940ins0-1Low cloud cover
    SURFNEBUL.MOYENNmccclm_inshag74061950ins0-1Medium cloud cover
    SURFNEBUL.HAUTEhccclh_inshag75061960ins0-1High cloud cover
    SURFRAYT.SOLAIREswavr#hag1160470insW/m2Instantaneous surface solar radiation (SW down global) Parameter identifier was 116, again is???
    SURFRAYT.TERRESTlwavr#hag1150540insW/m2Instantaneous longwave radiation flux
    SURFCAPE.MOD.XFUcapecapehag1600760insJ/kgModel output CAPE (not calculated by AROME physics)
    SURFDIAGHAILxhail#hag161012030ins0-1AROME hail diagnostic, LXXDIAGH = .TRUE.
    CLSU.RAF.MOD.XFUugstugshag162022310maxm/sU-momentum of gusts from the model. LXXGST = .TRUE. in NAMXFU. gives gust between current and previous output time step (FREQ_RESET_GUST)
    CLSV.RAF.MOD.XFUvgstvgshag163022410maxm/sV-momentum of gusts from the model. LXXGST = .TRUE. in NAMXFU. gives gust between current and previous output time step (FREQ_RESET_GUST)
    SURFINSPLUIErain#hag18101650inskg/m2Instantaneous rain
    SURFINSNEIGEsnow#hag18401530inskg/m2Instantaneous snow
    SURFINSGRAUPELgrpl#hag20101750inskg/m2Instantaneous graupel
    CLSMINI.HUMI.RELrmn2m#hag2410112min0-1Minimum relative moisture at 2m over 3h
    CLSMAXI.HUMI.RELrmx2m#hag2420112max0-1Maximum relative moisture at 2m over 3h
    CLSRAFALES.POSfgwsgsmaxhag228022210maxm/sGust wind speed

    2D Surface, accumulated near-surface and soil variables

    Note that all these are coded with stepType=accum

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    S065RAYT SOL CLcssw#hy130041165accJ/m2SW net clear sky rad
    S065RAYT THER CLcslw#hy13105665accJ/m2LW net clear sky rad
    SURFACCGRAUPELgrplprgrplhag20101750acckg/m2Accumulated graupel
    SURFACCNEIGEsnowprsnhag18401530acckg/m2Accumulated snowfall
    SURFACCPLUIErainprrainhag18101650acckg/m2Accumulated rain
    SURFDIR NORM IRRdneridshag1403630accJ/m2Direct normal exposure
    SURFFLU.CHA.SENSshfhfsshag12200110accJ/m2Sensible heat flux
    SURFFLU.LAT.MEVAlhehfls_evahag132011930accJ/m2Latent heat flux through evaporation
    SURFFLU.LAT.MSUBlhsubhfls_sblhag244012020accJ/kgLatent Heat Sublimation
    SURFFLU.MEVAP.EAwevapevspsblhag2450160acckg/m2Water evaporation
    SURFFLU.MSUBL.NEsnsubsbl_snowhag24601620acckg/m2Snow sublimation
    SURFFLU.RAY.SOLAnswrsrsnshag1110490accJ/m2Net shortwave radiation flux (surface)
    SURFFLU.RAY.THERnlwrsrlnshag1120550accJ/m2Net longwave radiation flux (surface)
    SURFRAYT DIR SURswavrrsdsdirhag1160470accJ/m2Shortwave radiation flux
    SURFRAYT SOLA DEgradrsdshag1170430accJ/m2Global radiation flux
    SURFRAYT THER DElwavrrldshag1150540accJ/m2Longwave radiation flux
    SURFTENS.TURB.MEvflxtauvhag125021990accN/m2Momentum flux, v-component
    SURFTENS.TURB.ZOuflxtauuhag124021980accN/m2Momentum flux, u-component

    2D TOA, diagnostic and accumulated variables, levelType=nominalTop

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    SOMMFLU.RAY.SOLAnswrtrsntnt1130490accJ/m2Net shortwave radiation flux(atmosph.top)
    SOMMFLU.RAY.THERnlwrtrlntnt1140550accJ/m2Net longwave radiation flux(atmosph.top)
    SOMMRAYT.SOLAIREnswrt#nt1130490insW/m2Net shortwave radiation flux(atmosph.top)
    SOMMRAYT.TERRESTnlwrt#nt1140550insW/m2Net longwave radiation flux(atmosph.top)
    SOMMRAYT SOL CLcsswrsntcsnt13004110accJ/m2TOA Net shortwave clear sky radiation(atmosph.top)
    SOMMRAYT THER CLcslwrlntcsnt1310560accJ/m2TOA Net longwave clear sky radiation(atmosph.top)
    TOPRAYT DIR SOMswavrrsdtnt1160470accJ/m2TOA Accumulated SW down radiation Parameter identifier was 117
    SOMMTBOZCLEARbtozcs#nt170-1-1-10-KBrightness temperature OZ clear
    SOMMTBOZCLOUDbtozcl#nt171-1-1-10-KBrightness temperature OZ cloud
    SOMMTBIRCLEARbtircs#nt172-1-1-10-KBrightness temperature IR clear
    SOMMTBIRCLOUDbtircl#nt173-1-1-10-KBrightness temperature IR cloud
    SOMMTBWVCLEARbtwvcs#nt174-1-1-10-KBrightness temperature WV clear
    SOMMTBWVCLOUDbtwvcl#nt175-1-1-10-KBrightness temperature WV cloud

    2D Surface, Postprocessed variables (fullpos)

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    SURFCAPE.POS.F00capecapehag1600760insJ/kgConvective available potential energy (CAPE)
    SURFCIEN.POS.F00cincinhag1650770insJ/kgConvective inhibition (CIN)
    SURFLIFTCONDLEVlcl#ac1670360insmLifting condensation level (LCL)
    SURFFREECONVLEVlfc#lfc1680360insmLevel of free convection (LFC)
    SURFEQUILIBRLEVlnb#lnb1690360insmLevel of neutral buoyancy (LNB)

    2D Surface, constant near-surface and soil variables

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    SPECSURFGEOPOTENzphis_shag60340insm2/s2Geopotential relative to mean sea level. "... contains a GRID POINT orography which is the interpolation of the departure orography"
    SURFIND.TERREMERlsmlsmhag812000ins0-1Land-sea mask
    SURFAEROS.SEAaers#hag2510131920inskg/kgSurface aerosol sea (Marine aerosols, locally defined GRIB)
    SURFAEROS.LANDaerl#hag2520131930inskg/kgSurface aerosol land (Continental aerosols, locally defined GRIB)
    SURFAEROS.SOOTaerc#hag2530131940inskg/kgSurface carbon aerosol (Carbone aerosols, locally defined GRIB)
    SURFAEROS.DESERTaerd#hag2540131950inskg/kgSurface aerosol desert (Desert aerosols, locally defined GRIB)
    SURFAEROS.VOLCAN##hag197-1-1-1-1Surface aerosol volcan (Stratospheric ash, to be locally defined GRIB)
    SURFAEROS.SULFAT##hag198-1-1-1-1Surface aerosol sulfate (Stratospheric sulfate, to be locally defined GRIB)
    SURFA.OF.OZONEao#hag2480141920inskg/kgA Ozone, First ozone profile (A), locally defined GRIB
    SURFB.OF.OZONEbo#hag2490141930inskg/kgB Ozone, Second ozone profile (B), locally defined GRIB
    SURFC.OF.OZONEco#hag2500141940inskg/kgC Ozone, Third ozone profile (C), locally defined GRIB
    PROFTEMPERATUREslt#dbl8523180insKSoil Temperature
    PROFRESERV.EAUsm#dbl8623200inskg/m2Deep Soil Wetness
    PROFPROP.RMAX.EAswv#dbl23823250inskg/m2Climate relaxed deep soil wetness
    PROFRESERV.GLACEwsoice#dbl19323220inskg/m2Deep soil ice

    2D variables on special surfaces

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    KT273ISOT_ALTITh#isot803627315insmAltitude of 0-degree isotherm
    KT263ISOT_ALTITh#isot803626315insmAltitude of -10-degree isotherm
    SURFISOTPW0.MALTh#isot0wb80360insmAltitude of iso-tprimw=0
    SURFTOT.WAT.VAPOwvintprwea5401640inskg/m2Total column integral water vapour
    WFPOWERINSwfpower_inswfpower_insea21102390insMWWind power production, instantaneous (LWINDFARM=.TRUE. in NAMPHY)
    WFPOWERACCwfpower_accwfpower_accea21102390accMJWind power production, accumulated (LWINDFARM=.TRUE. in NAMPHY)

    Postprocessed variables on different surface types

Through the postprocessing software fullpos, HARMONIE offers a number of variables postprocessed on different surface types. For the current choice of variables, surfaces and levels please see scr/Select_postp.pl.

    State variables and diagnostics on pressure levels, leveltype=isobaricInhPa

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    PNNNNNWIND.U.PHYuuapl33022NNNNNinsm/su-component of wind
    PNNNNNWIND.V.PHYvvapl34023NNNNNinsm/sv-component of wind
    PNNNNNTEMPERATURttapl11000NNNNNinsKTemperature
    PNNNNNHUMI.SPECIqhuspl51010NNNNNinskg/kgSpecific humidity
    PNNNNNLIQUID_WATcwat_condclwpl760183NNNNNinskg/kgSpecific cloud liquid water content
    PNNNNNSOLID_WATEciwc_condclipl580184NNNNNinskg/kgSpecific cloud ice water content
    PNNNNNCLOUD_FRACtcc#pl7106192NNNNNins0-1Total cloud cover
    PNNNNNSNOWsnow_cond#pl1840186NNNNNinskg/kgSpecific snow water content
    PNNNNNRAINrain_cond#pl1810185NNNNNinskg/kgSpecific rain water content
    PNNNNNGRAUPELgrpl_cond#pl2010132NNNNNinskg/kgSpecific graupel
    PNNNNNGEOPOTENTIzphipl6034NNNNNinsm2/s2Geopotential
    PNNNNNHUMI_RELATrhurpl5201192NNNNNins0-1Relative humidity
    PNNNNNTHETA_PRIMpaptthetaEpl14003NNNNNinsKPseudo-adiabatic potential temperature
    PNNNNNTHETA_VIRTvptmp#pl1760015NNNNNinsKVirtual potential temperature
    PNNNNNVERT.VELOCwwapl40029NNNNNinsm/sGeometrical vertical velocity
    PNNNNNPOT_VORTICpvpvpl40214NNNNNinsK m2/kg/sPotential vorticity
    PNNNNNABS_VORTICabsv#pl410210NNNNNinss-1Absolute vorticity
    PNNNNNDIVERGENCEd#pl440213NNNNNinss-1Relative divergence

    State variables and diagnostics on height levels, levelType=heightAboveGround

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    HNNNNNWIND.U.PHYuuahag33022NNNNNinsm/su-component of wind
    HNNNNNWIND.V.PHYvvahag34023NNNNNinsm/sv-component of wind
    HNNNNNTEMPERATURttahag11000NNNNNinsKTemperature
    HNNNNNLIQUID_WATcwat_condclwhag760183NNNNNinskg/kgSpecific cloud liquid water content
    HNNNNNSOLID_WATEciwc_condclihag580184NNNNNinskg/kgSpecific cloud ice water content
    HNNNNNCLOUD_FRACtccclthag7106192NNNNNins0-1Total cloud cover
    HNNNNNSNOWsnow_cond#hag1840186NNNNNinskg/kgSpecific snow water content
    HNNNNNRAINrain_cond#hag1810185NNNNNinskg/kgSpecific rain water content
    HNNNNNGRAUPELgrpl_cond#hag2010132NNNNNinskg/kgSpecific graupel
    HNNNNNHUMI_RELATrhurhag5201192NNNNNins0-1Relative humidity
    HNNNNNPRESSUREpresphag1030NNNNNinsPaPressure

    State variables and diagnostics on PV levels, GRIB1 level type 117, levelType=potentialVorticity

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    VNNNGEOPOTENTIELz#pv6034NNNinsm2/s2Geopotential
    VNNNTEMPERATUREt#pv11000NNNinsKTemperature
    VNNNPRESSUREpres#pv1030NNNinsPaPressure
    VNNNHUMI_RELATIVr#pv5201192NNNins0-1Relative humidity
    VNNNHUMI.SPECIFIq#pv51010NNNinskg/kgSpecific humidity
    VNNNWIND.U.PHYSu#pv33022NNNinsm/su-component of wind
    VNNNWIND.V.PHYSv#pv34023NNNinsm/sv-component of wind
    VNNNVITESSE_VERTomega#pv39028NNNinsPa/sPressure vertical velocity (DYNAMICS=h)
    VNNNVERT.VELOCITw#pv40029NNNinsm/sGeometrical vertical velocity (DYNAMICS=nh)
    VNNNTEMPE_POTENTpt#pv13002NNNinsKPotential temperature
    VNNNABS_VORTICITabsv#pv410210NNNinss-1Absolute vorticity
    VNNNDIVERGENCEd#pv440213NNNinss-1Relative divergence
    VNNNTHETAPRIMWpapt#pv14003NNNinsKPseudo-adiabatic potential temperature

    State variables and diagnostics on Theta levels, GRIB1 level type 113, levelType=theta

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    TNNNGEOPOTENTIELz#th6034NNNinsm2/s2Geopotential
    TNNNTEMPERATUREt#th11000NNNinsKTemperature
    TNNNPRESSUREpres#th1030NNNinsPaPressure
    TNNNHUMI_RELATIVr#th5201192NNNins0-1Relative humidity
    TNNNHUMI.SPECIFIq#th51010NNNinskg/kgSpecific humidity
    TNNNWIND.U.PHYSu#th33022NNNinsm/su-component of wind
    TNNNWIND.V.PHYSv#th34023NNNinsm/sv-component of wind
    TNNNVITESSE_VERTomega#th39028NNNinsPa/sPressure vertical velocity (DYNAMICS=h)
    TNNNVERT.VELOCITw#th40029NNNinsm/sGeometrical vertical velocity (DYNAMICS=nh)
    TNNNABS_VORTICITabsv#th410210NNNinss-1Absolute vorticity
    TNNNPOT_VORTICITpv#th40214NNNinsK m2/kg/sPotential vorticity
    TNNNDIVERGENCEd#th440213NNNinss-1Relative divergence

    FA fields without any default GRIB1 translation

Some very special fields are left without any default translation. Please see the gl documentation on how to add your own translation.

    FA nameUnitComment
    CUF1PRESSURECoupling error field.
    THETAPWP_FLUXK m-4 s-1Instantaneous thetaprimwprim surface flux
    CLPMOCON.MOD.XFUkg kg-1 s-1MOCON model output
    ATMONEBUL.TOTALEAccumulated Total cloud cover.
    ATMONEBUL.CONVECAccumulated Convective cloud cover.
    ATMONEBUL.BASSEAccumulated Low cloud cover.
    ATMONEBUL.MOYENNAccumulated Medium cloud cover.
    ATMONEBUL.HAUTEAccumulated High cloud cover.
    SURFCFU.Q.TURBULAccumulated contribution of Turbulence to Q.
    SURFCFU.CT.TURBULAccumulated contribution of Turbulence to CpT
    SUNSHI. DURATIONSunshine duration.
    SURFFL.U TURBULContribution of Turbulence to U.
    SURFFL.V TURBULContribution of Turbulence to V.
    SURFFL.Q TURBULContribution of Turbulence to Q.
    SURFFL.CT TURBULContribution of Turbulence to CpT
    SNNNSRCSecond order flux.

    Variables postprocessed by gl

The following fields can be generated by gl from a history file and are thus not necessarily available as FA fields in Harmonie's FA output. When calculating these post-processed fields, make sure the fields required to derive them are present in the input files! For details, check util/gl/grb/postprocess.f90 and the routines called therein.

    Single level fields

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
MSLPRESSUREprespslhas10300insPaMSLP. gl calculates MSLP independently of AROME/FullPos
    #tdtdhag170062insKDew point temperature
    #vis#hag2001900insmVisibility
    #wdir#ttt31020lllinsDeg. trueWind direction. gl calculates based on u[33,ttt,lll] and v[34,ttt,lll] wind components
    #ws#ttt32021lllinsm/sWind speed. gl calculates based on u[33,ttt,lll] and v[34,ttt,lll] wind components
TOT.WATER.PRECIPtpprhag610180acckg/m2Total precipitation, gl calculates TP[61,105,0]=rain[181,105,0]+snow[184,105,0]+graupel[201,105,0]+hail[204,105,0]
TOT.SOLID.PRECIPtpsolidprsolidhag185012000acckg/m2Total solid precipitation, gl calculates [185,105,0]=snow[184,105,0]+graupel[201,105,0]+hail[204,105,0]
    #mldzmlahag6701930insmMixed layer depth/boundary layer height
    #tcc#hag71061922ins0-1Fog, cloud fraction of lowest model level
    #icei#hag1350ins-Icing index
    #atmiceg#hy??01205insm/sIcing index, Atmospheric ice growth rate
    #icei2#hag/?134011940ins-Icing index version 2
    #psct#hag/ct?1360400insKPseudo satellite image, cloud top temperature (infrared)
    #pstb#hag137041980insKPseudo satellite image, water vapour brightness temperature
    #pstbc#hag138041990insKPseudo satellite image, water vapour br. temp. + correction for clouds
    #pscw#hag139042000ins-Pseudo satellite image, cloud water reflectivity (visible)
    #prtp#hag14401190inscodePrecipitation type, 0:drizzle, 1:rain, 2:sleet, 3:snow, 4:freezing drizzle, 5:freezing rain, 6:graupel, 7:hail
    #fg#ttt2280222lllmaxm/sGust wind speed, calculated from ugst & vgst on corresponding level & levelType
    #hti#hag1480171930ins-Helicopter Triggered lightning Index
    #transmit#hag149061990ins-Transmittance
    #cat#hag145019220ins-|%CAT (clear air turbulence) index
    #bvf#hag1590192020inss-1Brunt Vaisala frequency

    Integrated quantities

    FA nameshortNameNCnamelvTiOPdpCpNlevsTunitsdescription
    TOT.NEB.ICEciwc_vintcliviea5801700inskg/m2Vertical integral of cloud ice
    TOT.NEB.WATcwat_vintclqviea7601690inskg/m2Vertical integral of cloud liquid water
    #msca#ea133061970ins0-1Mask of significant cloud amount
    #cape#hag1600760insJ/kgConvective Available Potential Energy, comes in two flavours, cape_version=1|2, where the second is compatible with the ECMWF version
#cin#hag1650770insJ/kgConvective inhibition, comes in two flavours, cape_version=1|2, where the second is compatible with the ECMWF version
    #rain_vintclrviea18101450inskg/m2Vertical integral of rain
    #snow_vintclsviea18401460inskg/m2Vertical integral of snow
#grpl_vintclgviea20101740inskg/m2Vertical integral of graupel
    #cb#ea18606110insmCloud base
    #ct#ea18706120insmCloud top
    #cb38#hag?189061983insmCloud base >3/8
    #lgt#ea2090171920insflash/hLightning intensity
    #lmxws#ea/mw?1420360insmLevel of max wind speed
    #maxucol#ea1640220insm/sMax u-component in column
    #maxvcol#ea1770230insm/sMax v-component in column
    #lmxice#ea143011990insmIcing index, Level of max icing
    #mxicegr#ea141012040ins1Icing index, Max icing growth index (0-4)
    #blice#ea14601960insmIcing index, Bottom level of icing
    #tlice#ea14701950insmIcing index, Top level of icing
    #cat_maxlev#ea1500192010insmLevel of max CAT index
    #cat_max#ea1510191970ins-Max CAT index
    #cat_b#ea1520191980insmBottom level of CAT
    #cat_t#ea1530191990insmTop level of CAT

    GRIB encoding information

    Time units, WMO code table 4

    The following time units are used to encode GRIB edition 1 data

Code  Unit
0     Minute
1     Hour
13    15 minutes
14    30 minutes

    Time range indicator, WMO code TABLE 5

Code  abbr     Definition
0     ins      Forecast product valid for reference time + P1 (P1 > 0), or Uninitialized analysis product for reference time (P1 = 0)
2     min/max  Product with a valid time ranging between reference time + P1 and reference time + P2. Used for min/max values
3     avg      Average (reference time + P1 to reference time + P2)
4     acc      Accumulation (reference time + P1 to reference time + P2) product considered valid at reference time + P2

Note that fields available as both instantaneous and accumulated values, such as rain, have the same parameter value and can only be distinguished by the time range indicator.
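For example, with ecCodes one can inspect the time range indicator to tell the two apart (a minimal sketch; the GRIB file name is just a placeholder):

  # rain (parameter 181) appears both as an instantaneous (0) and an accumulated (4) field
  grib_ls -w indicatorOfParameter=181 -p shortName,indicatorOfParameter,timeRangeIndicator,stepRange fc_output.grib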

    Level types, WMO Code table 3

    level typenameabbrWMO/HIRLAM type definitionUnitsnotes
    001surfacesfcGround or water surfaceWMO
    002cloudBasecbCloud base levelWMO
    003cloudTopctLevel of cloud topsWMO
    004isothermZeroisot0Level of 0°C isothermWMO
    005adiabaticCondensationacLevel of Adiabatic Condensation Lifted from the SurfaceWMO
    006maxWindmwMaximum wind levelWMO
    007tropopausetpTropopauseWMO
    008nominalTopntTop-of-atmosphereWMO
    020isothermalisotIsothermal levelTemperature in 1/100 KWMO
    100isobaricInhPaplIsobaric levelhPaWMO
    102meanSeamsAt mean sea level
    103heightAboveSeahasSpecified altitude above mean sea levelAltitude in mWMO
    105heightAboveGroundhagSpecified height above groundAltitude in mWMO
    107sigmaSigma levelSigma value in 1/10000WMO
    109hybridhyHybrid levelWMO
    112depthBelowLandLayerdbl
    113thetathIsentropic (theta) levelPotential temperature in KWMO
    117potentialVorticitypvPotential vorticity surface10-9 K m2 kg-1 s-1WMO
    192isothermZeroWetBulbisot0wb
    200entireAtmosphereeaEntire atmosphere (considered as a single layer)WMO, vertically integrated
    levelFreeConvectionlfcas heightAboveGround in GRIB1
    levelNeutralBuoyancylnbas heightAboveGround in GRIB1

    Harmonie GRIB1 code table 2 version 253 - Indicator of parameter

Below is the indicator of parameter code table for the Harmonie model. It is based on the WMO code table 2 version 3 with local parameters added. Parameter indicators 128-254 are reserved for originating centre use. Parameter indicators 000-127 should not be altered. In HARMONIE, radiation fluxes are assumed positive downwards (against the recommendation by WMO).

    ParDescriptionSI Units
    000Reservedn/a
    001PressurePa
    002Pressure reduced to MSLPa
    003Pressure tendencyPa s-1
    004Potential vorticityK m2 kg-1 s-1
    005ICAO Standard Atmosphere reference heightm
    006Geopotentialm2 s-2
    007Geopotential heightgpm
    008Geometrical heightm
    009Standard deviation of heightm
    010Total ozoneDobson
    011TemperatureK
    012Virtual temperatureK
    013Potential temperatureK
    014Pseudo-adiabatic potential temperatureK
    015Maximum temperatureK
    016Minimum temperatureK
    017Dew-point temperatureK
    018Dew-point depression (or deficit)K
    019Lapse rateK m-1
    020Visibilitym
    021Radar spectra (1)-
    022Radar spectra (2)-
    023Radar spectra (3)-
    024Parcel lifted index (to 500 hPa)K
    025Temperature anomalyK
    026Pressure anomalyPa
    027Geopotential height anomalygpm
    028Wave spectra (1)-
    029Wave spectra (2)-
    030Wave spectra (3)-
    031Wind directionDegree true
    032Wind speedm s-1
    033u-component of windm s-1
    034v-component of windm s-1
    035Stream functionm2 s-1
    036Velocity potentialm2 s-1
    037Montgomery stream functionm2 s-1
    038Sigma coordinate vertical velocitys-1
    039Vertical velocityPa s-1
    040Vertical velocitym s-1
    041Absolute vorticitys-1
    042Absolute divergences-1
    043Relative vorticitys-1
    044Relative divergences-1
    045Vertical u-component shears-1
    046Vertical v-component shears-1
    047Direction of currentDegree true
    048Speed of currentm s-1
    049u-component of currentm s-1
    050v-component of currentm s-1
    051Specific humiditykg kg-1
    052Relative humidity%
    053Humidity mixing ratiokg kg-1
    054Precipitable waterkg m-2
    055Vapor pressurePa
    056Saturation deficitPa
    057Evaporationkg m-2
    058Cloud icekg m-2
    059Precipitation ratekg m-2 s-1
    060Thunderstorm probability%
    061Total precipitationkg m-2
    062Large scale precipitationkg m-2
    063Convective precipitationkg m-2
    064Snowfall rate water equivalentkg m-2 s-1
    065Water equivalent of accumulated snow depthkg m-2
    066Snow depthm
    067Mixed layer depthm
    068Transient thermocline depthm
    069Main thermocline depthm
    070Main thermocline anomalym
    071Total cloud cover%
    072Convective cloud cover%
    073Low cloud cover%
    074Medium cloud cover%
    075High cloud cover%
    076Cloud waterkg m-2
    077Best lifted index (to 500 hPa)K
    078Convective snowkg m-2
    079Large scale snowkg m-2
    080Water temperatureK
    081Land cover (1 = land, 0 = sea)Proportion
    082Deviation of sea level from meanm
    083Surface roughnessm
    084Albedo%
    085Soil temperatureK
    086Soil moisture contentkg m-2
    087Vegetation%
    088Salinitykg kg-1
    089Densitykg m-3
    090Water run-offkg m-2
    091Ice cover (1 = ice, 0 = no ice)Proportion
    092Ice thicknessm
    093Direction of ice driftDegree true
    094Speed of ice driftm s-1
    095u-component of ice driftm s-1
    096v-component of ice driftm s-1
    097Ice growth ratem s-1
    098Ice divergences-1
    099Snow meltkg m-2
    100Significant height of combined wind waves and swellm
    101Direction of wind wavesDegree true
    102Significant height of wind wavesm
    103Mean period of wind wavess
    104Direction of swell wavesDegree true
    105Significant height of swell wavesm
    106Mean period of swell wavess
    107Primary wave directionDegree true
    108Primary wave mean periods
    109Secondary wave directionDegree true
    110Secondary wave mean periods
    111Net short-wave radiation flux (surface)W m-2
    112Net long-wave radiation flux (surface)W m-2
    113Net short-wave radiation flux (top of atmosphere)W m-2
    114Net long-wave radiation flux (top of atmosphere)W m-2
    115Long-wave radiation fluxW m-2
    116Short-wave radiation fluxW m-2
    117Global radiation fluxW m-2
    118Brightness temperatureK
    119Radiance (with respect to wave number)W m-1 sr-1
    120Radiance (with respect to wave length)W m-3 sr-1
    121Latent heat fluxW m-2
    122Sensible heat fluxW m-2
    123Boundary layer dissipationW m-2
    124Momentum flux, u-componentN m-2
    125Momentum flux, v-componentN m-2
    126Wind mixing energyJ
    127Image data-
    128Analysed RMS of PHI (CANARI)m2 s-2
    129Forecasted RMS of PHI (CANARI)m2 s-2
    130SW net clear sky radW m-2
    131LW net clear sky radW m-2
    132Latent heat flux through evaporationW m-2
    133Mask of significant cloud amount0-1
    134Icing index version 2-
    135Icing indexCode table
    136Pseudo satellite image, cloud top temperature (infrared)K
    137Pseudo satellite image, water vapour brightness temperatureK
    138Pseudo satellite image, water vapour br. temp. + correction for cloudsK
    139Pseudo satellite image, cloud water reflectivity (visible)?
    140Direct normal irradianceJ m-2
    141Max icing growth index-
    142Level of max wind speedm
    143Level of max icingm
144Precipitation TypeCode table
    145CAT index- / %
    146Bottom level of icingm
    147Top level of icingm
148Helicopter Triggered lightning Index-
    149Transmittance-
    150Level of max CAT indexm
    151Max CAT index-
    152Bottom level of CATm
    153Top level of CATm
    154Max Wind speedm s-1
    155Available#
    156Available#
    157Available#
    158Surface downward moon radiationW m-2
159Brunt Vaisala frequencys-1
    160CAPEJ kg-1
    161AROME hail diagnostic%
    162U-momentum of gusts out of the modelm s-1
    163V-momentum of gusts out of the modelm s-1
    164Max u-component in columnm s-1
    165Convective inhibition (CIN)J kg-1
    166MOCON out of the modelkg/kg s-1
    167Lifting condensation level (LCL)m
    168Level of free convection (LFC)m
169Level of neutral buoyancy (LNB)m
    170Brightness temperature OZ clearK
    171Brightness temperature OZ cloudK
    172Brightness temperature IR clearK
    173Brightness temperature IR cloudK
    174Brightness temperature WV clearK
    175Brightness temperature WV cloudK
    176Virtual potential temperatureK
    177Max v-component in columnm s-1
    178Available#
    179Available#
    180Available#
    181Rainkg m-2
    182Stratiform Rainkg m-2
    183Convective Rainkg m-2
    184Snowkg m-2
    185Total solid precipitationkg m-2
    186Cloud basem
    187Cloud topm
    188Fraction of urban landProportion
    189Cloud base >3/8m
    190Snow AlbedoProportion
    191Snow densitykg/m3
    192Water on canopykg/m2
    193Soil icekg/m2
    194Available#
    195Gravity wave stress U-compN/m2
    196Gravity wave stress V-compN/m2
    197Available#
    198Available#
    199Vegetation type-
    200TKEm2 s-2
    201Graupelkg m-2
    202Stratiform Graupelkg m-2
    203Convective Graupelkg m-2
    204Hailkg m-2
    205Stratiform Hailkg m-2
    206Convective Hailkg m-2
    207Available#
    208Available#
    209Lightningflash h-1
    210Simulated reflectivitydBz
    211Wind power productionMW or MJ
    212Pressure departurePa
    213Vertical divergences-1
    214UD_OMEGAms-1?
    215DD_OMEGAms-1?
    216UDMESHFRAC-
    217DDMESHFRAC-
    218PSHICONVCL-
    219Surface albedo for non snow covered areasProportion
    220Standard deviation of orography * gm2 s-2
    221Anisotropy coeff of topography-
    222Direction of main axis of topographyrad
    223Roughness length of bare surface * gm2 s-2
    224Roughness length for vegetation * gm2 s-2
    225Fraction of clay within soilProportion
    226Fraction of sand within soilProportion
    227Maximum proportion of vegetationProportion
    228Gust wind speedm s-1
    229Albedo of bare groundProportion
    230Albedo of vegetationProportion
    231Stomatal minimum resistances/m
    232Leaf area indexm2/m2
    233Thetaprimwprim surface fluxKm/s
    234Dominant vegetation index-
    235Surface emissivity-
    236Maximum soil depthm
    237Soil depthm
    238Soil wetnesskg/m2
    239Thermal roughness length * gm2 s-2
240Resistance to evapotranspirations/m
    241Minimum relative moisture at 2 meters%
    242Maximum relative moisture at 2 meters%
    243Duration of total precipitationss
    244Latent Heat SublimationW/m2
    245Water evaporationkg/m2
    246Snow sublimationkg/m2
    247Snow history???
    248A OZONEkg kg-1
    249B OZONEkg kg-1
    250C OZONEkg kg-1
    251Surface aerosol seakg kg-1
    252Surface aerosol landkg kg-1
    253Surface aerosol sootkg kg-1
    254Surface aerosol desertkg kg-1
    255Missing valuen/a

    SURFEX output Harmonie GRIB1 code table 2 version 001

    Levels are used in the conversion of SURFEX output to GRIB to indicate tile/patch/type/level:

level  description
300    Extra yet unknown SURFEX variables
301    Fraction of each vegetation types on PATCH 1
302    Fraction of each vegetation types on PATCH 2
303    Fraction of each vegetation types cy43 (ECOCLIMAP-SG)
600    Physiography fields?
720    Sea ice
730    Sea ice (TICE_LL)
755    Precip
760    Sea
770    in addition to FLake (or instead of it)
780    Flake
790    Patch (*_P fields)
800    ISBA
810    Gridpoint average
820    Surface boundary multi layer fields
830    ISBA - patch 1 (X001*, open land)
840    ISBA - patch 2 (X002*, forest)
950    Town energy balance model (TEB)
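These level codes can be used to select fields from the GRIB files converted from SURFEX output, e.g. with ecCodes (a minimal sketch; the file name is just a placeholder):

  # list the ISBA patch-1 fields (level 830) in a converted SURFEX GRIB1 file
  grib_ls -w level=830 -p shortName,indicatorOfParameter,indicatorOfTypeOfLevel,level SURFEX_output.grib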

    A small selection of fields available in the SURFEX output files is shown below.

    FA nameshortNameNCnamelvTiOPlevsTunitsdescription
    FRAC_SEA#sftofhag32300ins0-1Fraction of sea
    FRAC_WATER#sftlafhag33300ins0-1Fraction of water
    FRAC_NATURE#sftnfhag34300ins0-1Fraction of nature
    FRAC_TOWN#sfturfhag35300ins0-1Fraction of town
    COVER001#lsm10insLAND SEA MASK
    COVER002-COVER243##002-2430insECOCLIMAP I cover types
    COVER255##2550insECOCLIMAP I MY_COVER type
    COVER301-COVER573##001-254 & 001-0190insECOCLIMAP II cover types
    ZS#oroghag80insmOro hgt.
    SST#tosms110insKSST
    SIC#siconcams910ins0-1SIC
    T2M_SEA#tas_seahag11760insKT2m sea
    Q2M_SEA#huss_seahag51760inskg kg-1Q2m sea
    MER10M_SEA#vas_seahag34760insm s-1V10m sea
    ZON10M_SEA#uas_seahag33760insm s-1U10m sea
    T2M_WAT#tas_waterhag11772insKT2m water
    Q2M_WAT#huss_waterhag51770inskg kg-1Q2m water
    MER10M_WAT#vas_waterhag34770insm s-1V10m water
    ZON10M_WAT#uas_waterhag33770insm s-1U10m water
    DSNTISBA#sndhag660insmSnow depth
    WSNTISBA#snwhag130inskg m-2Total snow reservoir
    T2M_ISBA#tas_naturehag11802insKT2m isba
    Q2M_ISBA#huss_naturehag51802inskg kg-1Q2m isba
    X001T2M_P#tashag11832insKT2m of patch 1
    X002T2M_P#tashag11842insKT2m of patch 2
    T2M_TEB#tas_townhag11950insKT2m town
    T2MMAX_TEB#tasmax_townhag15950maxKMax Temp for town
    T2MMIN_TEB#tasmin_townhag16950minKMin Temp for town
    TGL#tg_LLLhag11800+insKTemperature of soil layer L(isba)
    WGL#wsa_LLLhag86800+insm3 m-3Liquid volumetric water content of soil layer L
    WGIL#isa_LLLhag193800+insm3 m-3Frozen volumetric water content of soil layer L
    WR#wrhag12800inskg m-2Liquid water retained by foliage (isba)
    DGL#dsoil_LLLhag23300insmSoil depth of soil layer L

    Harmonie GRIB1 code table 2 version 210

    Used for aerosol fields

    GRIB

    NetCDF

    diff --git a/previews/PR1129/ForecastModel/SingleColumnModel/Forcing/index.html b/previews/PR1129/ForecastModel/SingleColumnModel/Forcing/index.html index de9ffb63f..0549b5d47 100644 --- a/previews/PR1129/ForecastModel/SingleColumnModel/Forcing/index.html +++ b/previews/PR1129/ForecastModel/SingleColumnModel/Forcing/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


    MUSC Forcing

    From Eric Bazile: The fields SXXXFORC0001 –> SXXXFORC00NN in the initial file for MUSC are the atmospheric forcing without any rules for variables or advection etc ...

    • XXX = vertical levels
    • NN = number of forcing fields

So if, for example, you put the temperature in forcing field 1, QV in field 2, and the geostrophic wind components ug in 3 and Vg in 4, and you want to force MUSC for 48 h with nudging of T and Q and a geostrophic wind, you should add the following to the MUSC namelist

    NAMCT0
        LSFORC=T
        LSFROCS= FALSE ; default ONLY for surface forcing without SURFEX
     /   
    @@ -48,4 +48,4 @@
     NL_T_NUDG_TIME(3) = 43200
     NL_T_NUDG_TIME(4) = 64800
     NL_T_NUDG_TIME(5) = 86400

    +/

With these settings you cannot run MUSC for more than one day. If the interval between the forcing profiles is constant you can use *_FREQ instead of *_TIME.

    diff --git a/previews/PR1129/ForecastModel/SingleColumnModel/MUSC/index.html b/previews/PR1129/ForecastModel/SingleColumnModel/MUSC/index.html index def913be8..b9d9c30be 100644 --- a/previews/PR1129/ForecastModel/SingleColumnModel/MUSC/index.html +++ b/previews/PR1129/ForecastModel/SingleColumnModel/MUSC/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


      MUSC

      MUSC using the develop branch (CY46) in the git repository

      If you find any issues with any of the instructions or scripts, feel free to notify Emily Gleeson (emily.gleesonATmet.ie) and Eoin Whelan (eoin.whelanATmet.ie)

      Currently a "reference" test case, called musc_ref, works on ATOS, as well as the ARMCU cases (with and without SURFEX for both AROME and HARMONIE namelists) and the two microphysics-related cases (supercooled liquid) developed by Bjorg Jenny Engdahl in cycle 40.

      Some instructions on how to use MUSC are included below. See here for some information on HARMONIE-AROME experiments using MUSC but note that the scripts have changed somewhat since that paper was written.

      Set up MUSC

      1. Get the code:

        mkdir -p $SCRATCH/harmonie_releases/git/HCY46
         cd $SCRATCH/harmonie_releases/git/HCY46
         git clone git@github.com:Hirlam/Harmonie.git
         cd Harmonie
        @@ -70,4 +70,4 @@
                  IF(ABS(ZVBH(JFLEV)-PVBH(JFLEV)) > PEPS) THEN
                    WRITE(KULOUT,*) ' VERTICAL FUNCTION *B* MISMATCH ON ',&

        Then you are ready to compile:

        • remove the file experimentislocked from the experiment directory.
        • remove the directory with your previous build (if any).
        • start the compile with the musc_compile.sh script
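
A minimal sketch of these three steps as shell commands (the experiment and build paths are placeholders, not paths defined by the system):

  cd /path/to/your/musc/experiment     # placeholder experiment directory
  rm -f experimentislocked             # remove the lock file
  rm -rf /path/to/your/previous/build  # remove the previous build, if any
  ./musc_compile.sh                    # start the compile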

        When starting the MUSC run, add the PATH to mpirun and the libraries:

        export PATH=$PATH:/usr/lib64/openmpi/bin
         export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/lib64/openmpi/lib
      +./musc_run.sh [...]

      MUSC FAQ

      1. If there is an error, what files do I look in? NODE.001_01 and lola in your output directory.

2. How do I handle the output files? The output files are of the form Out.XXX.XXXX and appear in your output directory. They are in lfa format and can be handled using ddh tools. See the bash script musc_plot1Dts.sh for ideas. There are also ICM*lfa output files that are handy for plotting profiles - use musc_convertICM2ascii.sh to convert these files to ASCII and musc_plot_profiles_ICMfiles.sh to plot some profiles, e.g. TKE, cloud liquid etc.

      3. I ran a different idealised case but did not get different results? The likely reason for this is that you did not delete the namelists from your experiment directory. If the namelists are there, the musc_run.sh script neither creates them nor copies them from the repository.

      4. How do I create a new idealised case? This is not straightforward but the following was used to create the ASTEX cases in cy43 using info from cy38: https://www.overleaf.com/7513443985ckqvfdcphnng

5. How can I access a list of MUSC output parameters? Ensure you have the ddhtoolbox compiled. Then use lfaminm $file on any of your output files and it will show what is there. To look at a particular variable try lfac $file $parameter e.g. lfac $file PTS (for surface temperature). You can redirect the values to an ASCII file for ease of use (e.g. lfac $file PTS > $ASCIIfile); see the sketch after this list.

6. Is MUSC similar to the full 3D model version - is the physics the same? Yes, if you check out develop then you have MUSC up to date with that.

      7. Do I need to recompile the model if I modify code? Yes, if you modify code in a single file you must recompile the code but do not delete the original compiled model first. This will recompile relatively quickly. If you modify code in multiple files and you change what variables are passed between files, then you must delete your original compiled model and recompile the code. This will take longer to recompile.
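
As a small illustration of FAQ item 5 (a minimal sketch; Out.000.0000 is just a placeholder name following the Out.XXX.XXXX pattern):

  lfaminm Out.000.0000             # list all records available in this lfa output file
  lfac Out.000.0000 PTS            # print the surface temperature values
  lfac Out.000.0000 PTS > PTS.txt  # redirect the values to an ASCII file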

      MUSC variable names

      A list of variable names found in the MUSC lfa output files can be found here. Please note that this is not a complete list of MUSC output parameters (yet). The variables in regular ICMSH... fa output are documented here

      Outstanding Issues

1. ARMCU and Jenny's cases run without surface physics, radiation, etc., and hence return NaNs in apl_arome. To circumvent this on ECMWF, we needed to compile less strictly. This needs to be investigated further.
      2. The ASTEX cases currently do not run on ECMWF but work perfectly at Met Eireann - debugging needed.

      MUSC using EMS

      These instructions have moved to MUSC EMS

    diff --git a/previews/PR1129/ForecastModel/SingleColumnModel/MUSC_EMS/index.html b/previews/PR1129/ForecastModel/SingleColumnModel/MUSC_EMS/index.html index 31ebc5c44..241b3e328 100644 --- a/previews/PR1129/ForecastModel/SingleColumnModel/MUSC_EMS/index.html +++ b/previews/PR1129/ForecastModel/SingleColumnModel/MUSC_EMS/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


    Run MUSC with EMS

    These instructions require the use of dev-CY46h1.

    If you find any issues with any of the instructions or scripts, feel free to notify Emily Gleeson (emily.gleesonATmet.ie) and Eoin Whelan (eoin.whelanATmet.ie)

    In this section a description of how to install and run MUSC using EMS is provided. This is based on compilation and execution in a Ubuntu 20.04 container (tested using Apptainer on the ECMWF Atos HPC) and use of the EMS system to execute MUSC and convert the output to NetCDF. EMS is primarily developed by Romain Roehrig (Météo France) https://github.com/romainroehrig/EMS.

    Start your container

    1. Log in to hpc-login on the Atos
    2. Load the Apptainer module and start the Ubuntu 20.04 container:
    module load apptainer
     /home/dui/musc_ubuntu.sif

    Further details concerning Apptainer on the Atos are available here

    Compile the code

I (Eoin) have not had time to sort out compilation using CMake, but the following instructions provide a minimalist approach to compiling the code using makeup. These instructions assume you have a clone or copy of Harmonie (dev-CY46h1) in your $PERM directory on Atos, where GHUSER is your GitHub username.

    cd $PERM
     GHUSER=your_github_username
     git clone git@github.com:$GHUSER/Harmonie.git harmonie_git/$GHUSER/dev-CY46h1 -b dev-CY46h1
    @@ -55,4 +55,4 @@
     mkdir config
     cp $HOME/SCM-atlas_git/ewhelan/hirlam/examples/config/config_HARM.py config/
     ### edit config/config_HARM.py
    +run_atlas1d.py -config config/config_HARM.py
1. Issue with SURFEX namelist
2. Python issue L241 $EMS_DIR/ems/prep_init_forc_atm_GMAP.py
3. Missing data_input.nc
    diff --git a/previews/PR1129/ForecastModel/SingleColumnModel/MUSC_vars/index.html b/previews/PR1129/ForecastModel/SingleColumnModel/MUSC_vars/index.html index 484dcd755..d4819ceb7 100644 --- a/previews/PR1129/ForecastModel/SingleColumnModel/MUSC_vars/index.html +++ b/previews/PR1129/ForecastModel/SingleColumnModel/MUSC_vars/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


    Variable names for MUSC output

    List of parameters copied from variable_list.csv

    short namevariable namelong nameunit
    lwdwPFRTHDSlong wave downward radiation at surfaceW/m2
    lwupPFRTHlong wave upward radiation at surfaceW/m2
    swdwPFRSODSshort wave downward radiation at surfaceW/m2
    swupPFRSOshort wave upward radiation at surfaceW/m2
    shfPFCSsensible heat fluxW/m2
    lhfPFCLNlatent heat fluxW/m2
    evapZLH_fluxevaporation+sublimation fluxmm/day
    evap2PFEVLevaporation+sublimation fluxmm/day
    ustarZUSTARfriction velocitym/s
    rainPREC_TOTprecipitation (liq+sol) ratemm/day
    psurfPAPRSsurface PressurePa
    hpblPCLPHboundary layer heightm
    hpbl2KCLPHboundary layer heightm
    tsurfPTSsurface temperatureK
    t2mPTCLS2 m temperatureK
    q2mPQCLS2 m specific humidityKg/Kg
    rh2mPRHCLS2 m relative humidity[0-100]
u10mPUCLS10m u-componentm/s
    v10mPVCLS10m v-componentm/s
    t3mPT_03temperature at 3.30 meter above the surfaceK
    q3mPQ_03specific humidity at 3.30 meterKg/Kg
    rh3mPRH_03relative humidity at 3.30 meter[0-100]
    u3mPU_03u-component at 3.30 meterm/s
    v3mPV_03v-component at 3.30 meterm/s
    etc
    t42mPT_42temperature at 41.90 meter above the surfaceK
    q42mPQ_42specific humidity at 41.90 meterKg/Kg
    rh42mPRH_42relative humidity at 41.90 meter[0-100]
    u42mPU_42u-component at 41.90 meterm/s
    v42mPV_42v-component at 41.90 meterm/s
    ccPCLCTtotal cloud cover fraction0 1
    tsurfPTSSurface temperatureK
    albPALBHAlbedo[0-1]
    alb_surfTALB_ISBAsurface albedo-
    z0mPGZ0Momentum roughness lengthm
    z0hPGZ0HHeat roughness lengthm
    emisPEMISsurface emissivity[0-1]
    emisEMISsurface emissivity[0-1]
    zfPAPHIFAltitude of layer mid-points at t=0 (full-level)m
    pfPAPRSFPressure of layer mid-points at t=0 (full-level)Pa
    tPTtemperatureK
    thTHETApotential temperatureK
    qPQspecific humiditykg/kg
    uPUzonal wind componentm/s
    vPVmeridional wind componentm/s
    ugeoZFUGEOu-component geostrophic windm/s
    vgeoZFVGEOv-component geostrophic windm/s
    dudt_lsZFUu-component advectionm/s/s
    dvdt_lsZFVv-component advectionm/s/s
    dtdt_lsZFTtemperature advectionK/s
    dqdt_lsZFQmoisture advectionKg/Kg/s
    wZWvertical movementm/s
    zhhPAPHIheight of half levelm
    phhPAPRSpressure of half levelPa
    kmZKMEddy diffusivity momentumm2/s
    khZKHEddy diffusivity momentumm2/s
    mfZMF_shalmassfluxKg/m2/s
    dT_dt_radZDTRADtemperature tendency from radiationK/d
    TKEPECTturbulent kinetic energy$m^2/s^2$
    shearZPRDYshear production$m^2/s^3$
    buoyZPRTHbuoyancy production$m^2/s^3$
    transZDIFFtotal transport$m^2/s^3$
    dissiZDISSdissipation$m^2/s^3$
    diff --git a/previews/PR1129/ForecastModel/WindFarms/index.html b/previews/PR1129/ForecastModel/WindFarms/index.html index 5f01aed22..9990514ee 100644 --- a/previews/PR1129/ForecastModel/WindFarms/index.html +++ b/previews/PR1129/ForecastModel/WindFarms/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


    Documentation for wind farm parameterisation

    Natalie Theeuwes & Bert van Ulft<br> contact: Natalie.Theeuwes@knmi.nl

    Introduction

We have implemented the Fitch et al. (2012) scheme in HARMONIE-AROME. It was first implemented in CY40 and evaluated for one year, 2016; the results are published in van Stratum et al. (2022). It was subsequently moved to CY43, run for three years (2019-2021) and evaluated.

    Implementation in CY46

    Switching on parameterisation

To switch the parameterisation on, go to ecf/config_exp.h and set WINDFARM="yes". This makes the necessary namelist changes via scr/forecast_model_settings.sh:

    # Fitch et al. (2012) wind turbine parametrization
     export LWINDFARM=.FALSE.
     if [ "$WINDFARM" = yes ]; then
         LWINDFARM=.TRUE.
    @@ -141,4 +141,4 @@
             editionNumber = 2 ;
             interpretationOfNumberOfPoints = 0 ;
             subCentre = 255 ;
    + }

    For both GRIB 1 and GRIB 2:

    1. Wind power production, accumulated:

      • name: 'Wind power production, accumulated'
      • paramId: '253211'
      • shortName: 'wfpower_acc'
      • units: 'MJ'
2. Wind power production, instantaneous:

      • name: 'Wind power production, instantaneous'
      • paramId: '253211'
      • shortName: 'wfpower_ins'
      • units: 'MW'
    diff --git a/previews/PR1129/Observations/Aeolus/index.html b/previews/PR1129/Observations/Aeolus/index.html index edf9e6bc4..62248e655 100644 --- a/previews/PR1129/Observations/Aeolus/index.html +++ b/previews/PR1129/Observations/Aeolus/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


    Aeolus, HLOS wind

    short overview

Aeolus was an ESA Earth Explorer mission, carrying a Doppler wind lidar that measured the vertical profile of winds. Aeolus was launched in August 2018 and safely re-entered over Antarctica in July 2023. The period of usable data is from 31 August 2018 to 30 April 2023.

    Aeolus winds come in two different versions, Mie and Rayleigh. The Mie winds are measured by observing the scattering by cloud droplets and aerosols and are only available in optically thin and medium-thin clouds. The horizontal resolution of Mie profiles is 10 km. Rayleigh winds are obtained by measuring the scattering by air molecules in clear air, and have a lower horizontal resolution of 80 km.

    Since Aeolus was a non-operational mission, the data need to be downloaded manually from, e.g. ESA's Earth Observation portal, https://aeolus-ds.eo.esa.int/oads/access/ (a registration is needed to download the data).

The data from Aeolus are processed by the Aeolus DISC team, and the processing has been continuously improved throughout the mission lifetime. A final version, covering the full Aeolus data set, will be released in 2028 (using baseline 18; the operational baseline at the time of the satellite's re-entry was baseline 13). More details can be found here.

    Harmonie changes

    To use Aeolus winds, activate them in scr/include.ass by setting LIDAR_OBS to 1

    export LIDAR_OBS=1             # LIDAR aeolus hlos wind
     [[  $LIDAR_OBS -eq 1  ]] && types_BASE="$types_BASE lidar"

The optimal settings to use for the observation errors of Aeolus data are still an open question. The errors are reported in the .bufr file which contains the L2B winds, and the limits for accepting them can be adjusted in src/odb/pandor/module/bator_decodbufr_mod.F90.

The main ones to be careful with are the upper error limits. The recommended values at the time of writing are:

      REAL, PARAMETER    :: error_est_threshold_Mie = 4.5  ! m/s
       REAL, PARAMETER    :: error_est_threshold_Ray = 8.  ! m/s
    +

    Future updates

When the follow-on mission, Aeolus-2 (ESA's name) or EPS-Aeolus (EUMETSAT's name), launches in 2032, these settings will probably have to be revised. The follow-on mission will carry a revised version of the previous instrument, providing observations with higher resolution.

    diff --git a/previews/PR1129/Observations/Amv/index.html b/previews/PR1129/Observations/Amv/index.html index 563f02218..de7d4f5bb 100644 --- a/previews/PR1129/Observations/Amv/index.html +++ b/previews/PR1129/Observations/Amv/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


    Atmospheric Motion Vectors (AMV)

    Introduction

This page gives short information about the (pre-)processing, assimilation and post-processing of AMV data in the Harmonie system, as well as how to access the data.

    AMV data

AMV data are available via 1) EUMETCast, 2) the MARS archive at ECMWF (both polar and geostationary winds) or 3) locally using NWCSAF software. Via EUMETCast, both kinds of data are delivered in BUFR format. An abstract from the 5th Winds Workshop on the quality control of EUMETSAT wind products (S2-3_Elliott-Parallel.pdf) provides some useful information on how AMV BUFR is encoded. We define two kinds of AMV data in the Harmonie system: geostationary satellite based (GEOW) and polar satellite based (POLW). GEOW and POLW can be processed separately through the usual request in scr/include.ass as described below.

    HARMONIE changes

    scr/include.ass

scr/include.ass should be edited to "switch on" the use of AMVs (SATOB/geowinds):

    export GEOW_OBS=1               # Satob geowind / SAFNWC geowind
     export GEOW_SOURCE=ears         # mars:MARS | else: file in $OBDIR
     [[  $GEOW_OBS -eq 1  ]] && types_BASE="$types_BASE geow"
     
    @@ -36,4 +36,4 @@
     values      24  008012  LAND/SEA QUALIFIER
     values      25  007024  SATELLITE ZENITH ANGLE
     values     211  033007  % CONFIDENCE
    +END geowind

Please be aware that the processing of data from MARS has not yet been tested. From 43h2.1, all the necessary content of the param file for processing both GEOW and POLW is available in const/bator_param/param_bator.cfg.geow.${GEOW_SOURCE/POLW_SOURCE}.

    BATOR namelist

    Depending on the satellite and channel you may have to add entries to the NADIRS namelist in the Bator script like the following:

    TS_GEOWIND(isatid)%T_SELECT%LCANAL(ichanal)=.TRUE.,
    • Satellite identifiers are available here: [https://confluence.ecmwf.int/wiki/display/ECC/WMO%3D27+code-flag+table]
    • Bator defaults for MSG AMV data are set in src/odb/pandor/module/bator_init_mod.F90

    Source code

    The reading of BUFR AMVs is taken care of by src/odb/pandor/module/bator_decodbufr_mod.F90. This subroutine reads the following parameters defined in the param.cfg file:

Name               Description
Date and time      derived from tconfig(004001) - assumes month, day, hour and minute are in consecutive entries in the values array
Location           latitude and longitude are read from tconfig(005001) and tconfig(006001)
Satellite          the satellite identifier is read from tconfig(001007)
Origin. center     the originating center (of the AMV) is read from tconfig(001031)
Compu. method      the wind computation method (type of channel + cloudy/clear if WV) is read from tconfig(002023)
Derivation method  the height assignment method is read from tconfig(002163) and the tracking method from tconfig(002164)
Channel frequency  the centre frequency of the satellite channel is read from tconfig(002153)
Height (pressure)  the height of the AMV observation is read from tconfig(007004)
Wind               the wind speed and direction are read from tconfig(011002) and tconfig(011001)
Temperature        the coldest cluster temperature is read from tconfig(012071)
FG QI              the QI (including FG consistency) for MSG AMVs is read from the first location where descriptor 033007 appears
noFG-QI            the FG-independent QI for MSG AMVs is read from the first location where 033007 appears + offset(1)=24
Sat zenith angle   the satellite zenith angle is read from tconfig(007024)
Land/sea/coast     a land/sea/coast qualifier is read from tconfig(008012)

    The geowind routine was adapted to handle MSG AMVs from MARS, and its module /src/odb/pandor/module/bator_decodbufr_mod.F90 was uploaded to the trunk (Mar 2017).

    Blacklist

    The selection/blacklist of AMVs according to channel, underlying sea/land, QI, etc. is done in src/blacklist/mf_blacklist.b, section - SATOB CONSTANT DATA SELECTION -.

    diff --git a/previews/PR1129/Observations/Ascat/index.html b/previews/PR1129/Observations/Ascat/index.html index 1cc2e2fef..460f6c434 100644 --- a/previews/PR1129/Observations/Ascat/index.html +++ b/previews/PR1129/Observations/Ascat/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -
    +
    diff --git a/previews/PR1129/Observations/Atovs/index.html b/previews/PR1129/Observations/Atovs/index.html index de769dc59..345e04732 100644 --- a/previews/PR1129/Observations/Atovs/index.html +++ b/previews/PR1129/Observations/Atovs/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


    ATOVS radiances (pre-) processing

    Introduction

    The IFS/ARPEGE/AROME data assimilation code uses level 1c radiances. ATOVS radiances are available through a local HRPT (High Rate Picture Transmission) antenna and the EUMETSAT EARS (EUMETSAT Advanced Retransmission Service) EUMETCast broadcasting system. Data received through a local antenna need to be pre-processed with the ATOVS and AVHRR Pre-processing Package (AAPP). Radiances are also available through the GTS, but with longer delays.

    This short description explains how to prepare ATOVS radiances for (operational) data assimilation. Like all radiances, ATOVS data are bias corrected using the variational bias correction (VarBC) technique. VarBC coefficients should be updated for each limited-area model. The variational bias correction is activated through namelist switches (see below).

    ATOVS radiances

    scr/include.ass

    scr/include.ass should be edited to "switch on" the use of AMSUA (AMSU-A) and AMSUB (AMSU-B/MHS):

    export AMSUA_OBS=1             # AMSU-A
     export AMSUB_OBS=1             # AMSU-B, MHS
     export ATOVS_SOURCE=mars       # local: EUMETCast;
                                    # mars: data from MARS
    @@ -255,4 +255,4 @@
              cp $HM_LIB/const/bias_corr/${DOMAIN}/VARBC.cycle.$HH ${DLOCVARBC}/VARBC.cycle || \
              { echo "Could not find cold start VARBC data VARBC.cycle.$EMONTH.$HH" ; exit 1 ; }
                   ls -lrt ${DLOCVARBC}
    -       fi

    + fi

    The only difference is that all the VarBC files are now stored under a ${DOMAIN} directory. This keeps the system up to date and ready for all known model domains. Please send your VarBC files to the system administrators.

    For operational implementation

    The setup is much easier. Name the VARBC.cycle files VARBC.cycle.${HH} and put them in $ARCHIVE_ROOT/VARBC_latest, which you need to create; a sketch is given below.
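    A minimal sketch of that step, assuming four analysis times per day and an existing set of VarBC coefficient files (the source path and the cycle list are placeholders; only the VARBC.cycle.${HH} naming and the $ARCHIVE_ROOT/VARBC_latest directory come from the description above):

    # Illustrative only: populate $ARCHIVE_ROOT/VARBC_latest with VarBC coefficient files
    mkdir -p $ARCHIVE_ROOT/VARBC_latest
    for HH in 00 06 12 18; do      # analysis hours of the suite (example)
      cp /path/to/previous/exp/VARBC.cycle.$HH $ARCHIVE_ROOT/VARBC_latest/VARBC.cycle.$HH
    done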

    To check that you have done things right:

    • Doing experiment: Check that you have all LISTE_LOC_$HH files under the nam directory, and the VARBC.cycle.$DOMAIN.$EMONTH.$HH files under const/bias_corr directory.

    • Operational implementation: Check that you have all LISTE_LOC_$HH files under the nam directory, and the VARBC.cycle.$HH under the $ARCHIVE_ROOT/VARBC_latest directory.

    If you passed these checks, then you are ready with the ATOVS implementation. Congratulations!

    diff --git a/previews/PR1129/Observations/Bator/index.html b/previews/PR1129/Observations/Bator/index.html index 64b12812a..f0bbe7924 100644 --- a/previews/PR1129/Observations/Bator/index.html +++ b/previews/PR1129/Observations/Bator/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


    ODB creation: Bator

    General Description

    The pre-processing step creates ODB (Observational Data Base) from various observation data files possibly in different formats.

    • Software: The programs used for pre-processing (ShuffleBufr, oulan and BATOR) are not part of the IFS code. oulan is software developed at Météo France to extract (conventional) observations from their local database (BDM). The ASCII output from oulan, the OBSOUL file, is one of the inputs of BATOR. By default, oulan is no longer part of the observation processing chain. BATOR is also developed at Météo France to generate the ODB (Observational DataBase) database for the ARPEGE/ALADIN/HARMONIE analysis system. ODB is tailor-made database software developed at ECMWF to manage the very large observational data volumes assimilated in the IFS 4DVAR system, and to enable flexible post-processing of these data. HARMONIE's BATOR originates from the MF export-pack. The figure below describes the mechanism of the observation pre-processing in HARMONIE DA. To sum up, ShuffleBufr splits different observations into BUFR files and BATOR creates the ODB files from BUFR/HDF5/NetCDF input files.
    • Compilation: BATOR is compiled using gmkpack or makeup.
    • Scripts: scr/Bator.
    • Input: BUFR/HDF5/NetCDF
    • Output: ODB databases for surface and upper-air data assimilation

    BATOR

    BATOR creates the ODB files from observational data in BUFR/HDF5/NetCDF format. BATOR also includes filtering (blacklisting) of parameters from stations of different observation types. To run the BATOR program one needs files containing blacklist rules, namelist(s), a file describing the observations and their format (refdata), and some settings for the ODB environment. Documentation provided by Météo France is available at http://www.umr-cnrm.fr/gmapdoc/spip.php?article229. In particular: BATOR namelists, the param_bator.cfg file and the batormap files.

    observation window and timeslots

    The timeslot characteristics are provided to BATOR through the following environment variables, which are defined in scr/Bator based on settings provided in scr/include.ass and ecf/config_exp.h (an illustrative setting is given after the table).

    Environment variable | Description
    ODB_ANALYSIS_DATE | analysis date (YYYYMMDD)
    ODB_ANALYSIS_TIME | analysis time (hhmmss)
    BATOR_NBSLOT | number of timeslots needed [1, 9999]
    BATOR_WINDOW_LEN | width of the temporal assimilation window (in minutes) [1, 9999]
    BATOR_WINDOW_SHIFT | shift of the temporal assimilation window relative to the analysis time (in minutes). Must be negative.
    BATOR_SLOT_LEN | width of a standard timeslot (in minutes) [1, 9999]
    BATOR_CENTER_LEN | width of the centred timeslot (in minutes) [1, 9999]
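    As a purely illustrative setting (the numbers below are assumptions, not HARMONIE defaults), a 3-hour assimilation window centred on a 12 UTC analysis, split into a 1-hour centred slot and two 1-hour side slots, could be described as:

    export ODB_ANALYSIS_DATE=20241106   # example analysis date
    export ODB_ANALYSIS_TIME=120000     # example analysis time (12 UTC)
    export BATOR_NBSLOT=3               # three timeslots in total
    export BATOR_WINDOW_LEN=180         # 3-hour window
    export BATOR_WINDOW_SHIFT=-90       # window starts 90 minutes before the analysis time
    export BATOR_SLOT_LEN=60            # 1-hour standard timeslots
    export BATOR_CENTER_LEN=60          # 1-hour centred timeslot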

    batormap

    The batormap file lists all the input data files (BUFR, NETCDF, HDF5, OBSOUL) to translate and put in a particular ODB database. Several records can be stored in this file, each one composed of the following 4 fields (blank spaces are used as separators). The batormap file is created by scr/Bator based on settings provided in scr/include.ass and task arguments.

    • The ECMA database extension in which data will be stored, up to 8 characters.
    • The data filename extension, up to 8 characters.
    • Data filename format, up to 8 characters.
    • Kind of data or instrument, up to 16 characters. Must match a kind of data in the subroutine bator_initlong (src/odb/pandor/module/bator_init_mod.F90)

    For example:

    conv     conv     OBSOUL   conv
     conv     synop    BUFR     synop

    param.cfg

    BATOR reads BUFR data according to definitions describing BUFR templates in the param.cfg file. The general layout of definitions in the param.cfg file is as follows:

    BUFR label
     a b c d
     codage  a1  desc_a1
    @@ -88,4 +88,4 @@
     		#-- create IOASSIGN file for the given sub-base
     		cd ${d_DB}/ECMA.${base}	
     		export ODB_IOASSIGN_MAXPROC=${NPOOLS}
    -		$HM_LIB/scr/create_ioassign -l "ECMA" -n ${BATOR_NBPOOL}

    + $HM_LIB/scr/create_ioassign -l "ECMA" -n ${BATOR_NBPOOL}

    where $base is the ODB base ($base can be, for example, conv (conventional data), amsu (ATOVS/AMSU-A, AMSU-B/MHS), sev (SEVIRI), iasi or radarv (radar)). Important: if you would like to have more bases, do not forget to take that into consideration when generating the "batormap" file for BATOR, which defines the observations you would like to have in each base.

    Blacklisting

    To avoid model forecast degradation, two files can be used to blacklist or exclude data from the analysis. They are also used to blacklist observations that the model cannot deal with because they are not representative (orography, breeze effects, ...). The reason for this method of 'blacklisting', built into Bator, existing alongside hirlam_blacklist.b (built into Screening), is to allow simple and quick changes (in particular without changing the binary) in the operational suite.

    The selection of an observation to be 'blacklisted' can be done using multiple criteria (SID/STATID, obstype, codetype, varno, channel/level, production center, producing sub-center, network(s) concerned, cycle (prod/assim), ...).

    LISTE_LOC

    The LISTE_LOC file can be used to blacklist satellite data and also for other data by type and / or subtype for a given parameter (described by varno or not). The contents of the LISTE_LOC are as follows:

    Column | Description | Format
    1 | Type of action: N = blacklisted, E = excluded | a1
    2 | The observation type (obstype@hdr) | i3
    3 | The observation code-type (codetype@hdr) | i4
    4 | The satellite ID with leading zeros (satid@sat) | a9
    5 | The centre that produced the satellite data | i4
    6 | The parameter ID (varno@body) or the satellite sensor ID (sensor@hdr) | i4
    7 | Optional keywords of ZONx4, TOVSn, PPPPn, PROFn | 

    TOVSn C1 C2 ... Cn

    • can be applied to ATOVS radiances
    • n can be at most 9 indicating the involved channels
    • the Ci values specify the channels to be blacklisted

    PPPPn P1 P2 ... Pn

    • can be applied to blacklist different pressure levels
    • n can be at most 9 indicating the involved levels
    • the Pi values specify the pressure levels (in hPa) to be blacklisted

    PROFn P1 P2 ... Pn-1 I1 I2 ... In-1

    • n can be at most 9 indicating the involved layers
    • the Pi values specify the bottom and top levels of pressure layers (in hPa).
    • The first layer is always [1000,P1]
    • the Ii values indicate if blacklisting should be applied (=1) or not (=0) to the given layer.

    ZONx4 latmin latmax lonmin lonmax

    • can be applied to SATOB/GEOWIND data
    • if x=B then the pixels with lat < latmin or lat > latmax or lon < lonmin or lon > lonmax will be blacklisted
    • if x=C then the pixels with lat < latmin or lat > latmax or (lon > lonmin and lon < lonmax) will be blacklisted.

    LISTE_NOIRE_DIAP

    The LISTE_NOIRE_DIAP (const/bator_liste) can be used to blacklist conventional observations by station identifier. The contents of the LISTE_NOIRE_DIAP are as follows:

    Column | Description | Format
    1 | Observation type (obstype@hdr) | i2
    2 | Observation name | a10
    3 | Observation codetype (codetype@hdr) | i3
    4 | Parameter ID (varno@body) | i3
    5 | Station ID (statid@hdr) | a8
    6 | Start date of blacklisting yyyymmdd | a8
    7 | Optional layer blacklisting (PROFn) | a180

    PROFn P1 P2 ... Pn-1 I1 I2 ... In

    • n can be at most 9 indicating the involved layers
    • the Pi values specify the bottom and top levels of pressure layers (in hPa). The first layer is always [1000,P1]
    • the Ii values indicate if blacklisting should be applied (=1) or not (=0) to the given layer.
    • The Hxx keyword specifies the analysis hour that should be blacklisted, e.g. H00 or H06

    Particularities: the blacklisting of certain parameters involves the automatic blacklisting of other parameters, as summarized in the table below:

    obstype | specified parameter | blacklisted parameters
    SYNOP | 39 (t2) | 39 (t2), 58 (rh2), 7 (q)
    SYNOP | 58 (rh2) | 58 (rh2), 7 (q)
    TEMP | 1 (z) | 1 (z), 29 (rh), 2 (t), 59 (td), 7 (q)
    TEMP | 2 (t) | 2 (t), 29 (rh), 7 (q)
    TEMP | 29 (rh) | 29 (rh), 7 (q)
    diff --git a/previews/PR1129/Observations/Cope/index.html b/previews/PR1129/Observations/Cope/index.html index 1d0719075..0f45264fe 100644 --- a/previews/PR1129/Observations/Cope/index.html +++ b/previews/PR1129/Observations/Cope/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


    ODB creation (COPE)

    General Description

    HIRLAM, ALADIN and Météo France are working together with ECMWF to develop COPE, the Continuous Observation Pre-processing Environment, to replace Oulan/Bator (and BUFR2ODB at ECMWF) and to improve the pre-processing of observations for use in NWP. COPE developments are made in ECMWF's git repository.

    Here are some links that may be of interest:

    "Support" software packages

    This section provides a step-by-step set of instructions on how to compile COPE and COPE-related software.

    Preparation

    These instructions rely on the ODB API source code bundle odb_api_bundle-0.18.1-Source.tar.gz and emoslib libemos-4.4.2-Source.tar.gz. The default install location for software packages is $HOME/metapp/.

    mkdir -p $HOME/test_ecmwf_releases
     mkdir -p $HOME/test_ecSource
     cp odb_api_bundle-0.15.2-Source.tar.gz $HOME/test_ecmwf_releases/
     cp libemos-4.4.2-Source.tar.gz $HOME/test_ecmwf_releases/
    @@ -86,4 +86,4 @@
     make install

    COPE in HARMONIE system

    The use of COPE in HARMONIE relies on ODB-API, b2o and COPE itself.

    • ODB-API tools must be included in PATH
    • The ECMA.sch used by COPE is maintained in the b2o version described above.
    • mf_vertco_type specific changes are included in the feature/mfvertcotype branch of COPE
    • scr/Cope includes the setting of the following environment variables which rely on COPE_DIR and B2O_DIR. These can be set in your Env_system file.
    export COPE_DEFINITIONS_PATH=${COPE_DIR}/share/cope
     export ODB_SCHEMA_FILE=${B2O_DIR}/share/b2o/ECMA.sch
     export ODB_CODE_MAPPINGS=${B2O_DIR}/share/b2o/odb_code_mappings.dat
    -export ODBCODEMAPPINGS=${B2O_DIR}/share/b2o/odb_code_mappings.dat
    +export ODBCODEMAPPINGS=${B2O_DIR}/share/b2o/odb_code_mappings.dat
    diff --git a/previews/PR1129/Observations/GNSS/index.html b/previews/PR1129/Observations/GNSS/index.html index 8aed5c294..0ca07b108 100644 --- a/previews/PR1129/Observations/GNSS/index.html +++ b/previews/PR1129/Observations/GNSS/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    GNSS ZTD observations

    Introduction

    The NRT GNSS delay data contain information about the amount of water vapour above the GNSS sites. The aim of the European E-GVAP programme is to provide its EUMETNET members with European GNSS delay and water vapour estimates for operational meteorology in near real-time. Currently, the E-GVAP network consists of more than 1500 GNSS sites.

    • E-GVAP Programme here

    GNSS ZTD data

    Raw data from GNSS sites are collected by a number of GNSS analysis centers, which process the data to estimate the Zenith Total Delays (ZTD) and other parameters. The ZTDs are then forwarded to a data server for distribution to meteorological institutes. The observations are currently distributed from the Met Office in two different formats: BUFR, distributed via the GTS to the meteorological centers, or ASCII, which may be downloaded via ftp.

    Preprocessing the GNSS ZTD data

    The preprocessing of these data is done locally and depends on whether you want to use the BUFR or the ASCII format. The ASCII option needs a local script to get the files from the Met Office server and transform them from the COST (E-GVAP) format into OBSOUL format. (There is an optional script in the HARMONIE scr directory, GNSStoOBSOUL, that can transform the ASCII files into OBSOUL format.)

    Apart from the preprocessing, a White List of sites to be assimilated in your domain is needed. It will contain the values of:

       statid lat lon alt dts bias sd obserr

    where statid is the name of the site (NNNNPPPP: NNNN=site, PPPP=processing centre), dts is the frequency in minutes between obs, sd is the standard deviation for that station, and obserr is the observation error. You are supposed to have calculated these values before launching the experiment.
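    For illustration only, one line of such a White List might look like the following; the site name, processing centre and all numbers are invented and not real E-GVAP values:

    KOKOMETO   60.32   24.96   54.2   15   0.0   8.0   10.0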

    Harmonie changes to assimilate GNSS ZTD data

    scr/

    nam/ This is where the White List should be placed, called list.gpssol.201512 for example.

    /src/arpifs/obs_preproc/

    • redgps.F90: this routine is where the horizontal thinning is done (Cy40), so the thinning distance can be selected here.

    /src/blacklist/

    • mf_blacklist.b: here it is possible to blacklist the GNSS observations in order to calculate the VarBC coefficients. This can be done by setting the apdss variable to experimental.
    diff --git a/previews/PR1129/Observations/Iasi/index.html b/previews/PR1129/Observations/Iasi/index.html index 7d8dd94e1..215fa66f0 100644 --- a/previews/PR1129/Observations/Iasi/index.html +++ b/previews/PR1129/Observations/Iasi/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


    IASI radiances (pre-) processing

    Introduction

    Typical IASI radiance data reception consists of a subset of 366 channels out of the full set of 8461. These cover the infrared absorption spectrum from 3.8 to 15.4 micrometers. In the context of NWP, the most useful IASI channels include (i) the temperature-sounding channels in the approximate channel index range 100-450, (ii) the humidity-sounding channels at 2800-3500 and 5000-5500 indices, and (iii) surface-sensing window channels at 500-1000. Most of the NWP impact from IASI is thought to come from group (i) and especially from the upper-tropospheric and lower-stratospheric channels in the range 200-300.

    Including IASI radiances in a HARMONIE run

    scr/include.ass

    scr/include.ass should be edited to "switch on" the use of IASI:

    export IASI_OBS=1             # IASI
     export ATOVS_SOURCE=mars       # local: EUMETCast;
     export IASI_SOURCE=ears        # mars:MARS | else: file in $OBDIR
     export IASI_RT_COEF=lblrtm     # genln2|kcarta|lblrtm
    @@ -70,4 +70,4 @@
     /

    Here we specify a list of 145 channels to be included in "band 1" of the cloud detection, i.e., in the main cloud detection channel band. The setup of the cloud detection involves not just the channel list but several additional tuning parameters that can be modified to make the screening more or less conservative. The default settings are specified in src/arpifs/obs_preproc/cloud_detect_setup.F90. A comprehensive description of the cloud detection scheme, including explanations of the various tuning parameter values, is given at the NWPSAF web site https://nwp-saf.eumetsat.int/site/software/aerosol-and-cloud-detection/documentation/.

    The log file of the Screening task will indicate whether the formatting of the namelist file is appropriate:

     READING CLOUD DETECTION FILE FOR IASI
      IASI  CLOUD DETECTION FILE READ OK

    In case of an error, the following is printed instead:

     READING CLOUD DETECTION FILE FOR IASI
      PROBLEM READING IASI CLOUD DETECTION FILE: Using Default Values

    The third possibility is that the namelist file does not appear in the working directory, in which case the printout statement is this:

     READING CLOUD DETECTION FILE FOR IASI
    - NO IASI  CLOUD DETECTION FILE : Using Default Values


    + NO IASI CLOUD DETECTION FILE : Using Default Values

    Please note that the use of the "Default Values" is generally not a desired outcome. This is because many of the cloud detection channels in the default list (see src/arpifs/obs_preproc/cloud_detect_setup.F90) are sensitive to the higher stratosphere and therefore may be severely affected by the relatively low model top of limited-area HARMONIE systems.

    References:

    McNally, AP, and PD Watts, 2003: A cloud detection algorithm for high-spectral-resolution infrared sounders. Quarterly Journal of the Royal Meteorological Society, 129, 3411-3423, doi:10.1256/qj.02.208.

    diff --git a/previews/PR1129/Observations/Modes/index.html b/previews/PR1129/Observations/Modes/index.html index d4e5bc7bc..f92baf50a 100644 --- a/previews/PR1129/Observations/Modes/index.html +++ b/previews/PR1129/Observations/Modes/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


    Mode-S Enhanced Surveillance

    Introduction

    From http://mode-s.knmi.nl:

    A novel method to measure wind and temperature is related to tracking and ranging by an enhanced surveillance (EHS) air traffic control (ATC) radar.

    Modern aircraft carry sensors to measure the Mach number (using a pitot-static probe) and the total air temperature (T). An EHS radar interrogates all aircraft in sight in a selective mode (Mode-S), to which the aircraft replies with a message containing, for example, magnetic heading, airspeed and Mach number. From this information wind and temperature can be extracted.

    Mode-S EHS data

    Description

    The data description is available here: http://mode-s.knmi.nl/data/

    Access

    Access to MUAC Mode-S EHS data can be requested from KNMI by signing a Non Disclosure Agreement. Send an e-mail to mode-s@knmi.nl with a request like this:

    Dear Sir/Madam,
     
     On behalf of MyNHMS, My NHMS Full Name, I would like to request access to Mode-S EHS derived meteorological data made available by Maastricht Upper Area Centre (MUAC) of EUROCONTROL and KNMI.
     
    @@ -20,4 +20,4 @@
     
     END

    Processing using Oulan

    The processing of Mode-S EHS BUFR using Oulan is controlled by the following namelist entry in scr/Oulan:

    LMODES=.FALSE.

    Thinning of Mode-S

    Thinning of a bufr file

    A collection of Python scripts which directly thin the Mode-S CSV and BUFR files is available here: https://gitlab.com/haandes/emaddc-public.

    E.g. the emaddcc_thinning4.py script works with the large Mode-S_EMADDC_KNMI_oper_${DTG}.bufr file and thins the data in four dimensions: horizontally, vertically and in observation time closest to the analysis time. emaddcc_thinning4.py currently assumes valid temperature and wind observations at the same time and fixed vertical thinning intervals of:

    [300, 300, 600, 1000] m 

    which correspond to the vertical extents of the lowest, second lowest, third lowest and all higher boxes. The horizontal box width is variable; in the following example it is 40 km.

    The .py script is triggered in scr/Prepare_ob, with:

      nMsgs=`bufr_count $OBDIR/Mode-S_EMADDC_KNMI_oper_${DTG}.bufr`
       time python3 $HM_LIB/scr/emaddcc_thinning4.py --infile $OBDIR/Mode-S_EMADDC_KNMI_oper_${DTG}.bufr --box_width 40 --DTG $DTG --nMsgs $nMsgs --outfile emaddcc_thinned.bufr
    -  cat emaddcc_thinned.bufr  /dev/null >> $BUFRFILE

    + cat emaddcc_thinned.bufr /dev/null >> $BUFRFILE

    It takes about 1:35 min on Atos-Bologna and results in reduction of Mode-S data by a factor of 4-5.

    diff --git a/previews/PR1129/Observations/ObservationData/index.html b/previews/PR1129/Observations/ObservationData/index.html index 0ddb4bb76..3db2f4141 100644 --- a/previews/PR1129/Observations/ObservationData/index.html +++ b/previews/PR1129/Observations/ObservationData/index.html @@ -3,8 +3,8 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Observation data

    In off-line experiments the Prepare_ob script extracts observations from a data archive, e.g. from the MARS archive on the ECMWF platform, or from existing observation files available locally.

    ECMWF

    At ECMWF, the HARMONIE script scr/WriteMARSreq prepares the retrieval request file (retrin) for MARS; it is executed by scr/Prepare_ob.

    WriteMARSreq -d $DATE -t $TIME -r $RANGE -o $OBSLIST -m ./retrin -z $BUFRFILE -g $GEOL

    The variables above denote

    LOCAL

    Otherwise, this step consists of fetching (or waiting for) the observations stored in $OBDIR, defined in ecf/config_exp.h. In that case one can use the command "cat" to merge different observations into one BUFR file, ob${DTG}; see the sketch below. In general, HIRLAM services are adopting SAPP, ECMWF's scalable acquisition and pre-processing system, to process (conventional) GTS reports and other observational data for use in operational NWP. SAPP produces BUFR encoded in the same way as observational BUFR data available in the MARS archive.
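    A minimal sketch of the local case (the input file names are assumptions; only $OBDIR and the ob${DTG} target name come from the description above):

    cd $OBDIR
    # merge locally provided BUFR files into the single input file used downstream
    cat synop_${DTG}.bufr temp_${DTG}.bufr amdar_${DTG}.bufr > ob${DTG}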

    diff --git a/previews/PR1129/Observations/ObservationPreprocessing/index.html b/previews/PR1129/Observations/ObservationPreprocessing/index.html index eba49aef7..1f3084506 100644 --- a/previews/PR1129/Observations/ObservationPreprocessing/index.html +++ b/previews/PR1129/Observations/ObservationPreprocessing/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


    HARMONIE Observation Preprocessing

    Introduction

    The following figure shows different schematic steps in the HARMONIE data assimilation system. It is worth mentioning some differences between the observation pre-processing systems used by ECMWF, Météo France, and HIRLAM. Some of these differences are listed below:

     | AROME/HARMONIE-AROME | IFS
    data format/content | BUFR, but sometimes with own table | BUFR with WMO code
    creation of ODB database | Bator converts BUFR to ODB | b2o/bufr2odb converts BUFR to ODB
    blacklisting technique | Bator (LISTE_LOC, LISTE_NOIRE_DIAP), Screening (hirlam_blacklist.B) & Minim (NOTVAR namelist) | Screening only

    Observation file preparation

    Preprocessing Software

    • Bator: Bator - reads BUFR/HDF5/OBSOUL observation data and writes ODBs used by data assimilation

    Other possibilities include:

    • Oulan: Oulan - Converts conventional BUFR data to OBSOUL file that is read by BATOR
    • Cope: Cope - preparation of ODBs used by data assimilation (in development)
    diff --git a/previews/PR1129/Observations/Oulan/index.html b/previews/PR1129/Observations/Oulan/index.html index 7cf8e9d1a..fe8cf6a46 100644 --- a/previews/PR1129/Observations/Oulan/index.html +++ b/previews/PR1129/Observations/Oulan/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


    OBSOUL creation: Oulan

    General Description

    The pre-processing step creates ODB (Observational Data Base) from various observation data files possibly in different formats.

    • Software: The programs used for pre-processing (ShuffleBufr, oulan and BATOR) are not part of the IFS code. oulan is software developed at Météo France to extract observations from their local database (BDM). The output of oulan (OBSOUL) is one of the inputs of BATOR. BATOR is also software developed at Météo France to generate the ODB (Observational DataBase) database for the ARPEGE/ALADIN/HARMONIE analysis system. ODB is tailor-made database software developed at ECMWF to manage the very large observational data volumes assimilated in the IFS 4DVAR system, and to enable flexible post-processing of these data. We use oulan to generate an OBSOUL file from different BUFR files (note that you can easily change the oulan program to handle data in a format other than BUFR; for example, in OPLACE data processing some files are in netCDF format). The OBSOUL file is an ASCII-formatted file, the content of which is similar to that of the CMA (Central Memory Array, the packing format in use in the HIRLAM data assimilation system). Our version of oulan is placed under the "util" directory in the repository. HARMONIE's BATOR originates from the MF export-pack. The figure below describes the mechanism of the observation pre-processing in HARMONIE DA. To sum up, ShuffleBufr splits different observations into BUFR files, then oulan creates the OBSOUL file, and BATOR creates the ODB file using the satellite BUFR/GRIB/BIN files and the OBSOUL one.
    • Compilation: oulan, Shufflebufr are compiled using gmkpack or makeup.
    • Scripts: Oulan
    • Input/output
    • oulan input: BUFR files; output: the OBSOUL file in ASCII format

    ShuffleBufr

    ShuffleBufr splits different observations into separate BUFR files according to the IFS observation type/sub-type definition. Some of them (essentially those of conventional observations) are then fed to OULAN; the others go directly into BATOR.

        PROGRAM SHUFFLEBUFR
         Split and shuffle BUFR file into  specific BUFR files for OULAN
     
         Usage: SHUFFLEBUFR -i <bufr_file> [-s1|-s2|-s3]  [-a] [-r]
    @@ -29,4 +29,4 @@
           -e "s/SLNEWSHIPBUFR/$SLNEWSHIPBUFR/" \
           -e "s/SLNEWBUOYBUFR/$SLNEWBUOYBUFR/" \
           -e "s/SLNEWTEMPBUFR/$SLNEWTEMPBUFR/" \
    -      ${NAMELIST} >NAMELIST

    + ${NAMELIST} >NAMELIST
  • run oulan

    $BINDIR/oulan
  • process GNSS data. If $GNSS_OBS is set to 1 then GNSS observations are added to the OBSOUL file and whitelisting is carried out using PREGPSSOL

  • New BUFR templates

    Valid for HARMONIE 40h1 and later

    The use of new-format (GTS WMO) BUFR is controlled in scr/include.ass by LNEWSYNOPBUFR, LNEWSHIPBUFR, LNEWBUOYBUFR and LNEWTEMPBUFR (set to 0 or 1); an illustrative setting is shown below. These environment variables control namelist settings in the Oulan script. GTS and ECMWF BUFR were used to guide the code changes, so Oulan handles either "flavour" of BUFR. Local changes may be required if your locally produced BUFR, in particular the section 1 data sub-type settings, does not follow WMO and/or ECMWF practices.
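    For illustration, the switches could look as follows in scr/include.ass; which report types are moved to the new templates is a local choice, so the values below are only an example:

    export LNEWSYNOPBUFR=1   # SYNOP reports in the new (GTS WMO) BUFR templates
    export LNEWSHIPBUFR=1    # SHIP reports
    export LNEWBUOYBUFR=1    # BUOY reports
    export LNEWTEMPBUFR=0    # TEMP reports still in the old encoding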

    The ECMWF wiki contains updates regarding the quality of the new BUFR HR observations. See the following ECMWF wiki pages for further information.

    diff --git a/previews/PR1129/Observations/RadarData/index.html b/previews/PR1129/Observations/RadarData/index.html index 309905891..7e75f00b1 100644 --- a/previews/PR1129/Observations/RadarData/index.html +++ b/previews/PR1129/Observations/RadarData/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -


    Assimilation of Radar Data

    This documentation outlines how to retrieve, process and assimilate HDF5 radar data.

    HARMONIE compilation

    HIRLAM have made code changes to BATOR to allow the direct reading of HDF5 radar data and conversion to ODB suitable for use in the HARMONIE data assimilation system. If you wish to use these changes you must compile HARMONIE with support for HDF5. This requires the addition of -DUSE_HDF5 to the FDEFS in your makeup config file as well as adding hdf5 to EXTMODS. util/makeup/config.ECMWF.atos.gnu is an example of such a makeup config file.

    Format

    The BATOR code assumes the HDF5 radar data being read uses the OPERA Data Information Model (ODIM). See https://www.eumetnet.eu/wp-content/uploads/2021/07/ODIMH5v2.4.pdf for further information.

    Data retrieval

    Quality-controlled radar data can be retrieved from local archives, the OPERA Nimbus server (contact: Lukas Tuechler (Geosphere)), or the ODE (OPERA Development Environment) server (contact: Günther Haase (SMHI)).

    Data processing

    The HARMONIE script system requires that the OPERA HDF5 data files be stored in RADARDIR (defined in ecf/config_exp.h) and have a file name using the format ${HDFID}_qcvol_${DATE}T${HH}00.h5 (a staging sketch follows the list below), where:

    • HDFID is a 5 digit OPERA radar identifier
    • DATE is the date
    • HH is the hour
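    A staging sketch under these conventions (the source path is a placeholder; the radar identifier 02918 and the date/hour are taken from the refdata example further down):

    HDFID=02918     # 5 digit OPERA radar identifier
    DATE=20100808
    HH=03
    cp /path/to/incoming/qc_volume.h5 $RADARDIR/${HDFID}_qcvol_${DATE}T${HH}00.h5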

    Common pitfalls

    • Forgetting to add -DUSE_HDF5 correctly to your config file
    • Incorrect RADARDIR
    • Incorrect file names
    • Incorrect format entered in refdata - BATOR is quite strict about how it reads the information in refdata:
    02918zh  HDF5     radarv           20100808 03 

    Further reading

    Martin Ridal's radar data assimilation presentation

    diff --git a/previews/PR1129/Observations/SYNOP/index.html b/previews/PR1129/Observations/SYNOP/index.html index ca84181c9..fc9fbe5da 100644 --- a/previews/PR1129/Observations/SYNOP/index.html +++ b/previews/PR1129/Observations/SYNOP/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    SYNOP observations

    This page documents how SYNOP observations (z, ps, u10m, t2m, rh2m) are assimilated. An overview of GTS messages, BUFR, conversion to ODB and screening options is provided.

    SYNOP on the GTS

    BUFR

    Conversion to ODB

    At this stage we decide whether we want to assimilate geopotential, calculated from mean sea level pressure (MSLP) or surface pressure (Ps), or Ps available in the SYNOP BUFR report.

    Surface pressure

    In the context of Bator:

    diff --git a/previews/PR1129/Observations/Scatt/index.html b/previews/PR1129/Observations/Scatt/index.html index 17979e9c9..96447f9be 100644 --- a/previews/PR1129/Observations/Scatt/index.html +++ b/previews/PR1129/Observations/Scatt/index.html @@ -3,5 +3,5 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Scatterometers

    Background

    The EUMETSAT OSI SAF produces different scatterometer wind products at KNMI and more will become available in 2019:

    • C-band ASCAT-A/B/C overpassing at 9:30/21:30 Local Solar Time (LST), since 2007/2011/2019;
    • Ku-band ScatSat overpassing at 8:45/20:45 LST, since 2017;
    • Ku-band HY2A/B overpassing at 6:00/18:00 LST, since 2013 (n.a. in NRT)/2019;
    • Ku-band CFOSAT overpassing at 7:00/19:00 LST, expected 2019;
    • Ku-band OSCAT3 overpassing at 12:00/24:00, expected 2019;
    • C/Ku-band WindRad overpassing at 6:00/18:00, expected 2020.

    Note that the products have different ambiguity and noise properties, which are handled in the generic KNMI processing. We distinguish two types of scatterometers: (1) those with static beams (ASCAT) and (2) those with rotating beams (the rest).

    In the ECMWF model (on ~200 km scales) the availability of three-hourly observations is motivated by the experience of assimilating ASCAT and OSCAT (2.5 hours overpass time difference), which showed double the impact of assimilating ASCAT only. So, they appear as independent data sources for the model.

    Since ASCAT overpasses only twice per day we cannot fulfil the temporal requirement and can therefore not expect to analyze open ocean surface winds deterministically at 25 km scales with ASCAT only. Based on this analysis we should therefore focus on scales larger than 25 km (as ECMWF does), also for Harmonie, so typically on 100 km scales. This means that scales between ~25-100 km in Harmonie over open sea are mostly noise, which can be removed through supermodding (ref: Mate Mile's project). Note that more scatterometers will be available next year at more times a day (see above).

    ECMWF is testing ASCAT with different aggregation, thinning and weights in order to optimize scatterometer data assimilation; these results may be useful for the HARMONIE data assimilation strategy as well.

    ASCAT

    1. ASCAT-12.5km (or ASCAT-coastal) data are available on a 12.5 km grid.
    2. The resolution of ASCAT-12.5km is about 25 km (through the application of a Hanning filter with tails extending beyond 12.5 km)
    3. As a result, the errors of neighbouring observations are correlated. For the 6.25 km product:
      • along-track wind component l : neighbor 0.60; next-neighbor 0.19; next-next neighbor 0.02; total noise variance 0.385
      • cross-track wind component t : neighbor 0.51; next-neighbor 0.11; next-next neighbor 0.00; total noise variance 0.214
      This agrees well with the footprint overlap (see point 2). We expect similar values for ASCAT-12.5km, but this could easily be assessed in a more dedicated way.
    4. Triple collocation tests show observation error standard deviations for ASCAT-12.5km (or ASCAT-coastal) of ~ 0.7 m/s for u and v.
    5. The effective model resolution of Harmonie (with 2.5 km grid) is about 20-25 km.

    Based on this one may conclude that the resolution of ASCAT-12.5km and Harmonie is about the same, so the representativeness error is negligible and the total error is equal to the observation error, i.e., 0.7 m/s, and this value can be used for giving weight to ASCAT in Harmonie.

    However, we think this will not give the best impact. This is because if you want to analyse model states on 25 km scales (Harmonie effective resolution) deterministically, you need a forcing term which accounts for this resolution. Forcing can be either from orography (over land only) or observations. So, over sea we have to rely on the density of the observation network. To analyse scales up to 25 km deterministically over sea requires high density observations both in space and time, i.e., for the latter at least every hour. This is corroborated by studies with ASCAT A and B, separated in time by 50 minutes, showing high correlation of ASCAT divergence and convergence with moist convection rain, but negligible correlation between convergence or divergence of the two passes.

    Since ASCAT overpasses only twice per day we cannot fulfil the temporal requirement and can therefore not expect to analyse ocean surface winds deterministically at 25 km scales with ASCAT only. Based on this analysis we should therefore focus on scales larger than 25 km (as ECMWF does), also for Harmonie, so typically on 100 km scales. This means that scales between ~25 and 100 km in Harmonie over sea are mostly noise, which can be removed through supermodding, i.e., the project Mate Mile is working on.

    KNMI are waiting for a data feed from EUMETSAT. Level 1 ASCAT data available 14 March 2019 here

    Other scatterometers

    1. 25 km data are generally available on the satellite swath grid of wind vector cells (WVCs)
    2. The resolution of this 25 km data is around 100 km (through the application of a spatial filter that successfully suppresses both wind direction ambiguities and noise)
    3. As a result, the errors of neighboring observations are correlated over a distance of 100 km or more
    4. Triple collocation tests show observation error standard deviation ~ 0.7 m/s for u and v
    5. Biases of up to 0.5 m/s exist at warm and cold SSTs, which are being corrected; winds around nadir and, to a lesser extent, in the outer swath are also sometimes biased; the IFS takes account of this, but may need retuning for CFOSAT

    Further reading

    More information is available on the OSI SAF wind site in the form of training material, product manuals, scientific publications, verification reports and monitoring information. Support and service messages for all products can be obtained through scat at knmi.nl.

    The EUMETSAT NWP SAF provides the following reports:

    Model

    Enable assimilation

    • Set SCATT_OBS=1 in scr/include.ass
    • Ensure ascat${DTG} files are available in $OBDIR (defined in ecf/config_exp.h )

    Technical information

    • Referred to as NSCAT3 in arpifs (see src/arpifs/module/yomcoctp.F90)
    • From https://apps.ecmwf.int/odbgov
      • obstype=9
      • codetype=139
      • sensor=190
      • varno=125/124 for the ambiguous u/v wind components

    Issues (CY40/CY43)

    Thinning: NASCAWVC

    • Number of ASCAT wave vector cells
    • Defined in src/arpifs/module/yomthlim.F90
    • Default, set in src/arpifs/obs_preproc/sufglim.F90, is 42 (for 25-km product)
    • Set to 82 for the 12.5-km scatterometer product in nam/harmonie_namelists.pm (possibly also in sufglim.F90; to be checked)

    Observation error

    • Set by Bator (src/odb/pandor/module/bator_init_mod.F90) u_err=1.39, v_err=1.54
    • Suggested values from KNMI: u_err=1.4, v_err=1.4
    • ZWE=2.0 set in src/arpifs/obs_preproc/nscatin.F90 but not used (I think)
    • ObsErr in the Jo-table is the RMS of all ASCAT obs_error values, SQRT(0.5*(u_err^2 + v_err^2))
    • sigma_o can be set by Bator in the NADIRS namelist:
      ECTERO(9,139,125,1) = 1.39_JPRB
      -ECTERO(9,139,124,1) = 1.54_JPRB
    +

    Scatterometers

    Background

    The EUMETSAT OSI SAF produces different scatterometer wind products at KNMI and more will become available in 2019:

    • C-band ASCAT-A/B/C overpassing at 9:30/21:30 Local Solar Time (LST), since 2007/2011/2019;
    • Ku-band !ScatSat overpassing at 8:45/20:45 LST, since 2017;
    • Ku-band HY2A/B overpassing at 6:00/18:00 LST, since 2013 (n.a. in NRT)/2019;
    • Ku-band CFOSAT overpassing at 7:00/19:00 LST, expected 2019;
    • Ku-band OSCAT3 overpassing at 12:00/24:00, expected 2019;
    • C/Ku-band !WindRad overpassing at 6:00/18:00, expected 2020.

    Note that the products have different ambiguity and noise properties, which are handled in the generic KNMI processing. We distinguish two types of scatterometers: (1) those with static beams (ASCAT) and (2) those with rotating beams (the rest).

    In the ECMWF model (on ~200 km scales) the availability of three-hourly observations is motivated by the experience of assimilating ASCAT and OSCAT (2.5 hours overpass time difference), which showed double the impact of assimilating ASCAT alone. They therefore act as independent data sources for the model.

    Since ASCAT overpasses only twice per day we cannot fulfil the temporal requirement and can therefore not expect to analyse open ocean surface winds deterministically at 25 km scales with ASCAT only. Based on this analysis we should therefore focus on scales larger than 25 km (as ECMWF does), also for Harmonie, so typically on 100 km scales. This means that scales between ~25 and 100 km in Harmonie over open sea are mostly noise, which can be removed through supermodding (ref: Mate Mile's project). Note that more scatterometers will be available next year at more times of day (see above).

    ECMWF is testing ASCAT with different aggregation, thinning and weights in order to optimize scatterometer data assimilation; the results may be useful for the HARMONIE data assimilation strategy as well.

    ASCAT

    1. ASCAT-12.5km (or ASCAT-coastal) data are available on a 12.5 km grid.
    2. The resolution of ASCAT-12.5km is about 25 km (through the application of a Hanning filter with tails extending beyond 12.5 km)
    3. As a result, the errors of neighbouring observations are correlated. For the 6.25 km product:
      • along-track wind component l : neighbor 0.60; next-neighbor 0.19; next-next neighbor 0.02; total noise variance 0.385
      • cross-track wind component t : neighbor 0.51; next-neighbor 0.11; next-next neighbor 0.00; total noise variance 0.214
      This agrees well with the footprint overlap (see point 2). We expect similar values for ASCAT-12.5km, but this could easily be assessed in a more dedicated study.
    4. Triple collocation tests show an observation error standard deviation for ASCAT-12.5km (or ASCAT-coastal) of ~ 0.7 m/s for u and v.
    5. The effective model resolution of Harmonie (with 2.5 km grid) is about 20-25 km.

    Based on this one may conclude that the resolution of ASCAT-12.5km and Harmonie is about the same, so the representativeness error is negligible and the total error is equal to the observation error, i.e., 0.7 m/s, and that this value should be used to give weight to ASCAT in Harmonie.

    However, we think this will not give the best impact. This is because if you want to analyse model states on 25 km scales (the Harmonie effective resolution) deterministically, you need a forcing term which accounts for this resolution. Forcing can come either from orography (over land only) or from observations. So, over sea we have to rely on the density of the observation network. Analysing scales down to 25 km deterministically over sea requires high-density observations both in space and time, i.e., for the latter at least every hour. This is corroborated by studies with ASCAT A and B, separated in time by 50 minutes, showing high correlation of ASCAT divergence and convergence with moist convective rain, but negligible correlation between the convergence or divergence of the two passes.

    Since ASCAT overpasses only twice per day we cannot fulfil the temporal requirement and can therefore not expect to analyse ocean surface winds deterministically at 25 km scales with ASCAT only. Based on this analysis we should therefore focus on scales larger than 25 km (as ECMWF does), also for Harmonie, so typically on 100 km scales. This means that scales between ~25 and 100 km in Harmonie over sea are mostly noise, which can be removed through supermodding, i.e., the project Mate Mile is working on.

    KNMI are waiting for a data feed from EUMETSAT. Level 1 ASCAT data available 14 March 2019 here

    Other scatterometers

    1. 25 km data are generally available on the satellite swath grid of wind vector cells (WVCs)
    2. The resolution of this 25 km data is around 100 km (through the application of a spatial filter that successfully suppresses both wind direction ambiguities and noise)
    3. As a result, the errors of neighboring observations are correlated over a distance of 100 km or more
    4. Triple collocation tests show observation error standard deviation ~ 0.7 m/s for u and v
    5. Biases of up to 0.5 m/s exist at warm and cold SSTs, which are being corrected; winds around nadir and, to a lesser extent, in the outer swath are also sometimes biased; the IFS takes account of this, but may need retuning for CFOSAT

    Further reading

    More information is available on the OSI SAF wind site in the form of training material, product manuals, scientific publications, verification reports and monitoring information. Support and service messages for all products can be obtained through scat at knmi.nl.

    The EUMETSAT NWP SAF provides the following reports:

    Model

    Enable assimilation

    • Set SCATT_OBS=1 in scr/include.ass
    • Ensure ascat${DTG} files are available in $OBDIR (defined in ecf/config_exp.h )
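
    As an illustration, a minimal shell sketch of these two steps is given below. The DTG value, the OBDIR path and the use of a plain export are only assumptions for the example; in a real experiment the switch lives in scr/include.ass and OBDIR is defined in ecf/config_exp.h.

      # hypothetical example values; in practice DTG and OBDIR come from the experiment setup
      DTG=2024110600
      OBDIR=$HOME/hm_obs

      # 1. switch scatterometer assimilation on (normally set in scr/include.ass)
      export SCATT_OBS=1

      # 2. check that the expected observation file is present
      if [ ! -f "$OBDIR/ascat$DTG" ]; then
        echo "Missing observation file: $OBDIR/ascat$DTG" >&2
      fi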

    Technical information

    • Referred to as NSCAT3 in arpifs (see src/arpifs/module/yomcoctp.F90)
    • From https://apps.ecmwf.int/odbgov
      • obstype=9
      • codetype=139
      • sensor=190
      • varno=125/124 for the ambiguous u/v wind components

    Issues (CY40/CY43)

    Thinning: NASCAWVC

    • Number of ASCAT wave vector cells
    • Defined in src/arpifs/module/yomthlim.F90
    • Default, set in src/arpifs/obs_preproc/sufglim.F90, is 42 (for 25-km product)
    • Set to 82 for the 12.5-km scatterometer product in nam/harmonie_namelists.pm (possibly also in sufglim.F90; to be checked)

    Observation error

    • Set by Bator (src/odb/pandor/module/bator_init_mod.F90) u_err=1.39, v_err=1.54
    • Suggested values from KNMI: u_err=1.4, v_err=1.4
    • ZWE=2.0 set in src/arpifs/obs_preproc/nscatin.F90 but not used (I think)
    • ObsErr in the Jo-table is the RMS of all ASCAT obs_error values, SQRT(0.5*(u_err^2 + v_err^2)); see the worked example after this list
    • sigma_o can be set by Bator in the NADIRS namelist:
      ECTERO(9,139,125,1) = 1.39_JPRB
      +ECTERO(9,139,124,1) = 1.54_JPRB
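
    As a worked example of the RMS combination above (only an illustration of the formula, using the Bator default values quoted earlier, not output from the code):

      # combine the u/v observation errors as in the Jo-table (illustrative only)
      u_err=1.39
      v_err=1.54
      awk -v u="$u_err" -v v="$v_err" 'BEGIN { printf "ObsErr = %.3f m/s\n", sqrt(0.5*(u*u + v*v)) }'
      # prints: ObsErr = 1.467 m/s
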
    diff --git a/previews/PR1129/Observations/Seviri/index.html b/previews/PR1129/Observations/Seviri/index.html index 6b91479c5..d5cbfc382 100644 --- a/previews/PR1129/Observations/Seviri/index.html +++ b/previews/PR1129/Observations/Seviri/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    SEVIRI

    Background

    The Spinning Enhanced Visible and InfraRed Imager (SEVIRI) is MSG's primary instrument and has the capacity to observe the Earth in 12 spectral channels. Eight of the channels measure in the infrared part of the spectrum; the rest are visible channels (one of them at high resolution). The horizontal resolution is 3 km, except for the high-resolution visible channel, which is 1 km. The time resolution is one image every 15 minutes.

    SEVIRI Channel   Main characteristics
    IR 3.9           Used at night to detect fog and very low clouds. Window channel of CO2
    WV 6.2           Water Vapour Channel
    WV 7.3           Water Vapour Channel
    IR 8.7           Window channel of H2O
    IR 9.7           Ozone absorption channel. Not suitable for data assimilation
    IR 10.8          Window channel of H2O
    IR 12.0          Window channel of H2O
    IR 13.4          CO2 absorption band

    SEVIRI channels and their characteristics

    Normalised Weighting Functions

    Figure of normalised weighting functions

    See Eumetsat seviri for more detail on the instrument.

    Radiance data from geostationary satellites have been assimilated in global numerical weather prediction models for some time (Köpken et al., 2003; Szyndel et al., 2004). Their high spatial and temporal resolution is particularly beneficial on global scales and in regions with a paucity of ground-based measurements. For example, the assimilation of radiance data from Meteosat-8 and Meteosat-11 continues to be of value to ECMWF's IFS data assimilation system (Burrows, 2020). Similarly, limited area models can benefit from these radiances.

    Preparing data

    This section describes the workflow for preparing SEVIRI observations for assimilation.

    Radiances are processed in order to reduce the file size and change the file format, using the NWC-SAF products to discriminate between clear-sky and cloudy pixels.

    Links to NWC-SAF software and description:

    Model settings

    Enable assimilation

    • Set SEVIRI_OBS=1 in scr/include.ass
    • Ensure seviri${DTG}.nc files are available in $OBDIR (defined in ecf/config_exp.h )

    Preparing ODB

    The processing (NetCDF -> ODB) by Bator is controlled by namelist entries. A brief summary of some of the more relevant namelist entries is provided here.

    NADIRS

    Namelist entry   Type     Description
    InbTypeNetcdf    Integer  Number of NETCDF templates to read in the param.cfg file. Default value: 0
    MinSeviriSatid   Integer  Lowest satellite ID (SID) expected in the SEVIRI data (NETCDF format). Used as lower bound when allocating the NSEVIRI type. Default value: 0
    MaxSeviriSatid   Integer  Highest satellite ID (SID) expected in the SEVIRI data (NETCDF format). Used as upper bound when allocating the NSEVIRI type. Default value: 0
    MinMtvzaSatid    Integer  Lowest satellite ID (SID) expected in the MTVZA data (HDF5 format). Used as lower bound when allocating the HMTVZA type. Default value: 0
    MaxMtvzaSatid    Integer  Highest satellite ID (SID) expected in the MTVZA data (HDF5 format). Used as upper bound when allocating the HMTVZA type. Default value: 0

    e.g.

    &NADIRS
    +

    SEVIRI

    Background

    The Spinning Enhanced Visible and InfraRed Imager (SEVIRI) is MSG's primary instrument and has the capacity to observe the Earth in 12 spectral channels. Eight of the channels measure in the infrared part of the spectrum; the rest are visible channels (one of them at high resolution). The horizontal resolution is 3 km, except for the high-resolution visible channel, which is 1 km. The time resolution is one image every 15 minutes.

    SEVIRI Channel   Main characteristics
    IR 3.9           Used at night to detect fog and very low clouds. Window channel of CO2
    WV 6.2           Water Vapour Channel
    WV 7.3           Water Vapour Channel
    IR 8.7           Window channel of H2O
    IR 9.7           Ozone absorption channel. Not suitable for data assimilation
    IR 10.8          Window channel of H2O
    IR 12.0          Window channel of H2O
    IR 13.4          CO2 absorption band

    SEVIRI channels and their characteristics

    Normalised Weighting Functions

    Figure of normalised weighting functions

    See Eumetsat seviri for more detail on the instrument.

    Radiance data from geostationary satellites have been assimilated in global numerical weather prediction models for some time (Köpken et al., 2003; Szyndel et al., 2004). Their high spatial and temporal resolution is particularly beneficial on global scales and in regions with a paucity of ground-based measurements. For example, the assimilation of radiance data from Meteosat-8 and Meteosat-11 continues to be of value to ECMWF's IFS data assimilation system (Burrows, 2020). Similarly, limited area models can benefit from these radiances.

    Preparing data

    This section describes the workflow for preparing SEVIRI observations for assimilation.

    Radiances are processed in order to reduce the file size and change the file format, using the NWC-SAF products to discriminate between clear-sky and cloudy pixels.

    Links to NWC-SAF software and description:

    Model settings

    Enable assimilation

    • Set SEVIRI_OBS=1 in scr/include.ass
    • Ensure seviri${DTG}.nc files are available in $OBDIR (defined in ecf/config_exp.h )
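
    As an illustration, a minimal shell sketch of these settings is given below; the DTG value and the OBDIR path are only assumptions for the example, and ncdump (from the standard netCDF utilities) is assumed to be available.

      # hypothetical example values; in practice DTG and OBDIR come from the experiment setup
      DTG=2024110600
      OBDIR=$HOME/hm_obs

      # switch SEVIRI assimilation on (normally set in scr/include.ass)
      export SEVIRI_OBS=1

      # quick sanity check of the pre-processed radiance file
      ls -l "$OBDIR/seviri$DTG.nc"
      ncdump -h "$OBDIR/seviri$DTG.nc" | head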

    Preparing ODB

    The processing (NetCDF -> ODB) by Bator is controlled by namelist entries. A brief summary of some of the more relevant namelist entries is provided here.

    NADIRS

    Namelist entry   Type     Description
    InbTypeNetcdf    Integer  Number of NETCDF templates to read in the param.cfg file. Default value: 0
    MinSeviriSatid   Integer  Lowest satellite ID (SID) expected in the SEVIRI data (NETCDF format). Used as lower bound when allocating the NSEVIRI type. Default value: 0
    MaxSeviriSatid   Integer  Highest satellite ID (SID) expected in the SEVIRI data (NETCDF format). Used as upper bound when allocating the NSEVIRI type. Default value: 0
    MinMtvzaSatid    Integer  Lowest satellite ID (SID) expected in the MTVZA data (HDF5 format). Used as lower bound when allocating the HMTVZA type. Default value: 0
    MaxMtvzaSatid    Integer  Highest satellite ID (SID) expected in the MTVZA data (HDF5 format). Used as upper bound when allocating the HMTVZA type. Default value: 0

    e.g.

    &NADIRS
       InbTypeBufr = 200,
       InbTypeNetcdf = 1,
       InbTypeHdf5 = 2,
    @@ -37,4 +37,4 @@
       NSEVIRI(57)%NbChannels= 8,
       NSEVIRI(57)%Channels(1:8)= 1,2,3,4,5,6,7,8,
       NSEVIRI(57)%NamChannels(1:8)='IR_039','WV_062','WV_073','IR_087','IR_097','IR_108','IR_120','IR_134',
    -/

    Model settings (Screening and Minimisation)

    • Thinning:
      • One pixel in every five is selected in Bator.
      • In Screening the thinning distance is defined as 0.65°.
    • Observation error:
      • Set in Bator namelist
      • 1.7x0.9(SIGMAO_COEF)=1.54 for channels 2,3 & 8
      • 1.05x0.9(SIGMAO_COEF)=0.945 for the channels 4,6,7
    • VarBC:
      • AEMET use p0 for all channels with 24 h cycling.
    • Anything else:
      • "Safe" active channels are WV6.2 and WV7.3 (channels 2 and 3) over sea.
      • AEMET only use infrared channels, so in Bator the channel list is converted: ch 4 -> ch 1

    References

    Technical stuff:

    Further reading and links to reports/presentations:

    +/

    Model settings (Screening and Minimisation)

    • Thinning:
      • One pixel in every five is selected in Bator.
      • In Screening the thinning distance is defined as 0.65°.
    • Observation error:
      • Set in Bator namelist
      • 1.7x0.9(SIGMAO_COEF)=1.54 for channels 2,3 & 8
      • 1.05x0.9(SIGMAO_COEF)=0.945 for the channels 4,6,7
    • VarBC:
      • AEMET use p0 for all channels with 24 h cycling.
    • Anything else:
      • "Safe" active channels are WV6.2 and WV7.3 (channels 2 and 3) over sea.
      • AEMET only use infrared channels, so in Bator the channel list is converted: ch 4 -> ch 1

    References

    Technical stuff:

    Further reading and links to reports/presentations:

    diff --git a/previews/PR1129/Overview/Binaries/index.html b/previews/PR1129/Overview/Binaries/index.html index bb52fec67..5f5fed0e8 100644 --- a/previews/PR1129/Overview/Binaries/index.html +++ b/previews/PR1129/Overview/Binaries/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    HARMONIE binaries

    An installation of HARMONIE produces the following binaries:

    • ACADFA1D : Tool to generate initial and boundary data for MUSC
    • ADDPERT : Create initial perturbations
    • ADDSURF : Allows you to mix different files and add different fields
    • ALTO : Also known as PINUTS. Contains several diagnostic tools.
    • BATOR : Generate ODB from observations in various formats
    • bl95.x : Blacklist compiler, help program to generate object files from the blacklist
    • BLEND : Mixes two files
    • BLENDSUR : Mixes two files
    • cluster : Cluster ensemble members
    • CONVERT_ECOCLIMAP_PARAM : Generate binary files from ECOCLIMAP ascii files
    • dcagen : ODB handling tool
    • domain_prop : Helper program to return various model domain properties
    • FESTAT : Background error covariance calculations.
    • fldextr : Extracts data for verification from model history files. Reads FA from HARMONIE and GRIB from ECMWF/HIRLAM.
    • gl : Converts/interpolates between different file formats and projections. Used for boundary interpolation.
    • IOASSIGN/ioassign : ODB IO setup
    • LSMIX : Scale dependent mixing of two model states.
    • jbconv : Interpolates/extrapolates background error statistics files. For technical experimentation
    • lfitools : FA/LFI file manipulation tool
    • MASTERODB : The main binary for the forecast model, surface assimilation, climate generation, 3DVAR, fullpos and much more.
    • MTEN : Computation of moist tendencies
    • obsextr : Extract data for verification from BUFR files.
    • obsmon : Extract data for observation monitoring
    • odb98.x : ODB manipulation program
    • OFFLINE : The SURFEX offline model. Also called SURFEX
    • oulan : Converts observations in BUFR to OBSOUL format used by BATOR
    • PERTCMA : Perturbation of observations in ODB
    • PERTSFC : Surface perturbation scheme
    • PGD : Generates physiography files for SURFEX.
    • PREGPSSOL : Processing of GNSS data
    • PREP : Generate SURFEX initial files. Interpolates/translates between two SURFEX domains.
    • SFXTOOLS : Converts SURFEX output between FA and LFI format.
    • shuffle : Manipulation of ODB. Also called ODBTOOLS
    • ShuffleBufr : Split bufr data according to observation type, used in the observation preprocessing.
    • SODA : Surfex offline data assimilation
    • SPG : Stochastic pattern generator, https://github.com/gayfulin/SPG
    • SURFEX : The SURFEX offline model. Also called OFFLINE
    • tot_energy : Calculates the total energy of a model state. Is used for boundary perturbation scaling.
    • xtool : Compares two FA/LFI/GRIB files.
    +

    HARMONIE binaries

    An installation of HARMONIE produces the following binaries:

    • ACADFA1D : Tool to generate initial and boundary data for MUSC
    • ADDPERT : Create initial perturbations
    • ADDSURF : Allows you to mix different files and add different fields
    • ALTO : Also known as PINUTS. Contains several diagnostic tools.
    • BATOR : Generate ODB from observations in various formats
    • bl95.x : Blacklist compiler, help program to generate object files from the blacklist
    • BLEND : Mixes two files
    • BLENDSUR : Mixes two files
    • cluster : Cluster ensemble members
    • CONVERT_ECOCLIMAP_PARAM : Generate binary files from ECOCLIMAP ascii files
    • dcagen : ODB handling tool
    • domain_prop : Helper program to return various model domain properties
    • FESTAT : Background error covariance calculations.
    • fldextr : Extracts data for verification from model history files. Reads FA from HARMONIE and GRIB from ECMWF/HIRLAM.
    • gl : Converts/interpolates between different file formats and projections. Used for boundary interpolation.
    • IOASSIGN/ioassign : ODB IO setup
    • LSMIX : Scale dependent mixing of two model states.
    • jbconv : Interpolates/extrapolates background error statistics files. For technical experimentation
    • lfitools : FA/LFI file manipulation tool
    • MASTERODB : The main binary for the forecast model, surface assimilation, climate generation, 3DVAR, fullpos and much more.
    • MTEN : Computation of moist tendencies
    • obsextr : Extract data for verification from BUFR files.
    • obsmon : Extract data for observation monitoring
    • odb98.x : ODB manipulation program
    • OFFLINE : The SURFEX offline model. Also called SURFEX
    • oulan : Converts observations in BUFR to OBSOUL format used by BATOR
    • PERTCMA : Perturbation of observations in ODB
    • PERTSFC : Surface perturbation scheme
    • PGD : Generates physiography files for SURFEX.
    • PREGPSSOL : Processing of GNSS data
    • PREP : Generate SURFEX initial files. Interpolates/translates between two SURFEX domains.
    • SFXTOOLS : Converts SURFEX output between FA and LFI format.
    • shuffle : Manipulation of ODB. Also called ODBTOOLS
    • ShuffleBufr : Split bufr data according to observation type, used in the observation preprocessing.
    • SODA : Surfex offline data assimilation
    • SPG : Stochastic pattern generator, https://github.com/gayfulin/SPG
    • SURFEX : The SURFEX offline model. Also called OFFLINE
    • tot_energy : Calculates the total energy of a model state. Is used for boundary perturbation scaling.
    • xtool : Compares two FA/LFI/GRIB files.
    diff --git a/previews/PR1129/Overview/Content/index.html b/previews/PR1129/Overview/Content/index.html index 7c10e7215..227501216 100644 --- a/previews/PR1129/Overview/Content/index.html +++ b/previews/PR1129/Overview/Content/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Harmonie Content

    Overview

    Harmonie is HIRLAM's adaptation of the LAM version of the IFS/ARPEGE project. The common code shared with the ALADIN programme, Meteo France and ECMWF contains only the source code. Harmonie adds the build environment, scripts, support for a scheduler, and a number of diagnostic tools for file conversion and postprocessing. In summary, a git clone of harmonie from GitHub contains the following main directories:

    • config-sh : Configuration and job submission files for different platforms.
    • const : A selected number of constant files for bias correction, assimilation and different internal schemes. A large number of data for climate generation and the RTTOV software is kept outside of the repository. See [wiki:HarmonieSystemDocumentation#Downloaddata].
    • ecf : Directory for the main configuration file config_exp.h and the containers for the scheduler ECFLOW.
    • suites : Scripts and suite definition files for ECFLOW, the scheduler for HARMONIE.
    • nam : Namelists for different configurations.
    • scr : Scripts to run the different tasks.
    • src : The IFS/ARPEGE source code.
    • util : A number of utilities and support libraries.

    util

    The util directory contains the following main directories

    • auxlibs : Contains gribex, bufr, rgb and some dummy routines
    • binutils : https://www.gnu.org/software/binutils/
    • checknorms : Script for code norm checking
    • gl_grib_api : Boundary file generator and file converter
    • makeup : HIRLAM style compilation tool
    • musc : MUSC scripts
    • obsmon : Code to produce obsmon sqlite files
    • offline : SURFEX offline code
    • oulan : Converts conventional BUFR data to OBSOUL format read by bator.
    • RadarDAbyFA : Field alignment code
    +

    Harmonie Content

    Overview

    Harmonie is HIRLAM's adaptation of the LAM version of the IFS/ARPEGE project. The common code shared with the ALADIN programme, Meteo France and ECMWF contains only the source code. Harmonie adds the build environment, scripts, support for a scheduler, and a number of diagnostic tools for file conversion and postprocessing. In summary, a git clone of harmonie from GitHub contains the following main directories:

    • config-sh : Configuration and job submission files for different platforms.
    • const : A selected number of constant files for bias correction, assimilation and different internal schemes. A large number of data for climate generation and the RTTOV software is kept outside of the repository. See [wiki:HarmonieSystemDocumentation#Downloaddata].
    • ecf : Directory for the main configuration file config_exp.h and the containers for the scheduler ECFLOW.
    • suites : Scripts and suite definition files for ECFLOW, the scheduler for HARMONIE.
    • nam : Namelists for different configurations.
    • scr : Scripts to run the different tasks.
    • src : The IFS/ARPEGE source code.
    • util : A number of utilities and support libraries.

    util

    The util directory contains the following main directories

    • auxlibs : Contains gribex, bufr, rgb and some dummy routines
    • binutils : https://www.gnu.org/software/binutils/
    • checknorms : Script for code norm checking
    • gl_grib_api : Boundary file generator and file converter
    • makeup : HIRLAM style compilation tool
    • musc : MUSC scripts
    • obsmon : Code to produce obsmon sqlite files
    • offline : SURFEX offline code
    • oulan : Converts conventional BUFR data to OBSOUL format read by bator.
    • RadarDAbyFA : Field alignment code
    diff --git a/previews/PR1129/Overview/FileFormats/index.html b/previews/PR1129/Overview/FileFormats/index.html index 210450c11..5fbaf88dc 100644 --- a/previews/PR1129/Overview/FileFormats/index.html +++ b/previews/PR1129/Overview/FileFormats/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    File formats in HARMONIE

    Introduction

    The HARMONIE system reads and writes a number of different formats.

    FA files

    The default internal input/output format of HARMONIE for gridpoint, spectral and SURFEX data. GRIB is used as a way to pack the data, but the GRIB records cannot be used as such.

    • The header contains information about model domain, projection, spectral truncation, extension zone, boundary zone, vertical levels.
    • Only one date/time per file.
    • FA routines are found under ifsaux/fa
    • List or convert a file with gl
    • Other listing tool PINUTS

    Read more

    GRIB/GRIB2

    All FA files may be converted to GRIB after the forecast run. For the conversion between FA names and GRIB parameters check this table.

    • List or convert a GRIB file with gl

    NETCDF

    In climate mode all FA files may be converted to NETCDF after the forecast run. For the conversion between FA names and NETCDF parameters check util/gl/inc/nc_tab.h.

    • For the manipulation and listing of NETCDF files we refer to standard NETCDF tools.
    • NETCDF is also used as output data from some SURFEX tools.

    BUFR and ODB

    BUFR is the archiving/exchange format for observations. The Observation Database (ODB) is used for efficient handling of observations in the IFS. ODB is used for both input data and feedback information.

    Read more about observations in HARMONIE here.

    DDH (LFA files )

    Diagnostics by Horizontal Domains allows you to accumulate fluxes from different packages over different areas/points.

    • LFA files ( Autodocumented File Software )
    • gmapdoc
    • under util/ddh

    Misc

    • vfld/vobs files in a simple ASCII format used by the verification.
    • Obsmon files are stored in sqlite format.
    +

    File formats in HARMONIE

    Introduction

    The HARMONIE system reads and writes a number of different formats.

    FA files

    The default internal input/output format of HARMONIE for gridpoint, spectral and SURFEX data. GRIB is used as a way to pack the data, but the GRIB records cannot be used as such.

    • The header contains information about model domain, projection, spectral truncation, extension zone, boundary zone, vertical levels.
    • Only one date/time per file.
    • FA routines are found under ifsaux/fa
    • List or convert a file with gl
    • Other listing tool PINUTS

    Read more

    GRIB/GRIB2

    All FA files may be converted to GRIB after the forecast run. For the conversion between FA names and GRIB parameters check this table.

    • List or convert a GRIB file with gl
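
    If the ecCodes tools are installed (an assumption about the local toolchain, not something provided by HARMONIE itself), a converted GRIB file can also be inspected from the command line; the file name below is only an example.

      # list the contents of a converted GRIB file with ecCodes
      grib_ls fc_output.grib
      # or print a selected set of keys
      grib_ls -p shortName,level,dataDate,dataTime fc_output.grib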

    NETCDF

    In climate mode all FA files may be converted to NETCDF after the forecast run. For the conversion between FA names and NETCDF parameters check util/gl/inc/nc_tab.h.

    • For the manipulation and listing of NETCDF files we refer to standard NETCDF tools.
    • NETCDF is also used as output data from some SURFEX tools.

    BUFR and ODB

    BUFR is the archiving/exchange format for observations. The Observation Database (ODB) is used for efficient handling of observations in the IFS. ODB is used for both input data and feedback information.

    Read more about observations in HARMONIE here.

    DDH (LFA files )

    Diagnostics by Horizontal Domains allows you to accumulate fluxes from different packages over different areas/points.

    • LFA files ( Autodocumented File Software )
    • gmapdoc
    • under util/ddh

    Misc

    • vfld/vobs files in a simple ASCII format used by the verification.
    • Obsmon files are stored in sqlite format.
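
    As an illustration, an obsmon sqlite file can be inspected with the standard sqlite3 command-line shell; the file name below is hypothetical and the table layout depends on the obsmon version.

      # list tables and schema of an obsmon sqlite file (file name is only an example)
      sqlite3 obsmon.db ".tables"
      sqlite3 obsmon.db ".schema"
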
    diff --git a/previews/PR1129/Overview/Source/index.html b/previews/PR1129/Overview/Source/index.html index 12423c274..ad5f53425 100644 --- a/previews/PR1129/Overview/Source/index.html +++ b/previews/PR1129/Overview/Source/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Harmonie Source Code

    Introduction

    This wiki page summarizes the ARPEGE/IFS source code made available in the HARMONIE system. It is based on documents made available by YESSAD K. (METEO-FRANCE/CNRM/GMAP/ALGO). The relevant document for cycle 40 is available here (or directly here).

    HARMONIE Source Library Structure

    The main source of the HARMONIE system originates from IFS/ARPEGE and consists of a number of "project" sources. These are:

    • aeolus: Aeolus source code, a package for pre-processing satellite lidar wind data. Inactive for us.
    • aladin: specific routines only relevant to LAM, (limited area models, in particular ALADIN and AROME).
    • algor: application routines, e.g. to read LFI or ARPEGE files, interface routines for distributed memory environments, and some linear algebra routines such as the Lanczos algorithm and minimizers.
    • arpifs: global model routines (ARPEGE, IFS), and routines common to global and LAM models. This is the core of the ARPEGE/IFS software.
    • biper: Biperiodization routines for the LAM
    • blacklist: package for blacklisting
    • coupling: lateral coupling and spectral nudging for LAM models
    • etrans: spectral transforms for plane geometry, used for LAM
    • ifsaux: some application routines, for example reading or writing on “LFI” or ARPEGE files, interface routines for distributed memory environment
    • mpa: upper air meso-NH/AROME physics (also used in ARPEGE/ALADIN)
    • mse: surface processes in meso-NH/AROME (interface for SURFEX)
    • odb: ODB (Observational Data Base software), needed by ARPEGE/ALADIN for their analysis or their assimilation cycle
    • satrad: satellite data handling package, needed to run the model analysis/assimilation
    • surf: ECMWF surface scheme
    • surfex: surface processes in meso-NH/AROME - the externalized surface scheme SURFEX
    • trans: spectral transforms for spherical geometry, used for ARPEGE/IFS
    • utilities: utility packages, for operational FA to GRIB (PROGRID), OULAN, BATOR, or programs to operate on ODB and radiances bias correction

    Dependencies and hierarchy between each project

    Note: these project names are no longer valid – need to update

    • ARP+TFL+XRD+XLA+MPA+MSE+SURFEX: for ARPEGE forecasts with METEO-FRANCE physics.
    • ARP+ALD+TFL+TAL+XRD+XLA+BIP+MPA+MSE+SURFEX: for ALADIN or AROME forecasts.
    • ARP+TFL+XRD+XLA+SUR: for IFS forecasts with ECMWF physics.
    • ARP+TFL+XRD+XLA+MPA+MSE+SURFEX+BLA+ODB+SAT+AEO: for ARPEGE assimilations with METEO-FRANCE physics.
    • ARP+ALD+TFL+TAL+XRD+XLA+BIP+MPA+MSE+SURFEX+BLA+ODB+SAT+AEO: for ALADIN or AROME assimilations.
    • ARP+TFL+XRD+XLA+SUR+BLA+ODB+SAT+OBT+SCR+AEO: for IFS assimilations with ECMWF physics.

    Libraries under each project

    Note: this information may need to be updated for CY40

    ARPIFS

    • adiab
      • Adiabatic dynamics
      • Adiabatic diagnostics and intermediate quantities calculation, for example the geopotential height (routines GP... or GNH...).
      • Eulerian advections
      • Semi-Lagrangian advection and interpolators (routines LA...)
      • Semi-implicit scheme and linear terms calculation (routines SI..., SP..SI..)
      • Horizontal diffusion (routines SP..HOR..)
    • ald inc
      • function: functions used only in ALADIN
      • namelist: namelists read by ALADIN.
    • c9xx: specific configurations 901 to 999 routines (mainly configuration 923). Routines INCLI.. are used in configuration 923. Routines INTER... are interpolators used in configurations 923, 931, 932.
    • canari: routines used in the CANARI optimal interpolation. Their names generally start with CA.
    • canari common: empty directory to be deleted.
    • climate: some specific ARPEGE-CLIMAT routines.
    • common: often contains includes
    • control: control routines. Contains in particular STEPO and CNT... routines.
    • dfi: routines used in the DFI (digital filter initialisation) algorithm
    • dia: diagnostics other than FULL-POS. One finds some setup SU... routines specific to some diagnostics and some WR... routines doing file writing.
    • function: functions (in includes). The qa....h functions are used in CANARI, the fc....h functions are used in a large panel of topics.
    • interface: not automatic interfaces (currently empty).
    • kalman: Kalman filter.
    • module: all the types of module (variables declarations, type definition, active code).
    • mwave: micro-wave observations (SSM/I) treatment.
    • namelist: all namelists.
    • nmi: routines used in the NMI (normal mode initialisation) algorithm.
    • obs error: treatment of the observation errors in the assimilation.
    • obs preproc: observation pre-processing (some of them are called in the screening).
    • ocean: oceanic coupling, for climatic applications.
    • onedvar: 1D-VAR assimilation scheme used at ECMWF.
    • parallel: parallel environment, communications between processors.
    • parameter: empty directory to be deleted.
    • phys dmn: physics parameterizations used at METEO-FRANCE, and HIRLAM physics, ALARO physics.
    • phys ec: ECMWF physics. Some of these routines (FMR radiation scheme, Lopez convection scheme) are now also used in the METEO-FRANCE physics.
    • pointer: empty directory to be deleted.
    • pp obs: several applications
      • observation horizontal and vertical interpolator.
      • FULL-POS.
      • vertical interpolator common to FULL-POS and the observation interpolator; some of these routines may be used elsewhere.
    • setup: setup routines not linked with a very specific domain. More specific setup routines are spread among some other subdirectories.
    • sinvect: singular vectors calculation (configuration 601).
    • support: empty directory to be deleted.
    • transform: hat routines for spectral transforms.
    • utility: miscellaneous utilities, linear algebra routines, array deallocation routines.
    • var: routines involved in the 3DVAR and 4DVAR assimilation, some minimizers (N1CG1, CONGRAD), some specific 3DVAR and 4DVAR setup routines.
    • wave: empty directory to be deleted.

    ALADIN

    • adiab: adiabatic dynamics.
    • blending: blending scheme (currently only contains the procedure blend.ksh).
    • c9xx: specific configurations E901 to E999 routines (mainly configuration E923). Routines EINCLI.. are used in configuration E923. Routines EINTER... are interpolators used in configurations E923, E931, E932.
    • control: control routines.
    • coupling: lateral coupling by external lateral boundary conditions.
    • dia: diagnostics other than FULL-POS.
    • inidata: setup routines specific to file reading (initial conditions, LBC).
    • module: active code modules only used in ALADIN.
    • obs preproc: observation pre-processing (some of them are called in the screening).
    • parallel: parallel environment, communications between processors.
    • pp obs: several applications:
      • observation horizontal and vertical interpolator.
      • FULL-POS.
      • vertical interpolator common to FULL-POS and the observation interpolator; some of these routines may be used elsewhere.
    • programs: probably designed to contain procedures, but currently contains among others some blending routines, the place of which would be probably better in subdirectory "blending".
    • setup: setup routines not linked with a very specific domain. More specific setup routines are spread among some other subdirectories.
    • sinvect: singular vectors calculation (configuration E601).
    • transform: hat routines for spectral transforms.
    • utility: miscellaneous utilities, array deallocation routines.
    • var: routines involved in the 3DVAR and 4DVAR assimilation, some specific 3DVAR and 4DVAR setup routines.

    TFL

    • build: contains procedures.
    • external: routines which can be called from another project.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • module: all the types of module (variables declarations, type definition, active code).
      • tpm ...F90: variable declaration + type definition modules.
      • lt.... mod.F90: active code modules for Legendre transforms.
      • ft.... mod.F90: active code modules for Fourier transforms.
      • tr.... mod.F90: active code modules for transpositions.
      • su.... mod.F90: active code modules for setup.
    • programs: specific entries which can be used for TFL code validation. These routines are not called elsewhere.

    TAL

    • external: routines which can be called from another project.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • module: all the types of module (variables declarations, type definition, active code).
      • tpmald ...F90: variable declaration + type definition modules.
      • elt.... mod.F90: active code modules for N-S Fourier transforms.
      • eft.... mod.F90: active code modules for E-W Fourier transforms.
      • sue.... mod.F90: active code modules for setup.
    • programs: specific entries which can be used for TAL code validation. These routines are not called elsewhere.

    XRD

    • arpege: empty directory to be deleted.
    • bufr io: BUFR format files reading and writing.
    • cma: CMA format files reading and writing.
    • ddh: DDH diagnostics.
    • fa: ARPEGE (FA) files reading and writing.
    • grib io: ECMWF GRIB format files reading and writing.
    • grib mf: METEO-FRANCE GRIB format files reading and writing.
    • ioassign: empty directory to be deleted.
    • lanczos: linear algebra routines for Lanczos algorithm.
    • lfi: LFI format files reading and writing.
    • minim: linear algebra routines for minimizations. Contains the M1QN3 (quasi-Newton) minimizer.
    • misc: miscellaneous decks.
    • module: all the types of module (variables declarations, type definition, active code). There are a lot of mpl...F90 modules for parallel environment (interface to MPI parallel environment).
    • mrfstools: empty directory to be deleted.
    • newbufrio: empty directory to be deleted.
    • newcmaio: empty directory to be deleted.
    • not used: miscellaneous decks (unused decks to be deleted?).
    • pcma: empty directory to be deleted.
    • support: miscellaneous routines. Some of them do Fourier transforms, some others do linear algebra.
    • svipc: contains only svipc.c .
    • utilities: miscellaneous utilities.

    SUR

    • build: contains procedures.
    • external: routines which can be called from another project.
    • function: specific functions.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • module: all the types of module (variables declarations, type definition, active code).
      • yos ...F90: variable declaration + type definition modules.
      • su.... mod.F90 but not surf.... mod.F90: active code modules for setup.
      • surf.... mod.F90, v.... mod.F90: other active code modules.
    • offline: specific entries which can be used for SUR code validation. These routines are not called elsewhere.

    BLA

    • compiler.
    • include: not automatically generated interfaces, functions, and some other includes.
    • library: the only containing .F90 decks.
    • old2new.
    • scripts.

    SAT

    • bias.
    • emiss.
    • interface.
    • module.
    • mwave.
    • onedvar.
    • pre screen.
    • rtlimb.
    • rttov.
    • satim.
    • test. (Not described in detail; more information has to be provided by someone who knows the content of this project, but there is currently no specific documentation about this topic)

    UTI

    • add cloud fields: program to add 4 cloud variables (liquid water, ice, rainfall, snow) in ARPEGE files.
    • bator: BATOR software (reads observation data from an ASCII format file named OBSOUL and the blacklist, and writes them to an ODB format file with some additional information).
    • combi: combination of perturbations in an ensemble forecast (PEARP).
    • controdb: control of the number of observations.
    • extrtovs: unbias TOVS.
    • fcq: does quality control and writes this quality control in ODB files.
    • gobptout: PROGRIB? (converts ARPEGE files containing post-processed data into GRIB files).
    • include: all .h decks (functions, COMMON blocks, parameters).
    • mandalay: software MANDALAY.
    • module: all types of modules.
    • namelist: namelists specific to the applications stored in UTI (for example OULAN, BATOR).
    • oulan: OULAN software (the step just before BATOR: extracts observations from the BDM, samples the data in space and time, and writes the sampled data to an ASCII file called "OBSOUL").
    • pregpssol: Surface GPS processing.
    • prescat: Scatterometer data processing.
    • progrid: PROGRID? (converts ARPEGE files containing post-processed data into GRIB files).
    • progrid cadre: cf. progrid?
    • sst nesdis: program to read the SST on the BDAP. This project has its own entries.

    MPA

    It contains a first layer of directories:

    • chem: chemistry.
    • conv: convection.
    • micro: microphysics.
    • turb: turbulence.

    Each directory contains the following subdirectories

    • externals: routines which can be called from another project.
    • include: all the "include" decks (functions, COMMON blocks, parameters).
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • internals: other non-module routines; they cannot be called from another project.
    • module: all types of modules.

    SURFEX

    • ASSIM: Surface assimilation routines (please note that programs soda.F90, oi_main.F90 and varassim.F90 are located under mse/programs).
    • OFFLIN: Surface offline routines (please note that programs pgd.F90, prep.F90 and offline.F90 are located under mse/programs).
    • SURFEX: Surface routines for physiography (PGD), initialisation (PREP) and physical processes including e.g. land (ISBA), sea, town (TEB) and lakes.
    • TOPD: TOPMODEL (TOPography based MODEL) for soil hydrology.
    • TRIP: River routing model TRIP

    MSE

    • dummy: empty versions of some routines.
    • externals: routines which can be called from another project.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • internals: other non-module routines; they cannot be called from another project.
    • module: all types of modules.
    • new: file conversion routines, e.g. fa2lfi, lfi2fa
    • programs: SURFEX programs

    References and documentation

    +

    Harmonie Source Code

    Introduction

    This wiki page summarizes the ARPEGE/IFS source code made available in the HARMONIE system. It is based on documents made available by YESSAD K. (METEO-FRANCE/CNRM/GMAP/ALGO). The relevant document for cycle 40 is available here (or directly here).

    HARMONIE Source Library Structure

    The main source of the HARMONIE system originates from IFS/ARPEGE and consists of a number of "project" sources. These are:

    • aeolus: Aeolus source code, a package for pre-processing satellite lidar wind data. Inactive for us.
    • aladin: specific routines only relevant to LAM, (limited area models, in particular ALADIN and AROME).
    • algor: application routines, e.g. to read LFI or ARPEGE files, interface routines for distributed memory environments, and some linear algebra routines such as the Lanczos algorithm and minimizers.
    • arpifs: global model routines (ARPEGE, IFS), and routines common to global and LAM models. This is the core of the ARPEGE/IFS software.
    • biper: Biperiodization routines for the LAM
    • blacklist: package for blacklisting
    • coupling: lateral coupling and spectral nudging for LAM models
    • etrans: spectral transforms for plane geometry, used for LAM
    • ifsaux: some application routines, for example reading or writing on “LFI” or ARPEGE files, interface routines for distributed memory environment
    • mpa: upper air meso-NH/AROME physics (also used in ARPEGE/ALADIN)
    • mse: surface processes in meso-NH/AROME (interface for SURFEX)
    • odb: ODB (Observational Data Base software), needed by ARPEGE/ALADIN for their analysis or their assimilation cycle
    • satrad: satellite data handling package, needed to run the model analysis/assimilation
    • surf: ECMWF surface scheme
    • surfex: surface processes in meso-NH/AROME - the externalized surface scheme SURFEX
    • trans: spectral transforms for spherical geometry, used for ARPEGE/IFS
    • utilities: utility packages, for operational FA to GRIB (PROGRID), OULAN, BATOR, or programs to operate on ODB and radiances bias correction

    Dependencies and hierarchy between each project

    Note: these project names are no longer valid – need to update

    • ARP+TFL+XRD+XLA+MPA+MSE+SURFEX: for ARPEGE forecasts with METEO-FRANCE physics.
    • ARP+ALD+TFL+TAL+XRD+XLA+BIP+MPA+MSE+SURFEX: for ALADIN or AROME forecasts.
    • ARP+TFL+XRD+XLA+SUR: for IFS forecasts with ECMWF physics.
    • ARP+TFL+XRD+XLA+MPA+MSE+SURFEX+BLA+ODB+SAT+AEO: for ARPEGE assimilations with METEO-FRANCE physics.
    • ARP+ALD+TFL+TAL+XRD+XLA+BIP+MPA+MSE+SURFEX+BLA+ODB+SAT+AEO: for ALADIN or AROME assimilations.
    • ARP+TFL+XRD+XLA+SUR+BLA+ODB+SAT+OBT+SCR+AEO: for IFS assimilations with ECMWF physics.

    Libraries under each project

    Note: this information may need to be updated for CY40

    ARPIFS

    • adiab
      • Adiabatic dynamics
      • Adiabatic diagnostics and intermediate quantities calculation, for example the geopotential height (routines GP... or GNH...).
      • Eulerian advections
      • Semi-Lagrangian advection and interpolators (routines LA...)
      • Semi-implicit scheme and linear terms calculation (routines SI..., SP..SI..)
      • Horizontal diffusion (routines SP..HOR..)
    • ald inc
      • function: functions used only in ALADIN
      • namelist: namelists read by ALADIN.
    • c9xx: specific configurations 901 to 999 routines (mainly configuration 923). Routines INCLI.. are used in configuration 923. Routines INTER... are interpolators used in configurations 923, 931, 932.
    • canari: routines used in the CANARI optimal interpolation. Their names generally start with CA.
    • canari common: empty directory to be deleted.
    • climate: some specific ARPEGE-CLIMAT routines.
    • common: often contains includes
    • control: control routines. Contains in particular STEPO and CNT... routines.
    • dfi: routines used in the DFI (digital filter initialisation) algorithm
    • dia: diagnostics other than FULL-POS. One finds some setup SU... routines specific to some diagnostics and some WR... routines doing file writing.
    • function: functions (in includes). The qa....h functions are used in CANARI, the fc....h functions are used in a large panel of topics.
    • interface: not automatic interfaces (currently empty).
    • kalman: Kalman filter.
    • module: all the types of module (variables declarations, type definition, active code).
    • mwave: micro-wave observations (SSM/I) treatment.
    • namelist: all namelists.
    • nmi: routines used in the NMI (normal mode initialisation) algorithm.
    • obs error: treatment of the observation errors in the assimilation.
    • obs preproc: observation pre-processing (some of them are called in the screening).
    • ocean: oceanic coupling, for climatic applications.
    • onedvar: 1D-VAR assimilation scheme used at ECMWF.
    • parallel: parallel environment, communications between processors.
    • parameter: empty directory to be deleted.
    • phys dmn: physics parameterizations used at METEO-FRANCE, and HIRLAM physics, ALARO physics.
    • phys ec: ECMWF physics. Some of these routines (FMR radiation scheme, Lopez convection scheme) are now also used in the METEO-FRANCE physics.
    • pointer: empty directory to be deleted.
    • pp obs: several applications
      • observation horizontal and vertical interpolator.
      • FULL-POS.
      • vertical interpolator common to FULL-POS and the observation interpolator; some of these routines may be used elsewhere.
    • setup: setup routines not linked with a very specific domain. More specific setup routines are spread among some other subdirectories.
    • sinvect: singular vectors calculation (configuration 601).
    • support: empty directory to be deleted.
    • transform: hat routines for spectral transforms.
    • utility: miscellaneous utilities, linear algebra routines, array deallocation routines.
    • var: routines involved in the 3DVAR and 4DVAR assimilation, some minimizers (N1CG1, CONGRAD), some specific 3DVAR and 4DVAR setup routines.
    • wave: empty directory to be deleted.

    ALADIN

    • adiab: adiabatic dynamics.
    • blending: blending scheme (currently only contains the procedure blend.ksh).
    • c9xx: specific configurations E901 to E999 routines (mainly configuration E923). Routines EINCLI.. are used in configuration E923. Routines EINTER... are interpolators used in configurations E923, E931, E932.
    • control: control routines.
    • coupling: lateral coupling by external lateral boundary conditions.
    • dia: diagnostics other than FULL-POS.
    • inidata: setup routines specific to file reading (initial conditions, LBC).
    • module: active code modules only used in ALADIN.
    • obs preproc: observation pre-processing (some of them are called in the screening).
    • parallel: parallel environment, communications between processors.
    • pp obs: several applications:
      • observation horizontal and vertical interpolator.
      • FULL-POS.
      • vertical interpolator common to FULL-POS and the observation interpolator; some of these routines may be used elsewhere.
    • programs: probably designed to contain procedures, but currently contains among others some blending routines, the place of which would be probably better in subdirectory "blending".
    • setup: setup routines not linked with a very specific domain. More specific setup routines are spread among some other subdirectories.
    • sinvect: singular vectors calculation (configuration E601).
    • transform: hat routines for spectral transforms.
    • utility: miscellaneous utilities, array deallocation routines.
    • var: routines involved in the 3DVAR and 4DVAR assimilation, some specific 3DVAR and 4DVAR setup routines.

    TFL

    • build: contains procedures.
    • external: routines which can be called from another project.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • module: all the types of module (variables declarations, type definition, active code).
      • tpm ...F90: variable declaration + type definition modules.
      • lt.... mod.F90: active code modules for Legendre transforms.
      • ft.... mod.F90: active code modules for Fourier transforms.
      • tr.... mod.F90: active code modules for transpositions.
      • su.... mod.F90: active code modules for setup.
    • programs: specific entries which can be used for TFL code validation. These routines are not called elsewhere.

    TAL

    • external: routines which can be called from another project.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • module: all the types of module (variables declarations, type definition, active code).
      • tpmald ...F90: variable declaration + type definition modules.
      • elt.... mod.F90: active code modules for N-S Fourier transforms.
      • eft.... mod.F90: active code modules for E-W Fourier transforms.
      • sue.... mod.F90: active code modules for setup.
    • programs: specific entries which can be used for TAL code validation. These routines are not called elsewhere.

    XRD

    • arpege: empty directory to be deleted.
    • bufr io: BUFR format files reading and writing.
    • cma: CMA format files reading and writing.
    • ddh: DDH diagnostics.
    • fa: ARPEGE (FA) files reading and writing.
    • grib io: ECMWF GRIB format files reading and writing.
    • grib mf: METEO-FRANCE GRIB format files reading and writing.
    • ioassign: empty directory to be deleted.
    • lanczos: linear algebra routines for Lanczos algorithm.
    • lfi: LFI format files reading and writing.
    • minim: linear algebra routines for minimizations. Contains the M1QN3 (quasi-Newton) minimizer.
    • misc: miscellaneous decks.
    • module: all the types of module (variables declarations, type definition, active code). There are a lot of mpl...F90 modules for parallel environment (interface to MPI parallel environment).
    • mrfstools: empty directory to be deleted.
    • newbufrio: empty directory to be deleted.
    • newcmaio: empty directory to be deleted.
    • not used: miscellaneous decks (unused decks to be deleted?).
    • pcma: empty directory to be deleted.
    • support: miscellaneous routines. Some of them do Fourier transforms, some others do linear algebra.
    • svipc: contains only svipc.c .
    • utilities: miscellaneous utilities.

    SUR

    • build: contains procedures.
    • external: routines which can be called from another project.
    • function: specific functions.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • module: all the types of module (variables declarations, type definition, active code).
      • yos ...F90: variable declaration + type definition modules.
      • su.... mod.F90 but not surf.... mod.F90: active code modules for setup.
      • surf.... mod.F90, v.... mod.F90: other active code modules.
    • offline: specific entries which can be used for SUR code validation. These routines are not called elsewhere.

    BLA

    • compiler.
    • include: not automatically generated interfaces, functions, and some other includes.
    • library: the only subdirectory containing .F90 decks.
    • old2new.
    • scripts.

    SAT

    • bias.
    • emiss.
    • interface.
    • module.
    • mwave.
    • onedvar.
    • pre screen.
    • rtlimb.
    • rttov.
    • satim.
    • test. (Not described in detail; more information has to be provided by someone who knows the content of this project, but there is currently no specific documentation about this topic)

    UTI

    • add cloud fields: program to add 4 cloud variables (liquid water, ice, rainfall, snow) in ARPEGE files.
    • bator: BATOR software (reads observation data from an ASCII format file named OBSOUL and the blacklist, and writes them to an ODB format file with some additional information).
    • combi: combination of perturbations in an ensemble forecast (PEARP).
    • controdb: control of the number of observations.
    • extrtovs: unbias TOVS.
    • fcq: does quality control and writes this quality control in ODB files.
    • gobptout: PROGRIB? (converts ARPEGE files containing post-processed data into GRIB files).
    • include: all .h decks (functions, COMMON blocks, parameters).
    • mandalay: software MANDALAY.
    • module: all types of modules.
    • namelist: namelists specific to the applications stored in UTI (for example OULAN, BATOR).
    • oulan: OULAN software (the step just before BATOR: extracts observations from the BDM, samples the data in space and time, and writes the sampled data to an ASCII file called "OBSOUL").
    • pregpssol: Surface GPS processing.
    • prescat: Scatterometer data processing.
    • progrid: PROGRID? (converts ARPEGE files containing post-processed data into GRIB files).
    • progrid cadre: cf. progrid?
    • sst nesdis: program to read the SST on the BDAP. This project has its own entries.

    MPA

    It contains a first layer of directories:

    • chem: chemistry.
    • conv: convection.
    • micro: microphysics.
    • turb: turbulence.

    Each directory contains the following subdirectories

    • externals: routines which can be called from another project.
    • include: all the "include" decks (functions, COMMON blocks, parameters).
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • internals: other non-module routines; they cannot be called from another project.
    • module: all types of modules.

    SURFEX

    • ASSIM: Surface assimilation routines (please note that programs soda.F90, oi_main.F90 and varassim.F90 are located under mse/programs).
    • OFFLIN: Surface offline routines (please note that programs pgd.F90, prep.F90 and offline.F90 are located under mse/programs).
    • SURFEX: Surface routines for physiography (PGD), initialisation (PREP) and physical processes including e.g. land (ISBA), sea, town (TEB) and lakes.
    • TOPD: TOPMODEL (TOPography based MODEL) for soil hydrology.
    • TRIP: River routing model TRIP

    MSE

    • dummy: empty versions of some routines.
    • externals: routines which can be called from another project.
    • interface: not automatically generated interfaces which match with the "external" directory routines.
    • internals: other non-module routines; they cannot be called from another project.
    • module: all types of modules.
    • new: file conversion routines, e.g. fa2lfi, lfi2fa
    • programs: SURFEX programs

    References and documentation

    diff --git a/previews/PR1129/Overview/da_graph/index.html b/previews/PR1129/Overview/da_graph/index.html
    diff --git a/previews/PR1129/PostProcessing/Diagnostics/index.html b/previews/PR1129/PostProcessing/Diagnostics/index.html

      Diagnostics

      Xtool

      xtool

      DDH

      Diagnostics par Domaines Horizontaux (Diagnostics by Horizontal Domains) is a tool to create budgets of different processes in the model. Please read on in the gmap documentation

      EZDIAG

      From Lisa: Note, this is for printing out full 3D fields from the model physics to the FA-file.

      1. In the routine that you would like to print out your fields add args:

        & PDIAG, KNDIAG,&

        and declare them

        INTEGER(KIND=JPIM),INTENT(IN) :: KNDIAG
         REAL(KIND=JPRB)   ,INTENT(OUT)   :: PDIAG(KLON,KLEV,KNDIAG)

        Put values in the array if its dimension allows it, e.g.

        IF (KNDIAG.GE.1) THEN
            PDIAG(KIDIA:KFDIA,KTDIA:KLEV,1)= YOURVAL(KIDIA:KFDIA,KTDIA:KLEV)
         ENDIF

        or anything you wish. Note that the variable YOURVAL is now stored in NGFL_EZDIAG=1.

      In this way you can store up to 25 diagnostic 3D fields in the history files.

      If you want to store 2D fields, you can put them at different levels in the same 3D array.

      1. Remake the interfaces if running AROME (not needed if running ALARO), before recompiling.

      2. In the NAMGFL namelist:

        ! ADDITIONAL FIELDS FOR DIAGNOSTIC
            YEZDIAG_NL(1)%CNAME='YOURVAL',
           YEZDIAG_NL(1)%LADV=.F.,

        If you add more fields (e.g. you set NGFL_EZDIAG=4), I think you will also need to set the grib parameter, e.g. (the default is 999, that you can leave for the first one).

        YEZDIAG_NL(2)%IGRBCODE=998,
         YEZDIAG_NL(3)%IGRBCODE=997,
        YEZDIAG_NL(4)%IGRBCODE=996,

        Note that the first two places are already defined in harmonie_namelist.pm (a combined namelist sketch is given after this list).

      3. In order to have your variable converted from FA to grib, add the new variable in util/gl/inc/trans_tab.h
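
      Putting the namelist pieces from the step above together, a minimal NAMGFL sketch could look as follows. This is only an illustration based on the fragments above: the field name YOURVAL, the number of fields and the GRIB codes are example values, and the rest of the NAMGFL content in harmonie_namelists.pm is left untouched.

        &NAMGFL
          ! illustrative sketch only - the number of EZDIAG fields and the GRIB codes are examples
          NGFL_EZDIAG=4,
          YEZDIAG_NL(1)%CNAME='YOURVAL',
          YEZDIAG_NL(1)%LADV=.F.,
          YEZDIAG_NL(2)%IGRBCODE=998,
          YEZDIAG_NL(3)%IGRBCODE=997,
          YEZDIAG_NL(4)%IGRBCODE=996,
        /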


    diff --git a/previews/PR1129/PostProcessing/FileConversions/index.html b/previews/PR1129/PostProcessing/FileConversions/index.html


    File conversions - this page under construction

    FA –> GRIB

    The default HARMONIE output is in FA format. HIRLAM/HARMONIE users are more used to dealing with data encoded according to GRIB, a WMO code for the representation of gridded data. Users have the option to convert HARMONIE FA format files to GRIB1 (short for GRIB edition 1), GRIB2 (short for GRIB edition 2) or NETCDF. Note that the NETCDF conversion is still experimental. References about different WMO GRIB editions (1, 2 and 3) can be found here.

    ecf/config_exp.h

    The option to convert model output can be selected in the ecf/config_exp.h experiment configuration file:

    
     # **** GRIB ****
     CONVERTFA=yes                    # Conversion of FA file to grib/nc (yes|no)
     ARCHIVE_FORMAT=GRIB1|2           # Format of archive files (GRIB1|GRIB2|nc). Currently nc format is only available in climate mode

    Details

    From the perspective of the harmonie suite, the conversion from FA to GRIB is carried out by the following tasks:

    • ecf/Makegrib_an.ecf - for fields produced in the analysis. This task is part of the /Expe/Date/Hour/Cycle/PostAnalysis family.
    • ecf/Listen2file.ecf - for fields produced in the forecast. This task is part of the /Expe/Date/Hour/Cycle/Forecast family, possibly through a set of intermediate families Process-i (depending on the values of variables MULTITASK and MAKEGRIB_LISTENERS as set in the ecf/config_exp.h experiment configuration file).

    If ARCHIVE_FORMAT is set to GRIB1 or GRIB2, the scr/Makegrib bash script will be run from the tasks mentioned above (possibly through intermediate scripts). Finally, from the scr/Makegrib script the gl tool will be called to convert HARMONIE output from FA to GRIB. Notice that if a more verbose job output is needed, e.g. for debugging, variable PRINTLEV can be set, at the beginning of Makegrib, to something else than 0.

    Conversion of FA/lfi files to GRIB by gl:

        gl [-c] [-p] FILE [ -o OUTPUT_FILE] [ -n NAMELIST_FILE]
      fstart(16) = $fstart,
      fstart(162) = $fstart,
      fstart(163) = $fstart,
    /
    In the namelist:
    • $YY/$MM/$DD/$HH is the forecast initial time
    • $time_unit is the unit of time to be used (min/h)
    • pppkey: selection of requested post-processed products (See: Postprocessing with gl for more details)
    • $fstart is the start hour for time-range products such as maximum temperature.
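
    As a concrete illustration of how these pieces fit together, a manual conversion could be run along the following lines (the file names and the namelist file name are placeholders, not taken from the scripts):

        gl -c ICMSHHARM+0012 -o ICMSHHARM+0012.grib -n namelist_fa2grib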

    WMO GRIB editions and references

    Currently (Aug 2019) there are several editions of GRIB in use or in experimental phase.

    • GRIB edition 2 is currently the main GRIB edition.
    • GRIB edition 1 is nowadays considered a legacy code. However it is still used, not only for legacy gridded data, but also to encode currently generated data.
    • There is an experimental WMO GRIB edition 3.
    diff --git a/previews/PR1129/PostProcessing/Fullpos/index.html b/previews/PR1129/PostProcessing/Fullpos/index.html


    Postprocessing with FULL-POS

    Introduction

    FULL-POS is a powerful postprocessing package, which is part of the common ARPEGE/IFS cycle. FULL-POS is documented by

    • Yessad (2011): This documentation describes the FULL-POS software, which does post-processing on different kinds of vertical levels. In particular, the post-processable variables and the calling tree (organigramme) are given. Some aspects of the horizontal and vertical interpolators (which may be used in some other applications) are also described.
    • ykfpos38.pdf: FULL-POS in cycle 38
    • El Khatib (2002): Older documentation with a link to an old FULL-POS website.

    FULL-POS is a special configuration (9xx) of the full model for setup and initialization. In other words it is a 0 hour forecast, with extra namelist settings for variables to (post)process and to write out. When generating initial or boundary files we are calling a special configuration of FULL-POS, e927.

    ecf/config_exp.h

    The use of FULL-POS is controlled by the POSTP variable in ecf/config_exp.h:

    POSTP="inline"                          # Postprocessing by Fullpos (inline|offline|none).
                                             # See Setup_postp.pl for selection of fields.
                                             # inline: this is run inside of the forecast
                                             # offline: this is run in parallel to the forecast in a separate task

    inline is the default which means FULL-POS postprocessing is called from the forecast model as it runs. If you select offline the model is called independently of the running forecast model using the forecast model output files as inputs to be postprocessed. By selecting none no FULL-POS postprocessing will be carried out.

    Output frequency by FULL-POS is controlled by PWRITUPTIMES, FPOUTINT and FREQ_RESET:

    # Postprocessing times (space separated list)
     132c132
     <  @namfpdyh_lev = (1,2,3,4,5,6,7,8,9,10,11,12,13) ;
     ---
    >  @namfpdyh_lev = (1,2,3,4,5,6,7,8,9,10,11,12) ;

    Expert users

    In the FULL-POS namelist NAMFPC (variables explained in src/arp/module/yomfpc.F90), the variables are placed into different categories:

    • LFPCAPEX: if true, XFU fields are used for the CAPE and CIN computation (with NFPCAPE).
    • LFPMOIS: month allowed for climatology usage:
      • .F. => month of the model (forecast).
      • .T. => month of the file.
    • NFPCLI: usage level for climatology:
      • 0: no climatology
      • 1: orography and land-sea mask of output only
      • 2: all available climatological fields of the current month
      • 3: shifting mean from the climatological fields of the current month to the ones of the closest month
    • NFPCAPE: kind of computation for CAPE and CIN:
      • 1 => from bottom model layer
      • 2 => from the most unstable layer
      • 3 => from mto standard height (2 meters) as recomputed values
      • 4 => from mto standard height (2 meters) out of fluxes (for analysis)
    • CFPFMT: format of the output files, can take the following values:
      • ’MODEL’ for output in spherical harmonics.
      • ’GAUSS’ for output in grid-point space on Gaussian grid (covering the global sphere).
      • ’LELAM’ for output on a grid of kind ALADIN (spectral or grid-point coefficients).
      • ’LALON’ for a grid of kind "latitudes * longitudes".
      Default is ’GAUSS’ in ARPEGE/IFS, ’LELAM’ in ALADIN.
    • CFPDOM: names of the subdomains. Names have at maximum 7 characters.
      • If CFPFMT=’GAUSS’ or ’LELAM’ only one output domain is allowed.
      • If CFPFMT=’LALON’ the maximum of output subdomains allowed is 10.
      By default, one output domain is requested, CFPDOM(1)=’000’ and CFPDOM(i)=’’ for i>1.
    • L_READ_MODEL_DATE: if .TRUE., read the date from the model (see the sketch after this list).
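
    The sketch below illustrates how a few of these variables could be combined for a LELAM output domain. The values are purely illustrative assumptions; only variables described above are shown and everything else in NAMFPC is left at its defaults.

        &NAMFPC
          ! illustrative values only - see the variable descriptions above
          CFPFMT='LELAM',    ! output on an ALADIN-type grid
          CFPDOM(1)='000',   ! one output subdomain with the default name
          NFPCLI=0,          ! no climatology used
          NFPCAPE=2,         ! CAPE/CIN computed from the most unstable layer
          LFPMOIS=.F.,       ! use the month of the model (forecast)
        /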

    The default FA-names for parameters in different categories can be found from src/arp/setup/suafn1.F90 L687.

    It is worth mentioning some of the variables postprocessed by FULL-POS:

    • True vertical velocity w [VW]. (for NH ALADIN only).
    • Potential vorticity P V [PV].
    • Pressure coordinate vertical velocity ω [VV].
    • Eta coordinate vertical velocity η [ETAD].
    • Absolute vorticity ζ + f [ABS].
    • Relative vorticity ζ [VOR].
    • Divergence D [DIV].
    • Satellite equivalents
      • MSAT7 MVIRI channels 1 and 2 ([MSAT7C1] and [MSAT7C2]).
      • MSAT8 MVIRI channels 1 to 8 ([MSAT8C1] to [MSAT8C8]).
      • MSAT9 MVIRI channels 1 to 8 ([MSAT9C1] to [MSAT9C8]).
      • GOES11 IMAGER channels 1 to 4 ([GOES11C1] to [GOES11C4]).
      • GOES12 IMAGER channels 1 to 4 ([GOES12C1] to [GOES12C4]).
      • MTSAT1 IMAGER channels 1 to 4 ([MTSAT1C1] to [MTSAT1C4]).

    Problems

    Problems may be encountered with FULL-POS when running on large domains. Here are some things to look out for:

    • Increase the MBX_SIZE if you run out of MPI buffer space.
    • Increase number of cores if you run out of memory.
    • Make sure NFPROMA and NFPROMA_DEP are small and equal to NPROMA.
    • Set NSTRIN=NSTROUT=NPROC in nampar0 if none of the above helps (see the sketch below).
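
    For the last point, a minimal nampar0 sketch (the value 128 is an illustrative total number of MPI tasks, i.e. NPROC):

        &NAMPAR0
          ! illustrative: 128 stands for the total number of MPI tasks (NPROC)
          NPROC=128,
          NSTRIN=128,
          NSTROUT=128,
        /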
    diff --git a/previews/PR1129/PostProcessing/Interpolation/index.html b/previews/PR1129/PostProcessing/Interpolation/index.html

    Interpolations with gl

    Introduction

    In the following we describe the geometrical routines in gl. gl can handle the following projections

    • lat/lon
    • Rotated lat/lon
    • Lambert
    • Polar stereographic
    • Rotated Mercator

    Interpolation

    All interpolations are handled within the module util/gl/mod/module_interpol.f90. The module contains

    • clear_interpol to clear the interpolation setup
    • setup_interpol where the position of the output gridpoints in the input grid are calculated
    • setup_weights where we calculate the interpolation weights. Interpolation can be nearest gridpoint or bilinear. The interpolation can be masked with a field that tells which gridpoints from the input field can be used.

    The setup routines are only called once.

    • interpolate runs the interpolation
    • resample works like the interpolation if the input grid is coarser than the output grid. If it is the other way around, it takes the average of the input gridpoints belonging to each output gridpoint.

    Interpolation can be done between different projections as well as to geographical points. The most general example of the usage of the interpolation can be found in util/gl/grb/any2any.F90.
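
    Schematically, the typical call order is sketched below. The argument lists are deliberately omitted because the actual interfaces are those defined in module_interpol.f90; see any2any.F90 for complete, real usage.

        ! Schematic call order only - argument lists omitted on purpose
        !   CALL setup_interpol(...)   ! locate the output gridpoints in the input grid (once)
        !   CALL setup_weights(...)    ! nearest gridpoint or bilinear weights, optionally masked (once)
        !   CALL interpolate(...)      ! run the interpolation for each field ...
        !   CALL resample(...)         ! ... or resample when averaging a finer input grid
        !   CALL clear_interpol(...)   ! release the interpolation setup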

    For practical usage see the section about postprocessing

    Rotations

    All rotations are handled within the module util/gl/mod/module_rotations.f90. The module contains

    • clear_rotation to clear the rotation setup
    • prepare_rotation prepares rotations from the input geometry to the output geometry via north/south components.
    • rotate_winds runs the actual rotation.

    Staggering

    The staggering of an input file is based on the knowledge about the model and is in util/gl/mod/module_griblist.f90. The restaggering is done in util/gl/grb/restag.f90 as a simple average between gridpoints. The staggering of the output geometry is defined by OUTGEO@ARKAWA, where A and C are the available options.


    diff --git a/previews/PR1129/PostProcessing/gl/index.html b/previews/PR1129/PostProcessing/gl/index.html

    Post processing with gl

    Introduction

    gl (as in griblist) is a multi-purpose tool for file manipulation and conversion. It uses ECMWF's ecCodes library and can be compiled with or without support for HARMONIE FA/LFI or NETCDF files. The gl package also includes software for verification extraction, fldextr, and for field comparison, xtool.

     USAGE: gl file [-n namelist_file] [-o output_file] -[lfgmicp(nc)sdtq] [-lbc CONF]
     
      gl [-f] file, list the content of a file, -f for FA/lfi files  
      -c    : Convert a FA/lfi file to grib ( -f implicit )          
     -NLON $NLON -NLAT $NLAT \
     -LATC $LATC -LONC $LONC \
     -LAT0 $LAT0 -LON0 $LON0 \
    -GSIZE $GSIZE

    To get the geographical position of the lower left corner use

    domain_prop -f -LOW_LEFT FAFILE  

    To print out the important projection parameters in a file use:

    domain_prop -f -4JB FAFILE

    Get time information from a file

    domain_prop -f -DATE FAFILE

    fldextr and obsextr

    Read about the verification extraction programs here


    diff --git a/previews/PR1129/PostProcessing/xtool/index.html b/previews/PR1129/PostProcessing/xtool/index.html

    xtool

    Xtool, part of the gl package, provides a utility to calculate differences between GRIB/FA files and produce the result in a new GRIB/NetCDF file. See xtool part of util/gl/README. The main commands are:

     
                               xtool                                 
      
      Simple usage:                                                  
     outkey%time = 0000
     outkey%endstep = 8
     outkey%startstep = 7
    /

    This is used in scr/convertFA to deaccumulate fields to NetCDF for climate simulations.

    SAL

    Structure Amplitude Location (SAL) is an object-based quality measure for the verification of QPFs (Wernli et al., 2008). SAL contains three independent components that focus on the Structure, Amplitude and Location of the precipitation field in a specified domain.

    • S: Measure of structure of the precipitation area (-2 - +2). Large S, if model predicts too large precipitation areas.

    • A: Measure of strength of the precipitation (-2 - +2). Large A, if model predicts too intense precipitation.

    • L: Measure of location of the precipitation object (0 - +2). Large L, if modelled precipitation objects are far from the observed counterparts.

    • SAL can be activated in xtool by using -op SAL option. e.g.

       xtool -f1 model.grib -f2 observation.grib -op SAL -n namelist
    • The output of SAL is two simple ASCII files:

      1. scatter_plot.dat containing date, S,A and L parameters.
      2. sal_output.dat containing more detailed statistics collected during the verification (location of center of mass, number of objects, measure of object size etc.).
    diff --git a/previews/PR1129/SuiteManagement/ECFLOW/index.html b/previews/PR1129/SuiteManagement/ECFLOW/index.html


    Running Harmonie under ecFlow

    Introduction

    This document describes how to run Harmonie under the ecFlow scheduler at ECMWF. ecFlow is the ECMWF workflow manager; it has been written using Python to improve maintainability, allow easier modification and introduce object-oriented features compared to the old scheduler SMS. ecFlow can be used in any HARMONIE version from harmonie-40h1.1.beta.1 onwards.

    New users

    On the ECMWF Atos machine in Bologna, each user has a virtual machine on which ecFlow is running. If you don't have a VM yet, ask ECMWF to set it up for you. If you are starting ecFlow for the first time at ECMWF, you may have to add your ssh key to the authorized_keys file to allow passwordless access, as ssh is used to communicate between the servers:

          cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys

    Start your experiment supervised by ecFlow

    Launch the experiment in the usual manner by giving start time, DTG, end time, DTGEND and other optional arguments

          ~hlam/Harmonie start DTG=YYYYMMDDHH

    If successful, ecFlow will identify your experiment name and start building your binaries and run your forecast. If not, you need to examine the ecFlow log file $HM_DATA/ECF.log. $HM_DATA is defined in your Env_system file. At ECMWF $HM_DATA=$SCRATCH/hm_home/$EXP where $EXP is your experiment name.

    The ecFlow viewer starts automatically. To view any suite for your server or other servers, the server must be added to the ecFlow viewer (via Servers -> Manage servers, Add server) and selected in Servers. See below on how to find the port and server name.

    • Two experiments with the same name cannot be monitored on the same server, so Harmonie will start the server and delete the previous non-active suite for you.
    • To delete a suite manually, use ecflow_client --port XXXX --host XXXX --delete force yes /suite or use the GUI: right-click on the suite, then click "Remove" (if you don't see the Remove option, go to Tools -> Preferences -> Menus, and make yourself Administrator). An illustrative command line is given after this list.
    • If other manual intervention in server or client is needed you can use ecflow commands. See here.
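
    For example (the port, host and suite name below are purely illustrative; use the values of your own server and experiment):

        ecflow_client --port 3141 --host ecflow-gen-$USER-001 --delete force yes /my_exp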

    ecFlow control

    Finding the port and host of the ecFlow server

    The server on which ecFlow is running is defined with variable $ECF_HOST, the port with ECF_PORT, set in Env_system or derived. On the VMs on ECMWF Atos machine in Bologna ECF_HOST=ecflow-gen-${USER}-001 and ECF_PORT=3141 for all users.

    NOTE: A new naming convention for the ecFlow servers has been implemented by ECMWF. The old server name will still be available for some users. If the new naming is used for your user, update Env_system with ECF_HOST=ecfg-${USER}-1 and ECF_PORT=3141.

    Information about server variables can be found by running:

    • On ECMWF's Atos:
          ssh ecflow-gen-${USER}-001 ecflow_server status 
    • Or if ecFlow is running on the machine you are logged into:
          ecflow_server status 

    You can also find ECF_PORT/ECF_HOST by checking the files under $ECF_HOME, like:

    > ls -rlt ~/ecflow_server
     total 12
     -rw-r--r-- 1 hlam accord 2529 Jun 15 16:20 ecflow-gen-hlam-001.3141.ecf.check.b
     -rw-r--r-- 1 hlam accord 2529 Jun 20 17:36 ecflow-gen-hlam-001.3141.ecf.check
     source ~/.bash_profile
     module unload ecflow
     module load ecflow/5.7.0
    $@

    The ecFlow server version may change over time.

    Add another user to your ecFlow viewer

    Sometimes it's handy to be able to follow, and control, your colleagues' experiments. To do this, follow these steps:

    • Find the port number of your colleague as described above.
    • In the ecFlow viewer choose Servers -> Manage servers, click on "Add server" and fill in the appropriate host and port and give it a useful name. Click on OK to save it.
    • If you click on Servers in the viewer the name should appear and you can make it visible by clicking on it.

    Changing the port

    By default, the port is set by

    export ECF_PORT=$((1500+usernumber))

    in mSMS.job (40h1.1), Start_ecFlow.sh (up to #b6d58dd), or Main (currently).

    For the VMs at ECMWF it is set to 3141 in Env_system. If you want to change this number (for example, if that port is in use already), you will also need to add a -p flag when calling ecflow_start.sh as follows:

    ecflow_start.sh -p $ECF_PORT -d $JOBOUTDIR

    Otherwise, ecflow_start.sh tries to open the default port.

    Note: if you already have an ecFlow server running at your new port number before launching an experiment, this won't be an issue.

    More info


    diff --git a/previews/PR1129/System/Build_local_docs/index.html b/previews/PR1129/System/Build_local_docs/index.html


    Build system documentation locally

    This page explains how to build the system documentation locally

    Install Julia

    Documentation is built using Julia. It is strongly recommended that the official generic binaries from the downloads page be used to install Julia.

    1. Download Julia and untar
    2. Add the julia bin directory to your PATH
    export PATH=/path/to/julia/bin:$PATH
    Note

    On Atos you can

    module load julia

    but make sure to set JULIA_DEPOT_PATH to a place with sufficient quota, e.g. set in .bashrc:

    export JULIA_DEPOT_PATH=$PERM/juliadepot

    Install dependencies

    To install the Documenter.jl dependency

    julia --project=docs/ -e 'using Pkg; Pkg.instantiate()' 

    Build documentation

    To create the HTML pages from the markdown files run

    julia --project=docs/ docs/make.jl 
    [ Info: SetupBuildDirectory: setting up build directory.
     [ Info: Doctest: running doctests.
     [ Info: ExpandTemplates: expanding markdown templates.
     [ Info: CrossReferences: building cross-references.
     │ - ✘ ENV["GITHUB_REF"] matches devbranch="pre-CY46h1"
     │ - ✘ ENV["GITHUB_ACTOR"] exists
     │ - ✘ ENV["DOCUMENTER_KEY"] or ENV["GITHUB_TOKEN"]  exists
    └ Deploying: ✘

    The HTML pages will be put in docs/build. Open index.html in a browser

    firefox docs/build/index.html

    A local build will not deploy the HTML pages to github.com/Hirlam/HarmonieSystemDocumentation.git.

    Also see .github/workflows/documentation.yml


    diff --git a/previews/PR1129/System/DrHook/index.html b/previews/PR1129/System/DrHook/index.html


    Profiling & traceback tool Dr.Hook

    Background

    Dr.Hook (& the Medicine Head :-) was developed at ECMWF in 2003 to overcome problems in catching runtime errors. Their IBM system at the time was quite unable to produce a meaningful traceback upon a crash, so it was decided that something needed to be done urgently.

    Dr.Hook gets its name from Fujitsu VPP's hook functionality in their Fortran compiler, which made it possible to call user functions upon entry to and exit from a routine. Dr.Hook is of course also a former US rock band from the 70's, which probably did not survive into this millennium due to heavy drug use!

    In about a week or so in late 2003 the first version of Dr.Hook saw daylight. It turned out almost immediately that it could also be used to gather information for profiling purposes, like wall-clock & CPU times, possibly MFlop/s, and memory consumption.

    One drawback with Dr.Hook was that (initially just) Fortran code needed to be instrumented with subroutine calls, which was a bother. However, for the IFS code an automatic insertion script was developed, greatly simplifying the task.

    Activating Dr.Hook

    Two things have to be in place in order to use Dr.Hook:

    1. Fortran (or C) codes must contain explicit Dr.Hook calls to enable instrumentation, starting from the main program
    2. Certain environment variable(s) need to be set
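
    A minimal sketch of the second point is given below. DR_HOOK=1 and DR_HOOK_OPT=prof are the variables discussed in the Overheads section further down; how the environment is passed to your batch job depends on your system setup.

        export DR_HOOK=1           # enable Dr.Hook instrumentation (traceback on crash)
        export DR_HOOK_OPT=prof    # additionally collect wall-clock profiling information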

    An example of Fortran instrumentation

    SUBROUTINE HOP(KDLEN,KDBDY,KSET,KHORIZ)
     !**** *HOP* - Operator routine for all types of observations.
     !     E. ANDERSSON            ECMWF          01/04/99
     ...
     !-- The following now does NOT initialize MPL nor MPI for you
     IF (LHOOK) CALL DR_HOOK('SOME_UTILGL_TOOL',0,ZHOOK_HANDLE)
     ...
    IF (LHOOK) CALL DR_HOOK('SOME_UTILGL_TOOL',1,ZHOOK_HANDLE)
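
    Since the examples above are abbreviated, here is a hedged, self-contained sketch of the canonical instrumentation pattern. The module names follow the usual IFS/HARMONIE conventions; check an existing routine in the source tree for the exact form used in your cycle.

        SUBROUTINE MY_ROUTINE(KLON)
        ! Minimal Dr.Hook instrumentation sketch - not taken verbatim from the source
        USE PARKIND1, ONLY : JPIM, JPRB
        USE YOMHOOK , ONLY : LHOOK, DR_HOOK
        IMPLICIT NONE
        INTEGER(KIND=JPIM), INTENT(IN) :: KLON
        REAL(KIND=JPRB) :: ZHOOK_HANDLE
        IF (LHOOK) CALL DR_HOOK('MY_ROUTINE',0,ZHOOK_HANDLE)   ! second argument 0 = routine entry
        ! ... the actual work of the routine goes here ...
        IF (LHOOK) CALL DR_HOOK('MY_ROUTINE',1,ZHOOK_HANDLE)   ! second argument 1 = routine exit
        END SUBROUTINE MY_ROUTINE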

    Overheads

    Setting DR_HOOK=1 has practically no overhead on a scalar machine. Profiling with DR_HOOK_OPT=prof causes some 5% overhead.

    On a vector machine the overheads are so big that Dr.Hook should unfortunately not be used there.


    diff --git a/previews/PR1129/System/ECMWF/ECMWF_teleport/index.html b/previews/PR1129/System/ECMWF/ECMWF_teleport/index.html


    Monitoring Harmonie suites with Teleport

    In order to monitor the progress of your Harmonie suite(s) at ECMWF, the ecFlow GUI ecflow_ui can be used directly from your local PC/server. This relies on Teleport and ssh port forwarding, which are described in more detail below.

    Open Teleport connection

    • This relies on a Teleport connection to ECMWF. Further details on Teleport are available here:
    [itops@reaserve ~]$ tsh status
     > Profile URL:  https://jump.ecmwf.int:443
       Logged in as: itops@met.ie
       Cluster:      jump.ecmwf.int
       User dui
       IdentityFile ~/.tsh/keys/jump.ecmwf.int/eoin.whelan@met.ie
       ProxyCommand bash -c "tsh login; ssh -W %h:%p %r@jump.ecmwf.int"
    [ewhelan@reaserve ~]$

    Open ecFlow ports

    • The following opens a port to the ECMWF ecFlow server (user dui, ECF_PORT=3141), based on the instructions provided at https://confluence.ecmwf.int/display/ECFLOW/Teleport+-+using+local+ecflow_ui. In a new terminal:
    ssh hpc-login -C -N -L 3141:ecflow-gen-dui-001:3141
    diff --git a/previews/PR1129/System/ECMWF/RunningHarmonieOnAtos/index.html b/previews/PR1129/System/ECMWF/RunningHarmonieOnAtos/index.html


      Running Harmonie on Atos

      Before you start

      New Harmonie users will require membership of the accord user group at ECMWF. Please contact the HIRLAM System Manager, Daniel Santos, to make this request on your behalf. Furthermore, ECMWF will have to set up a virtual machine for you to run the ecFlow server on (see here). Finally, make sure that your login shell is set to /bin/bash.

      Tip

      To share your experiments with the members of the accord group do:

      chmod 755 $HOME $SCRATCH $PERM $HPCPERM
      chgrp -R accord $HOME/hm_home $SCRATCH/hm_home $HOME/HARMONIE $HPCPERM/hm_home
      chmod g+s $HOME/hm_home $SCRATCH/hm_home $HOME/HARMONIE $HPCPERM/hm_home

      The chmod g+s sets the SGID bit, which ensures that new experiments created in hm_home will automatically belong to the accord group.

      Configure your experiment (option 1)

      • Create an experiment directory under $HOME/hm_home and use the master script Harmonie to set up a minimum environment for your experiment.

        mkdir -p $HOME/hm_home/my_exp
         cd $HOME/hm_home/my_exp
         ln -sf /path/to/git/repository/config-sh/Harmonie
         ./Harmonie setup -r /path/to/git/repository -h ECMWF.atos

        where

        • -r Specifies the path to the git repository. Make sure you have checked out the correct branch.
        • -h tells which configuration files to use. At ECMWF config.ECMWF.atos is the default one. For harmonie-43h2.2 use -h config.aa
          Tip

          Atos-tagged versions of Harmonie are available in ~hlam/harmonie_release/git/tags/

          ln -sf ~hlam/harmonie_release/git/tags/<taggedversion>/config-sh/Harmonie 
           git commit --author "Name <name@host>" -m "Commit message"
           git push --set-upstream origin <feature/branch_name>

          Specifying --set-upstream origin <feature/branch_name> to git push is only necessary the first time you push your branch to the remote. When ready you can now go to GitHub and make a pull-request to the Harmonie repository from your fork.

          Start your experiment

          Launch the experiment by giving start time, DTG, end time, DTGEND

          ./Harmonie start DTG=YYYYMMDDHH DTGEND=YYYYMMDDHH
           # e.g., ./Harmonie start DTG=2022122400 DTGEND=2022122406

          If successful, Harmonie will identify your experiment name and start building your binaries and run your forecast. If not, you need to examine the ECFLOW log file $HM_DATA/ECF.log. $HM_DATA is defined in your Env_system file. At ECMWF $HM_DATA=$SCRATCH/hm_home/$EXP where $EXP is your experiment name. Read more about where things happen further down.

          Continue your experiment

          If your experiment has successfully completed and you would like to continue for another period, run

          ./Harmonie prod DTGEND=YYYYMMDDHH

          By using prod you tell the system that you are continuing the experiment and using the first guess from the previous cycle. The start date is taken from the file progress.log created in your $HOME/hm_home/my_exp directory. If you had used start, the initial data would have been interpolated from the boundaries; a cold start, in other words.

          Start/Restart of ecflow_ui

          To start the graphical window for ECFLOW

          ./Harmonie mon

          The graphical window runs independently of the experiment and can be closed and restarted again with the same command. With the graphical interface you can control and view logfiles of each task.

          Making local changes

          Very soon you will find that you need to make changes in a script or in the source code. Once you have identified which file to edit, put it into your $HOME/hm_home/my_exp directory, with exactly the same subdirectory structure as in the reference, e.g. if you want to modify a namelist setting

          ./Harmonie co nam/harmonie_namelists.pm   # retrieve default namelist harmonie_namelists.pm
          -vi nam/harmonie_namelists.pm              # modify the namelist

          Next time you run your experiment the changed file will be used. You can also make changes in a running experiment. Make the change you wish and rerun the InitRun task from the viewer. The InitRun task copies all files from your local experiment directory to your working directory $HM_DATA. Once your InitRun task is complete you can rerun the task you are interested in. If you wish to recompile something you will also have to rerun the Build tasks.

          Issues

          Harmonie experiments stopping at ECMWF (Atos) due to a $PERM mounting problem: https://github.com/Hirlam/Harmonie/issues/628

          Account

          In order to change the billing account, open Env_submit and find the definition of scalar_job. Then add a line like

          'ACCOUNT' => $submit_type.' --account=account_name' to the definition of the dictionary.

          Directory structure

          $SCRATCH

          In $SCRATCH/hm_home/$EXP you will find

          Directory | Content
          bin | Binaries
          lib | Source code synced from $HM_LIB and compiled code
          lib/src | Object files and source code (if you build with makeup, set by MAKEUP_BUILD_DIR)
          lib/util | Utilities such as makeup, gl_grib_api or oulan
          climate | Climate files
          YYYYMMDD_HH | Working directory for the current cycle. If an experiment fails it is useful to check the IFS log file, NODE.001_01, in the working directory of the current cycle. The failed job will be in a directory called something like Failed_this_job.
          archive | Archived files. A YYYY/MM/DD/HH structure for per cycle data. ICMSHHARM+NNNN and ICMSHHARM+NNNN.sfx are atmospheric and surfex forecast output files
          extract | Verification input data. This is also stored on the permanent disk $HPCPERM/HARMONIE/archive/$EXP/parchive/archive/extract
          ECF.log | Log of job submission

          ECFS

          • Since $SCRATCH is cleaned regularly we need to store data permanently on ECFS, the EC file system, as well. There are two options for ECFS, ectmp and ec. The latter is permanent storage while the former is cleaned after 90 days. Which one you use is defined by the ECFSLOC variable. To view your data, type e.g.

            els ectmp:/$USER/harmonie/my_exp
          • The level of archiving depends on ARSTRATEGY in ecf/config_exp.h. The default setting will give you one YYYY/MM/DD/HH structure per cycle, containing:

            • Surface analysis, ICMSHANAL+0000[.sfx]
            • Atmospheric analysis result MXMIN1999+0000
            • Blending between surface/atmospheric analysis and cloud variable from the first guess LSMIXBCout
            • ICMSHHARM+NNNN and ICMSHHARM+NNNN.sfx are atmospheric and surfex forecast model state files
            • PFHARM* files produced by the inline postprocessing
            • ICMSHSELE+NNNN.sfx are surfex files with selected output
            • GRIB files for fullpos and surfex select files
            • Logfiles in a tar file logfiles.tar
            • Observation database and feedback information in odb_stuff.tar.
            • Extracted files for obsmon in sqlite.tar
          • Climate files are stored in the climate directory

          • One directory each for vfld and vobs data respectively for verification data

          $PERM

          Directory | Content
          HARMONIE/$EXP | ecflow log and job files
          hm_lib/$EXP/lib | Scripts, config files, ecf and suite, source code (not compiled, set by $HM_LIB). Reference with experiment's changes on top

          $HPCPERM

          In $HPCPERM/hm_home/$EXP

          Directory | Content
          parchive/archive/extract/ | Verification input data.

          $HOME on ecflow-gen-${user}-001

          Directory | Content
          ecflow_server/ | ecFlow checkpoint and log files

          Cleanup of old experiments

          Danger

          These commands may not work properly in all versions. Do not run the removal before you're sure it's OK

          Once you have completed your experiment you may wish to remove code, scripts and data from the disks. Harmonie provides some simple tools to do this. First check the content of the different disks by

          Harmonie CleanUp -ALL

          Once you have convinced yourself that this is OK you can proceed with the removal.

          Harmonie CleanUp -ALL -go 

          If you would like to exclude the data stored on e.g. ECFS (at ECMWF), or more generally the data stored under HM_EXP (as defined in Env_system), you run

          Harmonie CleanUp -d

          to list the directories intended for cleaning. Again, convince yourself that this is OK and proceed with the cleaning by

          Harmonie CleanUp -d -go

          You can always remove the data from ECFS directly by running e.g.

          erm -R ec:/YOUR_USER/harmonie/EXPERIMENT_NAME 

          or

          erm -R ectmp:/YOUR_USER/harmonie/EXPERIMENT_NAME 
          • For more information about cleaning with Harmonie read here
          • For more information about the ECFS commands read here

          Debugging Harmonie with ARM DDT

          Follow the instructions here, using the "Run DDT client on your Personal Computer or End User Device" option.

      +vi nam/harmonie_namelists.pm # modify the namelist

      Next time you run your experiment the changed file will be used. You can also make changes in a running experiment. Make the change you wish and rerun the InitRun task from the viewer. The InitRun task copies all files from your local experiment directory to your working directory $HM_DATA. Once your InitRun task is complete you can rerun the task you are interested in. If you wish to recompile something you will also have to rerun the Build tasks.

      Issues

      Harmonie experiments stopping at ECMWF (Atos) due to a $PERM mounting problem: https://github.com/Hirlam/Harmonie/issues/628

      Account

      In order to change the billing account, open Env_submit and find the definition of scalar_job. Then add a line like

      'ACCOUNT' => $submit_type.' --account=account_name' to the definition of the dictionary.
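
      As a minimal sketch, assuming scalar_job is a perl hash as described above (the surrounding key is illustrative, $submit_type is defined elsewhere in Env_submit, and account_name is a placeholder), the addition could look like:

        %scalar_job = (
          'NODES'   => 1,                                          # illustrative existing entry
          'ACCOUNT' => $submit_type.' --account=account_name',     # added line: billing account to charge
        );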

      Directory structure

      $SCRATCH

      In $SCRATCH/hm_home/$EXP you will find

      Directory | Content
      bin | Binaries
      lib | Source code synced from $HM_LIB and compiled code
      lib/src | Object files and source code (if you build with makeup, set by MAKEUP_BUILD_DIR)
      lib/util | Utilities such as makeup, gl_grib_api or oulan
      climate | Climate files
      YYYYMMDD_HH | Working directory for the current cycle. If an experiment fails it is useful to check the IFS log file, NODE.001_01, in the working directory of the current cycle. The failed job will be in a directory called something like Failed_this_job.
      archive | Archived files. A YYYY/MM/DD/HH structure for per cycle data. ICMSHHARM+NNNN and ICMSHHARM+NNNN.sfx are atmospheric and surfex forecast output files
      extract | Verification input data. This is also stored on the permanent disk $HPCPERM/HARMONIE/archive/$EXP/parchive/archive/extract
      ECF.log | Log of job submission
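
      If a cycle fails, a quick way to locate the failed job and inspect the IFS log mentioned above could be (experiment name and date are illustrative):

        cd $SCRATCH/hm_home/my_exp/20221224_00        # working directory of the failed cycle
        ls -d Failed*                                 # the failed job directory, e.g. Failed_this_job
        find . -name NODE.001_01 | xargs tail -n 50   # look at the end of the IFS log file(s)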

      ECFS

      • Since $SCRATCH is cleaned regularly we need to store data permanently on ECFS, the EC file system, as well. There are two options for ECFS, ectmp and ec. The latter is permanent storage while the former is cleaned after 90 days. Which one you use is defined by the ECFSLOC variable. To view your data, type e.g.

        els ectmp:/$USER/harmonie/my_exp
      • The level of archiving depends on ARSTRATEGY in ecf/config_exp.h. The default setting will give you one YYYY/MM/DD/HH structure per cycle, containing:

        • Surface analysis, ICMSHANAL+0000[.sfx]
        • Atmospheric analysis result MXMIN1999+0000
        • Blending between surface/atmospheric analysis and cloud variable from the first guess LSMIXBCout
        • ICMSHHARM+NNNN and ICMSHHARM+NNNN.sfx are atmospheric and surfex forecast model state files
        • PFHARM* files produced by the inline postprocessing
        • ICMSHSELE+NNNN.sfx are surfex files with selected output
        • GRIB files for fullpos and surfex select files
        • Logfiles in a tar file logfiles.tar
        • Observation database and feedback information in odb_stuff.tar.
        • Extracted files for obsmon in sqlite.tar
      • Climate files are stored in the climate directory

      • One directory each for vfld and vobs data respectively for verification data
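
      For example, to copy one archived forecast file back from ECFS to the current directory you could use something like the following (the path is illustrative and follows the YYYY/MM/DD/HH layout described above; use ectmp: instead of ec: if ECFSLOC points to the temporary storage):

        ecp ec:/$USER/harmonie/my_exp/2022/12/24/00/ICMSHHARM+0003.sfx .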

      $PERM

      Directory | Content
      HARMONIE/$EXP | ecflow log and job files
      hm_lib/$EXP/lib | Scripts, config files, ecf and suite, source code (not compiled, set by $HM_LIB). Reference with experiment's changes on top

      $HPCPERM

      In $HPCPERM/hm_home/$EXP

      Directory | Content
      parchive/archive/extract/ | Verification input data.

      $HOME on ecflow-gen-${user}-001

      Directory | Content
      ecflow_server/ | ecFlow checkpoint and log files

      Cleanup of old experiments

      Danger

      These commands may not work properly in all versions. Do not run the removal before you're sure it's OK

      Once you have completed your experiment you may wish to remove code, scripts and data from the disks. Harmonie provides some simple tools to do this. First check the content of the different disks by

      Harmonie CleanUp -ALL

      Once you have convinced yourself that this is OK you can proceed with the removal.

      Harmonie CleanUp -ALL -go 

      If you would like to exclude the data stored on e.g. ECFS (at ECMWF), or more generally the data stored under HM_EXP (as defined in Env_system), you run

      Harmonie CleanUp -d

      to list the directories intended for cleaning. Again, convince yourself that this is OK and proceed with the cleaning by

      Harmonie CleanUp -d -go

      You can always remove the data from ECFS directly by running e.g.

      erm -R ec:/YOUR_USER/harmonie/EXPERIMENT_NAME 

      or

      erm -R ectmp:/YOUR_USER/harmonie/EXPERIMENT_NAME 
      • For more information about cleaning with Harmonie read here
      • For more information about the ECFS commands read here

      Debugging Harmonie with ARM DDT

      Follow the instructions here, using the "Run DDT client on your Personal Computer or End User Device" option.

    diff --git a/previews/PR1129/System/GitDeveloperDocumentation/index.html b/previews/PR1129/System/GitDeveloperDocumentation/index.html index efccb00a6..95171d8ee 100644 --- a/previews/PR1129/System/GitDeveloperDocumentation/index.html +++ b/previews/PR1129/System/GitDeveloperDocumentation/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Developing in the Hirlam GitHub organization

    Introduction

    Since 2018 and CY43, HIRLAM has used git for code revision control and gitolite as the git server on hirlam.org. HIRLAM is now moving to GitHub for software development and version control. This page provides information on how to access the GitHub Hirlam organisation and how to commit your developments, specifically for Harmonie. As was the case with hirlam.org's gitolite, a fork-and-branch workflow is used to manage developments.

    Becoming a member of Hirlam

    1. Create a GitHub account: https://github.com – click on Sign up. Details here
    2. Go to settings and add your full name, company and location to make it easier to identify you
    3. Add your public ssh key(s) to the account. Details here
    4. Contact your friendly System-core to be invited to the GitHub Hirlam organisation

    Fork and branch


    You can create a user fork of Harmonie by doing the following:

    1. Go to https://github.com/Hirlam/Harmonie

    2. Click on Fork to create a fork of Harmonie for your user (USER)

    3. Clone your fork:

      git clone git@github.com:USER/Harmonie.git $HOME/git/github/USER/Harmonie

    Further information is available here

    Keep your fork synced

    1. In a terminal change directory to the clone of your fork:

      cd $HOME/git/github/USER/Harmonie
    2. List the current configured remote repository for your fork.

      git remote -v
      +

      Developing in the Hirlam GitHub organization

      Introduction

      Since 2018 and CY43, HIRLAM has used git for code revision control and gitolite as the git server on hirlam.org. HIRLAM is now moving to GitHub for software development and version control. This page provides information on how to access the GitHub Hirlam organisation and how to commit your developments, specifically for Harmonie. As was the case with hirlam.org's gitolite, a fork-and-branch workflow is used to manage developments.

      Becoming a member of Hirlam

      1. Create a GitHub account: https://github.com – click on Sign up. Details here
      2. Go to settings and add your full name, company and location to make it easier to identify you
      3. Add your public ssh key(s) to the account. Details here
      4. Contact your friendly System-core to be invited to the GitHub Hirlam organisation

      Fork and branch


      You can create a user fork of Harmonie by doing the following:

      1. Go to https://github.com/Hirlam/Harmonie

      2. Click on Fork to create a fork of Harmonie for your user (USER)

      3. Clone your fork:

        git clone git@github.com:USER/Harmonie.git $HOME/git/github/USER/Harmonie

      Further information is available here

      Keep your fork synced

      1. In a terminal change directory to the clone of your fork:

        cd $HOME/git/github/USER/Harmonie
      2. List the current configured remote repository for your fork.

        git remote -v
         origin	git@github.com:USER/Harmonie.git (fetch)
         origin	git@github.com:USER/Harmonie.git (push)
      3. Specify a new remote upstream repository that will be synced with the fork.

        git remote add upstream git@github.com:Hirlam/Harmonie.git
      4. Verify the new upstream repository you've specified for your fork.

        git remote -v
         origin	git@github.com:USER/Harmonie.git (fetch)
        @@ -33,4 +33,4 @@
         remote: 
      5. Follow this link

        • request a reviewer
        • add labels to the development (feature/enhancement/...)
        • add comments to help with the review process (Testbed members used/Changes expected if any/...)
      6. Once the pull request has been approved by the System-core team it will be merged into the dev-CY46h1 branch

      Further information is available here

      Moving my branches from hirlam.org

      1. Add your hirlam.org fork as a remote (HLUSER is your hirlam.org username)

        cd $HOME/git/github/USER/Harmonie
         git remote add hirlamorgfork https://git.hirlam.org/users/HLUSER/Harmonie
         git fetch hirlamorgfork
      2. For each branch BRANCHNAME you want to move to github

        git checkout -t hirlamorgfork/BRANCHNAME
        -git push origin BRANCHNAME

      learn git branching is an excellent interactive tool to understand git.
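
      As a day-to-day complement to the remote setup above, a typical way to bring a local branch up to date with upstream could be the following sketch (dev-CY46h1 taken as the development branch, as mentioned earlier):

        cd $HOME/git/github/USER/Harmonie
        git fetch upstream                 # get the latest upstream commits
        git checkout dev-CY46h1
        git merge upstream/dev-CY46h1      # or rebase, depending on your workflow
        git push origin dev-CY46h1         # update your fork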

      Coding Standards

      See Coding standards for Arpège, IFS and Aladin and Arpege/IFS Fortran coding standard (requires ECMWF account)

      +git push origin BRANCHNAME

    learn git branching is an excellent interactive tool to understand git.

    Coding Standards

    See Coding standards for Arpège, IFS and Aladin and Arpege/IFS Fortran coding standard (requires ECMWF account)

    diff --git a/previews/PR1129/System/HarmonieTestbed/index.html b/previews/PR1129/System/HarmonieTestbed/index.html index 2d317cb19..f8c248712 100644 --- a/previews/PR1129/System/HarmonieTestbed/index.html +++ b/previews/PR1129/System/HarmonieTestbed/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    The HARMONIE testbed

    The HARMONIE testbed provides a facility to run a number of well-defined test cases using the existing script environment in HARMONIE. The ALADIN testbed, Mitraillette, runs tests on the heart of the model, the dynamical core. The HARMONIE testbed tests the full script system as it is supposed to be used.

    Defining the configurations

    General

    The testbed is a suite that launches and follows new experiments one at a time in a controlled environment. The testbed experiment takes care of compilation and also hosts the climate files generated by the tested configurations. Source and script changes should be made in the testbed experiment and will be synchronized to the child experiments using the hm_CMODS option in HARMONIE.

    A number of basic configurations have been defined in scr/Harmonie_configurations.pm as the deviation from the default setup in ecf/config_exp.h, scr/include.ass and suites/harmonie.pm. These configurations are controlled by the script scr/Harmonie_testbed.pl. The script also contains a number of extra configurations tested from time to time. With the current settings, a test of AROME without 3DVAR would look like.

    
    +

    The HARMONIE testbed

    The HARMONIE testbed provides a facility to run a number of well-defined test cases using the existing script environment in HARMONIE. The ALADIN testbed, Mitraillette, runs tests on the heart of the model, the dynamical core. The HARMONIE testbed tests the full script system as it is supposed to be used.

    Defining the configurations

    General

    The testbed is a suite that launches and follows new experiments one at a time in a controlled environment. The testbed experiment takes care of compilation and also hosts the climate files generated by the tested configurations. Source and script changes should be made in the testbed experiment and will be synchronized to the child experiments using the hm_CMODS option in HARMONIE.

    A number of basic configurations have been defined in scr/Harmonie_configurations.pm as the deviation from the default setup in ecf/config_exp.h, scr/include.ass and suites/harmonie.pm. These configurations are controlled by the script scr/Harmonie_testbed.pl. The script also contains a number of extra configurations tested from time to time. With the current settings, a test of AROME without 3DVAR would look like.

    
         # AROME no 3D-VAR but default blending of upper air from boundaries
         'AROME' => {
           'description' => 'Standard AROME settings without upper air DA',
    @@ -122,4 +122,4 @@
     [ Status: OK]
     
      For more details please check /scratch/hlam/hm_home/testbed_ECMWF.atos.gnu_12414/testbed_comp_12414.log_details
    -

    All the logs from a testbed experiment are posted to the mail address MAIL_TESTBED set in ecf/config_exp.h. If a github token GH_TOKEN is set in scr/Testbed_comp the results will also be posted on the Testbed output discussions on github using the GraphQL API. See the github settings to create a token; tick at least the repo box. Save your token in $HOME/.ssh/gh_testbed.token or in $HOME/env/gh_testbed.token, make it readable only by you (chmod 600), and it will be used. The test returns the following status signals:

    • OK means that all configurations reproduce the result of your reference experiment.
    • OK, BUT NO COMPARISON means that the suite ran through but there was nothing to compare with
    • FAILED means that the internal comparisons failed
    • DIFFER means that one or more configurations differ from your reference experiment
    • FAILED and DIFFER is a combination of the last two

    In addition to the summary, detailed information about the nature of the differences can be found in the archive.

    When to use the testbed

    It is recommended to use the testbed when adding new options or making other changes to the configurations. If your new option is not activated, the result should be the same as the reference experiment; if not, you have to start debugging. When changing things for one configuration it is easy to break other ones. In such cases the testbed is a very good tool to make sure you haven't broken anything.

    +

    All the logs from a testbed experiment are posted to the mail address MAIL_TESTBED set in ecf/config_exp.h. If a github token GH_TOKEN is set in scr/Testbed_comp the results will also be posted on the Testbed output discussions on github using the GraphQL API. See the github settings to create a token; tick at least the repo box. Save your token in $HOME/.ssh/gh_testbed.token or in $HOME/env/gh_testbed.token, make it readable only by you (chmod 600), and it will be used. The test returns the following status signals:

    • OK means that all configurations reproduce the result of your reference experiment.
    • OK, BUT NO COMPARISON means that the suite ran through but there was nothing to compare with
    • FAILED means that the internal comparisons failed
    • DIFFER means that one or more configurations differ from your reference experiment
    • FAILED and DIFFER is a combination of the last two

    In addition to the summary, detailed information about the nature of the differences can be found in the archive.

    When to use the testbed

    It is recommended to use the testbed when adding new options or making other changes to the configurations. If your new option is not activated, the result should be the same as the reference experiment; if not, you have to start debugging. When changing things for one configuration it is easy to break other ones. In such cases the testbed is a very good tool to make sure you haven't broken anything.

    diff --git a/previews/PR1129/System/Local/QuickStartLocal/index.html b/previews/PR1129/System/Local/QuickStartLocal/index.html index a41740b2d..4d1d403a5 100644 --- a/previews/PR1129/System/Local/QuickStartLocal/index.html +++ b/previews/PR1129/System/Local/QuickStartLocal/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Running Harmonie on your local platform

    Introduction

    These "quick start instructions" assumes that someone has already put in place a valid configuration for your local platform, CONFIG=linux.local for example.

    The Harmonie system runs through a number of steps to help you complete your experiment. The chain can be summarized as follows:

    • Configure and start the experiment: This is where you define your domain, choose your settings and specify the period for your experiment.

    Once you have done this you can start the system and let it create the basic infrastructure

    • Setup the necessary directories and copy the system files needed (InitRun, Prepare_cycle)
    • Compile the binaries you need to run your experiment (Build)
    • Create the constant climate files specifying your domain (Climate)

    With the basic setup and files in place we can proceed to the integration part where we have three loops taking care of

    • Prepare boundaries and observations (MakeCycleInput)
    • Run assimilation and forecasts (Date)
    • Post process and archive the result (Postprocessing)

    The three different tasks are allowed to run ahead of/after each other to achieve good throughput.

    The configuration, the full suite and the relations between the different tasks are controlled by the scheduler. This documentation describes how to get started with your first experiment. The description is general for a single host.

    The following example shows the steps to launch a Harmonie experiment, my_exp.

    If this is the first time you are installing HARMONIE on your local platform, please take a look at the basic installation instructions here.

    Configure your experiment

    • Create an experiment directory under $HOME/hm_home and use the master script Harmonie to set up a minimum environment for your experiment:

      mkdir -p $HOME/hm_home/my_exp
      cd $HOME/hm_home/my_exp
      PATH_TO_HARMONIE/config-sh/Harmonie setup -r PATH_TO_HARMONIE -h YOURHOST

    where

    • -r is the path to your downloaded version of HARMONIE
    • -h tells which configuration files to use. At ECMWF config.ECMWF.atos is the default one. List PATH_TO_HARMONIE/config-sh/config.* for available HOST configurations
    • This setup command provides the default setup which currently is AROME physics with CANARI+OI_MAIN surface assimilation and 3DVAR upper air assimilations with 3h cycling on a domain covering Denmark using 2.5km horizontal resolution and 65 levels in the vertical.
    • Now you can edit the basic configuration file ecf/config_exp.h to configure your experiment scenarios. Modify specifications for model domain, physics (AROME, ALARO), data locations, settings for dynamics, coupling host model, etc. Read more about the options here. You can also use some of the predefined configurations by calling Harmonie with the -c option:
    mkdir $HOME/hm_home/my_exp
    +

    Running Harmonie on your local platform

    Introduction

    These "quick start instructions" assumes that someone has already put in place a valid configuration for your local platform, CONFIG=linux.local for example.

    The Harmonie system runs through a number of steps to help you complete your experiment. The chain can be summarized as follows:

    • Configure and start the experiment: This is where you define your domain, choose your settings and specify the period for your experiment.

    Once you have done this you can start the system and let it create the basic infrastructure

    • Setup the necessary directories and copy the system files needed (InitRun, Prepare_cycle)
    • Compile the binaries you need to run your experiment (Build)
    • Create the constant climate files specifying your domain (Climate)

    With the basic setup and files in place we can proceed to the integration part where we have three loops taking care of

    • Prepare boundaries and observations (MakeCycleInput)
    • Run assimilation and forecasts (Date)
    • Post process and archive the result (Postprocessing)

    The three different tasks are allowed to run ahead of/after each other to achieve good throughput.

    The configuration, the full suite and the relations between the different tasks are controlled by the scheduler. This documentation describes how to get started with your first experiment. The description is general for a single host.

    The following example shows the steps to launch a Harmonie experiment, my_exp.

    If this is the first time you are installing HARMONIE on your local platform, please take a look at the basic installation instructions here.

    Configure your experiment

    • Create an experiment directory under $HOME/hm_home and use the master script Harmonie to set up a minimum environment for your experiment:

      mkdir -p $HOME/hm_home/my_exp
      cd $HOME/hm_home/my_exp
      PATH_TO_HARMONIE/config-sh/Harmonie setup -r PATH_TO_HARMONIE -h YOURHOST

    where

    • -r is the path to your downloaded version of HARMONIE
    • -h tells which configuration files to use. At ECMWF config.ECMWF.atos is the default one. List PATH_TO_HARMONIE/config-sh/config.* for available HOST configurations
    • This setup command provides the default setup which currently is AROME physics with CANARI+OI_MAIN surface assimilation and 3DVAR upper air assimilations with 3h cycling on a domain covering Denmark using 2.5km horizontal resolution and 65 levels in the vertical.
    • Now you can edit the basic configuration file ecf/config_exp.h to configure your experiment scenarios. Modify specifications for model domain, physics (AROME, ALARO), data locations, settings for dynamics, coupling host model, etc. Read more about the options here. You can also use some of the predefined configurations by calling Harmonie with the -c option:
    mkdir $HOME/hm_home/my_exp
     cd $HOME/hm_home/my_exp
     PATH_TO_HARMONIE/config-sh/Harmonie setup -r PATH_TO_HARMONIE -h YOURHOST -c CONFIG 

    where CONFIG is one of the setups defined in scr/Harmonie_configurations.pm. If you give -c without an argument or with a non-existing configuration, a list of available configurations will be printed.
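
    For instance, simply leaving out the argument should print the available configurations (illustrative invocation based on the description above):

      PATH_TO_HARMONIE/config-sh/Harmonie setup -r PATH_TO_HARMONIE -h YOURHOST -c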

    • In some cases you might have to edit the general system configuration file, Env_system. See here for further information
    • The rules for how to submit jobs are defined in Env_submit. See here for further information
    • If you experiment in data assimilation you might also want to change scr/include.ass

    Start your experiment

    Launch the experiment by giving start time, DTG, end time, DTGEND, and forecast length, LL

    cd $HOME/hm_home/my_exp
     PATH_TO_HARMONIE/config-sh/Harmonie start DTG=YYYYMMDDHH DTGEND=YYYYMMDDHH LL=12
    @@ -12,4 +12,4 @@
     PATH_TO_HARMONIE/config-sh/Harmonie prod DTGEND=YYYYMMDDHH LL=12 

    By using prod you tell the system that you are continuing the experiment, using the first guess from the previous cycle. The start date is taken from the file progress.log created in your $HOME/hm_home/my_exp directory. If you had used start, the initial data would have been interpolated from the boundaries, in other words a cold start.

    Start/Restart of ecflow_ui

    To start the graphical window for ecFlow on ECMWF type

    cd $HOME/hm_home/my_exp
     PATH_TO_HARMONIE/config-sh/Harmonie mon

    The graphical window, mXCdp runs independently of the mSMS job and can be closed and restarted again with the same command. With the graphical interface you can control and view logfiles of each task.

    Making local changes

    Very soon you will find that you need to make changes in a script or in the source code. Once you have identified which file to edit, put it into your $HOME/hm_home/my_exp directory, with exactly the same subdirectory structure as in the reference, e.g. if you want to modify a namelist setting

    cd $HOME/hm_home/my_exp
     PATH_TO_HARMONIE/config-sh/Harmonie co nam/harmonie_namelists.pm         # retrieve default namelist harmonie_namelists.pm
    -vi nam/harmonie_namelists.pm                        # modify the namelist

    Next time you run your experiment the changed file will be used. You can also make changes in a running experiment. Make the change you wish and rerun the InitRun task in the mXCdp window. The InitRun task copies all files from your local experiment directory to your working directory $HM_DATA. Once your InitRun task is complete you can rerun the task you are interested in. If you wish to recompile something you will also have to rerun the Build tasks. Read more about how to control and rerun tasks in mini-SMS from mXCdp.

    Directory structure

    On most platforms HARMONIE compiles and produces all its output data under $HM_DATA (defined in ~/hm_home/my_exp/Env_system)

    Description | Location
    Binaries | $BINDIR (set in ecf/config_exp.h), default is $HM_DATA/bin
    Libraries, object files & source code | $HM_DATA/lib/src if MAKEUP=yes, $HM_DATA/gmkpack_build if MAKEUP=no
    Scripts | $HM_LIB/scr
    Config files (Env_system & Env_submit) | $HM_LIB, linked to files in $HM_LIB/config-sh
    ecf scripts and main config | $HM_LIB/ecf
    ecFlow suite definitions | $HM_LIB/suites
    Utilities such as gmkpack, gl & monitor | $HM_DATA/lib/util
    Climate files | $HM_DATA/climate
    Working directory for the current cycle | $HM_DATA/YYYYMMDD_HH
    Archived files | $HM_DATA/archive
    Archived cycle output | $HM_DATA/archive/YYYY/MM/DD/HH
    Archived log files | $HM_DATA/archive/log/HM_TaskFamily_YYYYMMDDHH.html where TaskFamily=MakeCycleInput,Date,Postprocessing
    Task log files | $JOBOUTDIR (set in Env_system), usually $HM_DATA/sms_logfiles
    Verification data (vfld/vobs/logmonitor) | $HM_DATA/archive/extract
    Verification (monitor) results | $HM_DATA/archive/extract/WebgraF
    "Fail" directory | $HM_DATA/YYYYMMDD_HH/Failed_Family_Task (look at ifs.stat, NODE.001_01, fort.4)

    Archive contents

    $HM_DATA/archive/YYYY/MM/DD/HH is used to store "archived" output from HARMONIE cycles. The level of archiving depends on ARSTRATEGY in ecf/config_exp.h . The default setting is medium which will keep the following cycle data:

    • Surface analysis: ICMSHANAL+0000
    • Atmospheric analysis result: MXMIN1999+0000
    • Blending between surface/atmospheric analysis and cloud variable from the first guess: ANAB1999+0000
    • ICMSHHARM+NNNN and ICMSHHARM+NNNN.sfx are atmospheric and surfex forecast output files
    • PFHARM* files produced by the inline postprocessing
    • GRIB files produced by the conversion of FA output files to GRIB if MAKEGRIB=yes in ecf/config_exp.h
    • ODB databases and feedback information in odb_stuff.tar

    Cleanup of old experiments

    Once you have completed your experiment you may wish to remove code, scripts and data from the disks. Harmonie provides some simple tools to do this. First check the content of the different disks by

     Harmonie CleanUp -ALL

    Once you have convinced yourself that this is OK you can proceed with the removal.

     Harmonie CleanUp -ALL -go 

    If you would like to exclude the data stored under HM_DATA (as defined in Env_system) you run

     Harmonie CleanUp -d

    to list the directories intended for cleaning. Again, convince yourself that this is OK and proceed with the cleaning by

     Harmonie CleanUp -d -go

    NOTE that these commands may not work properly in all versions. Do not run the removal before you're sure it's OK

    +vi nam/harmonie_namelists.pm # modify the namelist

    Next time you run your experiment the changed file will be used. You can also make changes in a running experiment. Make the change you wish and rerun the InitRun task in the mXCdp window. The InitRun task copies all files from your local experiment directory to your working directory $HM_DATA. Once your InitRun task is complete you can rerun the task you are interested in. If you wish to recompile something you will also have to rerun the Build tasks. Read more about how to control and rerun tasks in mini-SMS from mXCdp.

    Directory structure

    On most platforms HARMONIE compiles and produces all its output data under $HM_DATA (defined in ~/hm_home/my_exp/Env_system)

    Description | Location
    Binaries | $BINDIR (set in ecf/config_exp.h), default is $HM_DATA/bin
    Libraries, object files & source code | $HM_DATA/lib/src if MAKEUP=yes, $HM_DATA/gmkpack_build if MAKEUP=no
    Scripts | $HM_LIB/scr
    Config files (Env_system & Env_submit) | $HM_LIB, linked to files in $HM_LIB/config-sh
    ecf scripts and main config | $HM_LIB/ecf
    ecFlow suite definitions | $HM_LIB/suites
    Utilities such as gmkpack, gl & monitor | $HM_DATA/lib/util
    Climate files | $HM_DATA/climate
    Working directory for the current cycle | $HM_DATA/YYYYMMDD_HH
    Archived files | $HM_DATA/archive
    Archived cycle output | $HM_DATA/archive/YYYY/MM/DD/HH
    Archived log files | $HM_DATA/archive/log/HM_TaskFamily_YYYYMMDDHH.html where TaskFamily=MakeCycleInput,Date,Postprocessing
    Task log files | $JOBOUTDIR (set in Env_system), usually $HM_DATA/sms_logfiles
    Verification data (vfld/vobs/logmonitor) | $HM_DATA/archive/extract
    Verification (monitor) results | $HM_DATA/archive/extract/WebgraF
    "Fail" directory | $HM_DATA/YYYYMMDD_HH/Failed_Family_Task (look at ifs.stat, NODE.001_01, fort.4)

    Archive contents

    $HM_DATA/archive/YYYY/MM/DD/HH is used to store "archived" output from HARMONIE cycles. The level of archiving depends on ARSTRATEGY in ecf/config_exp.h . The default setting is medium which will keep the following cycle data:

    • Surface analysis: ICMSHANAL+0000
    • Atmospheric analysis result: MXMIN1999+0000
    • Blending between surface/atmospheric analysis and cloud variable from the first guess: ANAB1999+0000
    • ICMSHHARM+NNNN and ICMSHHARM+NNNN.sfx are atmospheric and surfex forecast output files
    • PFHARM* files produced by the inline postprocessing
    • GRIB files produced by the conversion of FA output files to GRIB if MAKEGRIB=yes in ecf/config_exp.h
    • ODB databases and feedback information in odb_stuff.tar

    Cleanup of old experiments

    Once you have completed your experiment you may wish to remove code, scripts and data from the disks. Harmonie provides some simple tools to do this. First check the content of the different disks by

     Harmonie CleanUp -ALL

    Once you have convinced yourself that this is OK you can proceed with the removal.

     Harmonie CleanUp -ALL -go 

    If you would like to exclude the data stored under HM_DATA (as defined in Env_system) you run

     Harmonie CleanUp -d

    to list the directories intended for cleaning. Again, convince yourself that this is OK and proceed with the cleaning by

     Harmonie CleanUp -d -go

    NOTE that these commands may not work properly in all versions. Do not run the removal before you're sure it's OK

    diff --git a/previews/PR1129/System/MFaccess/index.html b/previews/PR1129/System/MFaccess/index.html index 61976e77f..00c35d441 100644 --- a/previews/PR1129/System/MFaccess/index.html +++ b/previews/PR1129/System/MFaccess/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Using Météo-France Servers

    Introduction

    The procedure to get access to MF servers and their read-only git repository is outlined here

    First steps

    • Discuss your requirements for access to MF servers with the HIRLAM System project leader, Daniel Santos (dsantosm@aemet.es).
    • Download the two forms "Undertaking for the use of Météo-France computer resources" and "Demande d'autorisation de connexion au réseau de Météo-France" from http://www.cnrm.meteo.fr/aladin/spip.php?article157.
      • The "Undertaking for the use of Météo-France computer resources" form is to be signed by you only
      • The "Demande d'autorisation de connexion au réseau de Météo-France" form must be signed by you and your department head. It must also include an institute stamp. You should enter details in Contacts, Compte d'accès aux machines du Centre de Calcul, and at the bottom obtain authorization from your institute manager with the institute stamp. A scan of both forms with a brief introductory note should be sent to Eric Escaliere (eric.escaliere@meteo.fr) and cc'ed to Daniel Santos (dsantosm@aemet.es) and Claude Fischer (claude.fischer@meteo.fr).
      • Be careful with the "Machine du client". I had to specify the name and IP address of my institute's Firewall server as this is what the outside world sees when I access external servers from my PC.
    • Météo-France will send (by post) your username (Identificateur) and password (Mot de passe) for log in.
    • The authentication process itself remains in two steps (first “parme”, then target), as before.
    • A few specific examples follow (see MF's instructions for full details):
      • beaufix:
    ewhelan@realin23:gcc-8.3.1:.../~> which beaufix
    +

    Using Météo-France Servers

    Introduction

    The procedure to get access to MF servers and their read-only git repository is outlined here

    First steps

    • Discuss your requirements for access to MF servers with the HIRLAM System project leader, Daniel Santos (dsantosm@aemet.es).
    • Download the two forms "Undertaking for the use of Météo-France computer resources" and "Demande d'autorisation de connexion au réseau de Météo-France" from http://www.cnrm.meteo.fr/aladin/spip.php?article157.
      • The "Undertaking for the use of Météo-France computer resources" form is to be signed by you only
      • The "Demande d'autorisation de connexion au réseau de Météo-France" form must be signed by you and your department head. It must also include an institute stamp. You should enter details in Contacts, Compte d'accès aux machines du Centre de Calcul, and at the bottom obtain authorization from your institute manager with the institute stamp. A scan of both forms with a brief introductory note should be sent to Eric Escaliere (eric.escaliere@meteo.fr) and cc'ed to Daniel Santos (dsantosm@aemet.es) and Claude Fischer (claude.fischer@meteo.fr).
      • Be careful with the "Machine du client". I had to specify the name and IP address of my institute's Firewall server as this is what the outside world sees when I access external servers from my PC.
    • Météo-France will send (by post) your username (Identificateur) and password (Mot de passe) for log in.
    • The authentication process itself remains in two steps (first “parme”, then target), as before.
    • A few specific examples follow (see MF's instructions for full details):
      • beaufix:
    ewhelan@realin23:gcc-8.3.1:.../~> which beaufix
     alias beaufix='telnet beaufix.meteo.fr'
     	/usr/bin/telnet
     ewhelan@realin23:gcc-8.3.1:.../~> beaufix 
    @@ -94,4 +94,4 @@
     [whelane@merou ~]$ 

    Access to (read-only) MF git arpifs git repository

    MF use ssh keys to allow access to their read-only git repository. If approved by the HIRLAM System PL you should request access to the repository by sending a request e-mail to Eric Escaliere and cc'ed to Daniel Santos and Claude Fischer your ssh public key attached.

    Once you have been given access you can create a local clone by issuing the following commands:

    cd $HOME
     mkdir arpifs_releases
     cd arpifs_releases
    -git clone ssh://reader054@git.cnrm-game-meteo.fr/git/arpifs.git

    Happy gitting!

    +git clone ssh://reader054@git.cnrm-game-meteo.fr/git/arpifs.git

    Happy gitting!

    diff --git a/previews/PR1129/System/ReleaseProcess/index.html b/previews/PR1129/System/ReleaseProcess/index.html index b715f265c..7cf10d275 100644 --- a/previews/PR1129/System/ReleaseProcess/index.html +++ b/previews/PR1129/System/ReleaseProcess/index.html @@ -3,4 +3,4 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    Harmonie release process

    This page describes the release process for tagging new Harmonie versions

    Harmonie repository organization

    In the past we used the concept of trunk (svn) or develop (git) for the development of Harmonie-Arome codes. Since CY46 we have decided to use dev-CYXXhX as the development branch, to be clearer about the Harmonie version under development.

    Harmonie-AROME naming convention

    The development of a Harmonie-Arome version starts from a common T (Toulouse) cycle of the ACCORD consortium, taken from the IAL repository.

    • The naming convention uses the number of the T cycle taken as base.
    • The letter h indicates that it is, or will be, a Harmonie-Arome CSC that differs from the T base code version.
    • The first number after the h refers to the version of the T cycle used as base (e.g. CY46T1 is used as base for dev-CY46h1).

    Tagging

    In Hirlam, various tags are made prior to official releases to provide user communities with a 'frozen' code set, even though the code has not necessarily been fully validated. These codes are often labelled alpha, beta or rc.

    • Alpha release (e.g. harmonie-46h1.alpha.1): a snapshot of the dev branch which is not yet mature, either technically or meteorologically
    • Beta release (e.g. harmonie-46h1.beta.1): a snapshot of the dev branch which is deemed technically mature enough for evaluation and meteorological validation; there may still be more features to add
    • Target releases (e.g. harmonie-43h2.2.target.2 and harmonie-43h2.2.target.3): pre-release tagging for final meteorological evaluation
    • Release candidate (e.g. harmonie-43h2.2.rc1): pre-release tagging for final evaluation
    • Official release (e.g. harmonie-43h2.2): mature for operational use
      • The second number refers to the number of the Harmonie-Arome release that has been technically and meteorologically quality assured
      • A third number may appear in the name for a minor update, technical release necessities or other aspects (e.g. harmonie-43h2.2.1)
      • Some bugfix branches may also be active, using bf in the naming (e.g. harmonie-43h2.2_bf)
    +

    Harmonie release process

    This page describes the release process for tagging new Harmonie versions

    Harmonie repository organization

    In the past we used the concept of trunk (svn) or develop (git) for the development of Harmonie-Arome codes. Since CY46 we have decided to use dev-CYXXhX as the development branch, to be clearer about the Harmonie version under development.

    Harmonie-AROME naming convention

    The development of a Harmonie-Arome version starts from a common T (Toulouse) cycle of the ACCORD consortium, taken from the IAL repository.

    • The naming convention uses the number of the T cycle taken as base.
    • The letter h indicates that it is, or will be, a Harmonie-Arome CSC that differs from the T base code version.
    • The first number after the h refers to the version of the T cycle used as base (e.g. CY46T1 is used as base for dev-CY46h1).

    Tagging

    In Hirlam, various tags are made prior to official releases to provide user communities with a 'frozen' code set, even though the code has not necessarily been fully validated. These codes are often labelled alpha, beta or rc.

    • Alpha release (e.g. harmonie-46h1.alpha.1): a snapshot of the dev branch which is not yet mature, either technically or meteorologically
    • Beta release (e.g. harmonie-46h1.beta.1): a snapshot of the dev branch which is deemed technically mature enough for evaluation and meteorological validation; there may still be more features to add
    • Target releases (e.g. harmonie-43h2.2.target.2 and harmonie-43h2.2.target.3): pre-release tagging for final meteorological evaluation
    • Release candidate (e.g. harmonie-43h2.2.rc1): pre-release tagging for final evaluation
    • Official release (e.g. harmonie-43h2.2): mature for operational use
      • The second number refers to the number of the Harmonie-Arome release that has been technically and meteorologically quality assured
      • A third number may appear in the name for a minor update, technical release necessities or other aspects (e.g. harmonie-43h2.2.1)
      • Some bugfix branches may also be active, using bf in the naming (e.g. harmonie-43h2.2_bf)
    diff --git a/previews/PR1129/System/StandaloneOdb/index.html b/previews/PR1129/System/StandaloneOdb/index.html index 00de03fc9..c9ecd5419 100644 --- a/previews/PR1129/System/StandaloneOdb/index.html +++ b/previews/PR1129/System/StandaloneOdb/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    ODB software

    Get the software

    To make best use of the ODB information produced by your Harmonie experiment one should use the ODB and ODB-API software developed by ECMWF. Below are instructions on how to obtain the software from ECMWF.

    ODB-API

    ODB-API software is open source and released under an Apache licence

    ODB

    ODB stands for Observational DataBase. It is database software for storing and retrieving large amounts of meteorological numerical data in an efficient manner when used from within IFS. ODB software mimics relational database queries through its ODB/SQL compiler and currently accesses data via a Fortran90 library interface. The original documentation is available here

    Building your ODB software

    The ODB-API software bundle uses cmake (https://www.cmake.org) to configure the makefiles used to compile the software. The instructions below worked with Redhat 7/GCC 4.8.5 and CentOS 7/GCC 4.8.5. On newer systems python functionality may have to be switched off with -DENABLE_PYTHON=OFF.

    VERSION=0.18.1
    +

    ODB software

    Get the software

    To make best use of the ODB information produced by your Harmonie experiment one should use the ODB and ODB-API software developed by ECMWF. Below are instructions on how to obtain the software from ECMWF.

    ODB-API

    ODB-API software is open source and released under an Apache licence

    ODB

    ODB stands for Observational DataBase. It is database software for storing and retrieving large amounts of meteorological numerical data in an efficient manner when used from within IFS. ODB software mimics relational database queries through its ODB/SQL compiler and currently accesses data via a Fortran90 library interface. The original documentation is available here

    Building your ODB software

    The ODB-API software bundle uses cmake (https://www.cmake.org) to configure the makefiles used to compile the software. The instructions below worked with Redhat 7/GCC 4.8.5 and CentOS 7/GCC 4.8.5. On newer systems python functionality may have to be switched off with -DENABLE_PYTHON=OFF.

    VERSION=0.18.1
     wget https://confluence.ecmwf.int/download/attachments/61117379/odb_api_bundle-${VERSION}-Source.tar.gz
     gunzip odb_api_bundle-${VERSION}-Source.tar.gz
     tar -xvf odb_api_bundle-${VERSION}-Source.tar
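     # A typical out-of-source cmake configure/build might then look like this
     # (install prefix and parallelism are illustrative; add -DENABLE_PYTHON=OFF on newer systems):
     mkdir -p build && cd build
     cmake ../odb_api_bundle-${VERSION}-Source -DCMAKE_INSTALL_PREFIX=$HOME/odb_api -DENABLE_PYTHON=OFF
     make -j4
     make install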
    @@ -68,4 +68,4 @@
     cp -r /home/ms/ie/dui/odbMacroTest .
     cd odbMacroTest
     metview4 -b odbmap.mv4 conv201312.odb "obsvalue" "andate=20131225 and antime=120000 and varno=39" legon png
    -xv odbmap.1.png
    +xv odbmap.1.png
    diff --git a/previews/PR1129/System/TheHarmonieScript/index.html b/previews/PR1129/System/TheHarmonieScript/index.html index 050d022e3..475b7d19b 100644 --- a/previews/PR1129/System/TheHarmonieScript/index.html +++ b/previews/PR1129/System/TheHarmonieScript/index.html @@ -3,7 +3,7 @@ function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-HQ1BCP3LPJ', {'page_path': location.pathname + location.search + location.hash}); -

    The Harmonie main script

    The Harmonie script is the main user interface to the Harmonie system. It is used to set up, start, check and control your experiment and environment. The most useful commands are listed below. There are other commands inherited from the HIRLAM environment that may or may not work. For a full list check scr/Start, scr/Actions and scr/Actions.pl.

    • Harmonie setup [ -r REVISION] [ -h HOST] [ -d DOMAIN] [ -c CONFIGURATION] [ -l LEVELS] where:

      • REVISION is the path to the version of harmonie you are working with.
      • HOST is the name of the host you are working on. There should exist corresponding config-sh/config.HOST.
      • CONFIGURATION is one of the predefined configurations in scr/Harmonie_testbed.pl. It is a fast way to set up your favourite configuration.
      • DOMAIN is one of the predefined domains in ecf/config_exp.h
      • LEVELS is one of the predefined level definitions in scr/Vertical_levels.pl
    • Harmonie start DTG=YYYYMMDDHH [ DTGEND=YYYYMMDDHH] [ optional environment variables] launches a cold start run.

      • DTG is the initial time of your experiment
      • Several other optional variables can be given like
        • PLAYFILE=FILENAME use a different ecflow suite definition file. Default is harmonie.tdf
        • BUILD=yes|no to turn on and off compilation
        • CREATE_CLIMATE=yes|no to turn on and off generation of climate files
        • Any environment variable that you would like to send to the system.
    • Harmonie prod will continue from the DTG given in your progress.log file. The rest of the arguments are as for Harmonie start. This should be used to continue an experiment. It is assumed that a first guess file is available; the run will fail if it is not found.

    • Harmonie mon will restart your ecflow_ui window and try to connect to an existing ecflow server.

    • Harmonie co [FILE|PATH/FILE] will copy the requested file from the version chosen in your setup (as pointed out in the config-sh/hm_rev file) to your local directory. If the PATH is not given a search will be done. If the name matches several files you will be given a list to choose from.

    • Harmonie install will build your libraries and binaries but not start any experiment

    • Harmonie testbed will launch the Harmonie testbed

    • Harmonie diff [--xxdiff] will look for differences between the revision in config-sh/hm_rev and HM_LIB.

    • Harmonie CleanUp -ALL -go will clean the following directories: HM_DATA, HM_LIB, HM_EXP. Instructions from src/Actions.pl:

    # args: if -go: remove, (default is to list but not remove the matching files)
    +

    The Harmonie main script

    The Harmonie script is the main user interface to the Harmonie system. It is used to set up, start, check and control your experiment and environment. The most useful commands are listed below. There are other commands inherited from the HIRLAM environment that may or may not work. For a full list check scr/Start, scr/Actions and scr/Actions.pl.

    • Harmonie setup [ -r REVISION] [ -h HOST] [ -d DOMAIN] [ -c CONFIGURATION] [ -l LEVELS] where:

      • REVISION is the path to the version of harmonie you are working with.
      • HOST is the name of the host you are working on. There should exist corresponding config-sh/config.HOST.
      • CONFIGURATION is one of the predefined configurations in scr/Harmonie_testbed.pl. It is a fast way to set up your favourite configuration.
      • DOMAIN is one of the predefined domains in ecf/config_exp.h
      • LEVELS is one of the predefined level definitions in scr/Vertical_levels.pl
    • Harmonie start DTG=YYYYMMDDHH [ DTGEND=YYYYMMDDHH] [ optional environment variables] launches a cold start run.

      • DTG is the initial time of your experiment
      • Several other optional variables can be given like
        • PLAYFILE=FILENAME use a different ecflow suite definition file. Default is harmonie.tdf
        • BUILD=yes|no to turn on and off compilation
        • CREATE_CLIMATE=yes|no to turn on and off generation of climate files
        • Any environment variable that you would like to send to the system.
    • Harmonie prod will continue from the DTG given in your progress.log file. The rest of the arguments are as for Harmonie start. This should be used to continue an experiment. It is assumed that a first guess file is available; the run will fail if it is not found.

    • Harmonie mon will restart your ecflow_ui window and try to connect to an existing ecflow server.

    • Harmonie co [FILE|PATH/FILE] will copy the requested file from the version chosen in your setup (as pointed out in the config-sh/hm_rev file) to your local directory. If the PATH is not given a search will be done. If the name matches several files you will be given a list to choose from.

    • Harmonie install will build your libraries and binaries but not start any experiment

    • Harmonie testbed will launch the Harmonie testbed

    • Harmonie diff [--xxdiff] will look for differences between the revision in config-sh/hm_rev and HM_LIB.

    • Harmonie CleanUp -ALL -go will clean the following directories: HM_DATA, HM_LIB, HM_EXP. Instructions from src/Actions.pl:

    # args: if -go: remove, (default is to list but not remove the matching files)
     #       if -k*: do not do the long term archive HM_EXP - so keep it
     #       if -d*: combination of -k and -ALL (-d* means: disks)
     #       if -ALL: treat all files and also (if -go) remove the directories
    @@ -17,4 +17,4 @@
     #       unless the / is preceded by ~ (which will be removed).
     #       Hence, to remove e.g. all analyses from 1995, use 1995/an,
     #       which translates to 1995[0-9][0-9]*_*/an*
    -#       (to be precise: use: CleanUp("REMOVE:1995/an", "-go");
    +# (to be precise: use: CleanUp("REMOVE:1995/an", "-go");
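
    As a usage illustration of the start command options listed above, a cold start that skips compilation and climate-file generation could be launched like this (dates are illustrative):

      ./Harmonie start DTG=2022122400 DTGEND=2022122406 BUILD=no CREATE_CLIMATE=no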
    Update the namelist hashes

    Introduction

Each namelist is built from a Perl dictionary of different settings, nam/harmonie_namelists.pm, as the deviation from the default setup. One section takes care of the general file settings, one of the MPP options, and the large ones of the different configurations. The script nam/gen_namelists.pl allows us to build new namelists by adding the settings on top of each other. In the following we describe how to add new namelists and include them in the suite.

    Create a new hash module

Let us assume we have some new 4DVAR namelists we would like to merge. Create a directory, 4dvar, and put your new namelists there. Then run the script nam/Create_hashes.pl:

./Create_hashes.pl 4dvar
Create namelist hash for 4dvar
Scan 4dvar/namscreen_dat_4d
Scan 4dvar/namtraj_1_4d
...
Create namelist hash 4dvar.pm
Create updated empty namelist hash empty_4dvar.pm for 4dvar

We have now created Perl modules for the new namelists: one with empty namelist entries, 4dvar_empty.pm, and one with all namelists in the right format, 4dvar.pm. To get one of your namelists back (sorted) you can write:

./gen_namelists.pl -n 4dvar_empty.pm -n 4dvar.pm namscreen_dat_4d

To get the module integrated in the system, the module has to be merged following the conventions in harmonie_namelists.pm, but as a start the full namelists can be used. Copy the new empty*.pm to empty.pm to get the updated list of empty namelists.
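
For that last step, with the file name as printed by Create_hashes.pl above, this is simply:

cp empty_4dvar.pm empty.pm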

    Create the new namelist

    Add the new namelists to the script scr/Get_namelist. In this case we would add a new case for 4dvar

    4dvartraj) 
        NAMELIST_CONFIG="$DEFAULT minimization dynamics ${DYNAMICS} ${PHYSICS} ${PHYSICS}_minimization ${SURFACE} ${EXTRA_FORECAST_OPTIONS} varbc minim4d"
    ;;

    How to use submodules in the HarmonieCSC repository

The information on this page is not relevant for CY46 yet, but is intended as a pointer to upcoming changes in CY49 and might be useful when porting changes from CY46 to CY49.

    Introduction

Up to CY49 the Hirlam/Harmonie repository contained all scripts and code needed for Harmonie and then some. Since CY49 a new Hirlam/HarmonieCSC repository is used. The src directory has been externalized so that it can follow the ACCORD IAL repository, simplifying code development within ACCORD. In addition to the src directory, several other components have been moved into separate repositories outside the Harmonie repository as well. To create a consistent set of scripts, code and tools, git submodules are used. Git submodules are essentially git repositories contained within the main repository, a.k.a. the superproject, with the commit hashes of the submodules tied to a specific commit of the superproject. The Hirlam/HarmonieCSC superproject contains the following submodules:

• const (git@github.com:Hirlam/HarmonieConst)
• src (git@github.com:Hirlam/IAL.git)
• util/auxlibs (git@github.com:Hirlam/Auxlibs.git)
• util/gl (git@github.com:Hirlam/GL.git)

    Preparation

To be able to access GitHub using SSH, add your public SSH key at https://github.com/settings/keys.

    For users of Harmonie (no development)

    Clone the Harmonie superproject + the submodules contained therein from the Hirlam organization:

    git clone --recurse-submodules git@github.com:Hirlam/HarmonieCSC.git

    This will create a structure very similar to the original Harmonie repository and usage is as before.

    If you forget the --recurse-submodules, you can use:

    git submodule init

to initialise your submodules after cloning, followed by a recursive pull as described below.
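
Alternatively, the recorded submodule commits of an existing clone can be initialised and checked out with one standard git command:

git submodule update --init --recursive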

    Pull changes to your local repository

    If a submodule has changed remotely, the reference in the superproject will also have changed and you can get the change with a pull. After pulling, you can update your submodules with:

    git submodule update

Both steps can be done with a single command:

    git pull --recurse-submodules

    This can be set to the default behaviour by setting:

    git config --global submodule.recurse true

However, this will check out the commit set in the superproject in all your submodules, so if you are working on your own branch, you will have to check it out again manually afterwards. If you have uncommitted changes in your branch, you will get an error and the checkout will be aborted.

    To avoid this, you can use:

    git submodule update path/to/submodule

    to update a specific submodule.


    For developers

Contributing code has become somewhat more complex, as the submodules and the superproject need to be treated separately. The steps that need to be taken are described below for three scenarios.

    Creating user forks

    To be able to create pull requests a user fork of the superproject is needed. This can be done via https://github.com/Hirlam/HarmonieCSC/fork. To contribute to one or more of the submodules, forks must be created for these as well in a similar fashion, e.g. from src via https://github.com/Hirlam/IAL/fork. User forks need to be kept up-to-date manually by the users themselves (see GitDeveloperDocumentation). Alternatively, you can clone the superproject from Hirlam and add your user fork as a second remote:

    • Clone the superproject + the submodules contained therein from the Hirlam organization, and move into it (Note that the remote will be called Hirlam, rather than origin):
      git clone -o Hirlam --recurse-submodules git@github.com:Hirlam/HarmonieCSC.git
  cd HarmonieCSC
• Then add your user fork as a new remote and update all remotes:
      git remote add <YOUR_GITHUB_USERNAME> git@github.com:<YOUR_GITHUB_USERNAME>/HarmonieCSC
  git fetch --all
    • After making changes, push to your fork, not to Hirlam.
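
With the remotes set up this way, later pushes of your work go to your fork by name, never to Hirlam, e.g. (branch name illustrative):

git push -u <YOUR_GITHUB_USERNAME> <YOUR_BRANCH>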

    Scenario 1: Modify the superproject only

    If only modifications to the superproject are needed, the workflow is very similar to before, as there is no need to update the submodules and their hashes.

    Create a new feature or bugfix branch for the new development, e.g.:

    git checkout -b <YOUR_BRANCH> [branch_to_branch_from]

This will create a new branch named <YOUR_BRANCH> based on the branch that was checked out, or on the branch specified as [branch_to_branch_from]. To create and check out the branch in a different location without copying the full repository, worktrees can be used, e.g.:

    git worktree add -b <YOUR_BRANCH> ../some_new_dev [branch_to_branch_from]

This will create the new branch <YOUR_BRANCH> and check it out in ../some_new_dev.

    Make the changes as needed, then stage and commit them:

    git add <changed_files>         # stage
git commit -m "Useful message"  # and commit

    Then push the changes in your branch to your fork:

    git push -u <YOUR_GITHUB_USERNAME> <YOUR_BRANCH>

Once you are happy, create a pull request on GitHub.

    Scenario 2: Modify a submodule only

    If only a change in the submodules is needed, first the change in the submodule needs to be made and merged, and then the submodule's hash in the superproject needs to be updated so that it points to the new commit. Basic steps are:

    • Make the desired changes to the submodule:
      • Clone your user fork
      • Go to the submodule directory and add your fork of the submodule as remote
      • Create a new branch for the submodule
      • Make and test your changes to the submodule
      • Commit the changes to the submodule
      • Push the submodule changes to your user fork of the submodule on github
      • Create a pull request for the submodule changes and wait until they are approved and merged in
    • Update the submodule's hash in the superproject:
      • Go to the superproject clone directory
      • Create a new branch
      • Update the submodule's hash and commit the change
      • Push the new branch to your user fork of the superproject
      • Create a pull request to update the submodule's hash

    See next section for details from a real case.
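
In condensed form, and using the same remote and branch conventions as the walk-through below (<target_branch> stands for the branch the submodule PR was merged into, dev-CY49T2h in the example), the two stages look roughly like this:

# In the submodule (src in this example)
cd <SOMEPATH>/HarmonieCSC/src
git checkout -b <YOUR_BRANCH>
# ... make and test the changes ...
git add <changed_files>
git commit -m '<a good descriptive message>'
git push -u <YOUR_GITHUB_USERNAME> <YOUR_BRANCH>
# create the submodule PR on GitHub; once it has been merged:
git fetch origin
git checkout origin/<target_branch>   # check out the merged commit

# In the superproject
cd <SOMEPATH>/HarmonieCSC
git checkout -b <YOUR_BRANCH>
git add src
git commit -m '<another good log message>'
git push -u origin <YOUR_BRANCH>
# create the superproject PR on GitHub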

    Steps taken in a real case, modifying src/surfex/SURFEX/interpol_npts.F90

    • Clone your up-to-date fork

> cd <SOMEPATH>
> git clone --recurse-submodules git@github.com:<YOUR_GITHUB_USERNAME>/HarmonieCSC.git
Cloning into 'HarmonieCSC'...
remote: Enumerating objects: 25230, done.
remote: Counting objects: 100% (277/277), done.
remote: Compressing objects: 100% (154/154), done.
remote: Total 25230 (delta 156), reused 198 (delta 120), pack-reused 24953
Receiving objects: 100% (25230/25230), 8.28 MiB | 2.91 MiB/s, done.
Resolving deltas: 100% (18952/18952), done.
Updating files: 100% (1395/1395), done.
Submodule 'const' (git@github.com:Hirlam/HarmonieConst) registered for path 'const'
Submodule 'src' (git@github.com:Hirlam/IAL.git) registered for path 'src'
Submodule 'util/auxlibs' (git@github.com:Hirlam/Auxlibs.git) registered for path 'util/auxlibs'
Submodule 'util/gl' (git@github.com:Hirlam/GL.git) registered for path 'util/gl'
Cloning into '<SOMEPATH>/HarmonieCSC/const'...
remote: Enumerating objects: 535, done.
remote: Total 535 (delta 0), reused 0 (delta 0), pack-reused 535
Receiving objects: 100% (535/535), 158.98 MiB | 16.17 MiB/s, done.
Resolving deltas: 100% (189/189), done.
Cloning into '<SOMEPATH>/HarmonieCSC/src'...
remote: Enumerating objects: 472517, done.
remote: Counting objects: 100% (7780/7780), done.
remote: Compressing objects: 100% (2433/2433), done.
remote: Total 472517 (delta 5550), reused 6930 (delta 5329), pack-reused 464737
Receiving objects: 100% (472517/472517), 560.70 MiB | 11.45 MiB/s, done.
Resolving deltas: 100% (381932/381932), done.
Cloning into '<SOMEPATH>/HarmonieCSC/util/auxlibs'...
remote: Enumerating objects: 2880, done.
remote: Counting objects: 100% (2880/2880), done.
remote: Compressing objects: 100% (1102/1102), done.
remote: Total 2880 (delta 1614), reused 2880 (delta 1614), pack-reused 0
Receiving objects: 100% (2880/2880), 20.29 MiB | 12.55 MiB/s, done.
Resolving deltas: 100% (1614/1614), done.
Cloning into '<SOMEPATH>/HarmonieCSC/util/gl'...
remote: Enumerating objects: 9414, done.
remote: Counting objects: 100% (9414/9414), done.
remote: Compressing objects: 100% (1839/1839), done.
remote: Total 9414 (delta 5595), reused 9408 (delta 5593), pack-reused 0
Receiving objects: 100% (9414/9414), 2.82 MiB | 4.56 MiB/s, done.
Resolving deltas: 100% (5595/5595), done.
Submodule path 'const': checked out '6ff54331ba1e9253ffde7a0c5a003f6258f78fff'
Submodule path 'src': checked out '61847aa01cf880252d5bd35e57a417ed31d1cd08'
Submodule path 'util/auxlibs': checked out '626140284a8ec8eef5974cc7fb38b7d08105ea91'
Submodule path 'util/gl': checked out '9285e2fcdb76c5afb9444e227fb03a338b68ff6a'
• All submodules in the user fork point to repositories in the Hirlam organisation, e.g.:

> cd <SOMEPATH>/HarmonieCSC/src
> git remote -v
origin  git@github.com:Hirlam/IAL.git (fetch)
origin  git@github.com:Hirlam/IAL.git (push)
• Add your fork of the IAL repository as a remote in the src directory, and fetch it:

> cd <SOMEPATH>/HarmonieCSC/src
> git remote add <YOUR_GITHUB_USERNAME> git@github.com:<YOUR_GITHUB_USERNAME>/IAL.git
> git fetch <YOUR_GITHUB_USERNAME>
remote: Enumerating objects: 363, done.
remote: Counting objects: 100% (340/340), done.
remote: Compressing objects: 100% (77/77), done.
remote: Total 363 (delta 266), reused 328 (delta 263), pack-reused 23
Receiving objects: 100% (363/363), 315.12 KiB | 1.12 MiB/s, done.
Resolving deltas: 100% (268/268), completed with 56 local objects.
From github.com:<YOUR_GITHUB_USERNAME>/IAL
 * [new branch]            accord_CY49T0_bf                       -> <YOUR_GITHUB_USERNAME>/accord_CY49T0_bf
 * [new branch]            accord_CY49T0_to_T1                    -> <YOUR_GITHUB_USERNAME>/accord_CY49T0_to_T1
 * [new branch]            feature/markdown_docs                  -> <YOUR_GITHUB_USERNAME>/feature/markdown_docs
 * [new branch]            gco_CY46T1_bf                          -> <YOUR_GITHUB_USERNAME>/gco_CY46T1_bf
 * [new branch]            gco_CY46T1_op1                         -> <YOUR_GITHUB_USERNAME>/gco_CY46T1_op1
 * [new branch]            mary_CY48T1_preT2                      -> <YOUR_GITHUB_USERNAME>/mary_CY48T1_preT2
 * [new branch]            master                                 -> <YOUR_GITHUB_USERNAME>/master
 * [new tag]               CY49T0_T1rc.01                         -> CY49T0_T1rc.01
 * [new tag]               CY49T0_bf.00                           -> CY49T0_bf.00
 * [new tag]               CY49T0_bf.01                           -> CY49T0_bf.01
 * [new tag]               CY49T0_bf.02                           -> CY49T0_bf.02
 * [new tag]               CY49T0_op0.00                          -> CY49T0_op0.00
 * [new tag]               CY49T0_to_T1.01                        -> CY49T0_to_T1.01
 * [new tag]               CY49T0_to_T1.03                        -> CY49T0_to_T1.03
 * [new tag]               CY49T0_to_T1.04                        -> CY49T0_to_T1.04
 * [new tag]               CY49T0_to_T1.05                        -> CY49T0_to_T1.05
 * [new tag]               CY49T0_to_T1.06                        -> CY49T0_to_T1.06
 * [new tag]               CY49T0_to_T1.07                        -> CY49T0_to_T1.07
 * [new tag]               CY49T0_to_T1.08                        -> CY49T0_to_T1.08
 * [new tag]               CY49T0_to_T1.09                        -> CY49T0_to_T1.09
 * [new tag]               CY49T1                                 -> CY49T1
 * [new tag]               CY49T1_toT2.02                         -> CY49T1_toT2.02
    • Create a new branch in the src directory and check it out:

> cd <SOMEPATH>/HarmonieCSC/src
> git checkout -b <YOUR_BRANCH>
Switched to a new branch '<YOUR_BRANCH>'
    • Make your changes to the submodule (and test everything carefully of course):

    • git status will show modified files:

> cd <SOMEPATH>/HarmonieCSC/src
> git status
On branch <YOUR_BRANCH>
Changes not staged for commit:
  (use "git add <file>..." to update what will be committed)
  (use "git restore <file>..." to discard changes in working directory)
	modified:   surfex/SURFEX/interpol_npts.F90

no changes added to commit (use "git add" and/or "git commit -a")
    • Stage and commit the changes, e.g.:

> cd <SOMEPATH>/HarmonieCSC/src
git add surfex/SURFEX/interpol_npts.F90
git commit -m '<a good descriptive message>'
    • Push the submodule to your fork of the submodule's repository:

> cd <SOMEPATH>/HarmonieCSC/src
> git push -u <YOUR_GITHUB_USERNAME> <YOUR_BRANCH>
Enumerating objects: 442, done.
Counting objects: 100% (374/374), done.
Delta compression using up to 256 threads
Compressing objects: 100% (123/123), done.
Writing objects: 100% (227/227), 84.20 KiB | 1.24 MiB/s, done.
Total 227 (delta 183), reused 144 (delta 103), pack-reused 0
remote: Resolving deltas: 100% (183/183), completed with 120 local objects.
remote:
remote: Create a pull request for '<YOUR_BRANCH>' on GitHub by visiting:
remote:      https://github.com/<YOUR_GITHUB_USERNAME>/IAL/pull/new/<>
remote:
To github.com:<YOUR_GITHUB_USERNAME>/IAL.git
 * [new branch]            <YOUR_BRANCH> -> <YOUR_BRANCH>
Branch '<YOUR_BRANCH>' set up to track remote branch '<YOUR_BRANCH>' from '<YOUR_GITHUB_USERNAME>'.
    • Go to github and create a pull request for the submodule. Make sure the correct base repository and branch are used in the PR!

• After the PR for the submodule has been merged in, the hash of the submodule in the superproject needs to be updated.

    • Get the latest changes of the submodule into your local copy and check out the newest commit:

> cd <SOMEPATH>/HarmonieCSC/src
> git fetch origin
remote: Enumerating objects: 1, done.
remote: Counting objects: 100% (1/1), done.
remote: Total 1 (delta 0), reused 1 (delta 0), pack-reused 0
Unpacking objects: 100% (1/1), 904 bytes | 14.00 KiB/s, done.
From github.com:Hirlam/IAL
   61847aa01c..f8d3d93767  dev-CY49T2h -> origin/dev-CY49T2h
> git checkout origin
Previous HEAD position was 61847aa01c Updates for CMake compilation of CY49T2h (Hirlam/IAL#7)
HEAD is now at f8d3d93767 Fix a FPE (NaN) in PGD, instead of ZDIST, use X, which is ZDIST passed to internal submodule ORDERI (#10)

    Ensure that this is indeed the commit of the submodule you want to use in the superproject.

    • Go to the superproject's directory and create and check out a new branch:
> cd <SOMEPATH>/HarmonieCSC
> git checkout -b <YOUR_BRANCH>
Switched to a new branch '<YOUR_BRANCH>'
    • Update the submodule's hash, src in the example, and commit the change:
> cd <SOMEPATH>/HarmonieCSC
> git add src
> git commit -m '<another good log message>'
    • Push the new branch to your user fork (remote: origin) of the superproject:
> git push -u origin <YOUR_BRANCH>
Enumerating objects: 3, done.
Counting objects: 100% (3/3), done.
Delta compression using up to 256 threads
Compressing objects: 100% (2/2), done.
Writing objects: 100% (2/2), 297 bytes | 148.00 KiB/s, done.
Total 2 (delta 1), reused 0 (delta 0), pack-reused 0
remote: Resolving deltas: 100% (1/1), completed with 1 local object.
remote:
remote: Create a pull request for '<YOUR_BRANCH>' on GitHub by visiting:
remote:      https://github.com/<YOUR_GITHUB_USERNAME>/HarmonieCSC/pull/new/<YOUR_BRANCH>
remote:
To github.com:<YOUR_GITHUB_USERNAME>/HarmonieCSC.git
 * [new branch]        <YOUR_BRANCH> -> <YOUR_BRANCH>
Branch '<YOUR_BRANCH>' set up to track remote branch '<YOUR_BRANCH>' from 'origin'.
    • Finally create a pull request to update the submodule's hash, as suggested by git above. Once this PR has been approved and merged in, you're done!

    Scenario 3: Changes in superproject and a submodule

    If changes are needed in both the superproject (e.g. namelist) and submodules (e.g. new namelist variable) the steps are very similar to the scenario in which only a submodule needs to be modified. In the step where the submodule's hash is updated in the superproject, additional commits to the superproject's own files can be pushed and included in the same pull request.
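
A sketch of the resulting superproject commit in this combined case; the changed file is purely illustrative:

cd <SOMEPATH>/HarmonieCSC
git checkout -b <YOUR_BRANCH>
git add src                          # the updated submodule hash
git add nam/harmonie_namelists.pm    # example of a changed superproject file
git commit -m '<one good log message for both changes>'
git push -u origin <YOUR_BRANCH>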


    For developers++

    This section describes some git submodule actions that regular users probably never need to worry about.

    Merge from dev-CY46h1 into dev-CY49T2h

    ToDo

    Adding a submodule

To add a new submodule to the superproject, go to the cloned superproject's directory, create a new branch and then add the submodule:

    git submodule add <submodule_url> [<new_submodule_path>]

    The path to the submodule and updated .gitmodules file will be staged automatically, e.g.:

> git status
On branch main
Your branch is up to date with 'origin/main'.

Changes to be committed:
  (use "git restore --staged <file>..." to unstage)
        new file:   .gitmodules
        new file:   <new_submodule_path>

    Then simply commit (git commit) and push (git push).
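
A minimal end-to-end sketch, with the URL, path and branch name as placeholders:

git checkout -b <YOUR_BRANCH>
git submodule add <submodule_url> <new_submodule_path>
git commit -m 'Add <new_submodule_path> as a submodule'
git push -u origin <YOUR_BRANCH>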


    More info

    • https://git-scm.com/book/en/Git-Tools-Submodules
    • https://www.atlassian.com/git/tutorials/git-submodule
    allobs Verification

    Introduction

    It is possible to use Screening (NCONF=002) to calculate observation - forecast (O-F) statistics using forecasts of any length as the model first-guess. The ...

    Screening adjustments

The screening of observations is switched off by adjusting L_SCREEN_CALL in the NAMCT0 namelist:

    &NAMCT0
       L_SCREEN_CALL=.FALSE.,
/

    Inputs

    The inputs for the allobs data extraction are the same as for a regular DA Screening task - observations (ECMA) and a first-guess (short-forecast files - ICMSHHARM+hhhh and ICMSHHARM+hhhh.sfx).

    Note

    The forecasts being verified will need both model state (ICMSHHARM+hhhh) and the "full" SURFEX file (ICMSHHARM+hhhh.sfx) available for the Scextr task. You may need to adjust the VERITIMES and SWRITUPTIMES settings in your ecf/config_exp.h file.

    The following settings are important:

config_exp.h setting   Description
SCREXTR                Use Screening (NCONF=002) to produce O-F data
SCREXTR_TASKS          Number of parallel tasks for O-F extraction
FGREFEXP               Experiment name for FirstGuess. If set to undef it will use own forecasts
OBREFEXP               Experiment name for ODBs. If set to undef it will use own ODBs

    Running

This extraction can be executed as part of a running experiment (with SCREXTR=yes) or using a standalone suite (PLAYFILE=allobsver).
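
For a standalone run the suite can be launched with the usual Harmonie start command; the dates below are illustrative:

Harmonie start DTG=2024010100 DTGEND=2024010200 PLAYFILE=allobsver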

    Output

    The output from the Scrextr task is a CCMA ODB with O-F statistics. This ODB is archived in $HM_DATA/archive/extract/obsver/odb_ver_${FGDTG}_${FCLENSTR}/ where FGDTG is the forecast cycle DTG and FCLENSTR is the forecast length verified. The ODB data is then converted to ODB-2 and sqlite files for use in Harp and other downstream applications using odbcon tools.
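
For example, with FGDTG=2024010100 and a 12 h forecast verified, the ODB would be archived under something like $HM_DATA/archive/extract/obsver/odb_ver_2024010100_12/ (the cycle and forecast length are illustrative; the exact formatting of FCLENSTR follows the experiment's settings).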


    Verification preparation

    Introduction

Before we can run the verification we need to extract data for each geographical point and produce files in a format that the verification program can use. In HARMONIE there are two programs, one for extracting model data (fldextr_grib_api) and one for observations (obsextr). Both are part of util/gl_grib_api.

fldextr is capable of extracting data from several sources (HARMONIE/HIRLAM/IFS) and produces so-called vfld files in ASCII format. The main tasks of the program are:

    • Recalculates rh,td to be over water
    • Interpolates to geographical points according to a synop.list and temp.list
• Does MSLP, RH2M and TD2M calculations if they are not available in the input file
    • Optional fraction of land check.
    • Interpolates to pressure levels for TEMP data.

    obsextr extracts conventional observations from BUFR data and creates a vobs file similar to the vfld file. It:

    • Reads SYNOP and TEMP
    • LUSE_LIST controls the usage of a station list

    Station lists used by verification

scr/Fldextr links synop.list to $HM_LIB/util/gl_grib_api/scr/allsynop.list and temp.list to $HM_LIB/util/gl_grib_api/scr/alltemp.list. These station lists are based on information in WMO's Publication No. 9, Volume A, Observing Stations, and the WMO Catalogue of Radiosondes. This is regularly updated by the WMO; allsynop.list and alltemp.list are updated less frequently. There is also scope to include local stations in these lists that are not included in WMO's Publication No. 9. The following 7-digit station identifiers are available to HIRLAM countries:

Country        Identifier range
Norway         1000000 - 1099999
Sweden         2000000 - 2099999
Estonia        2600000 - 2649999
Lithuania      2650000 - 2699999
Finland        2700000 - 2799999
Ireland        3900000 - 3900000
Iceland        4000000 - 4099999
Greenland      4200000 - 4299999
Denmark        6000000 - 6999999
Netherlands    6200000 - 6299999
Spain          8000000 - 8099999

    Field extraction

scr/Fldextr: this script goes through all forecast files and collects all the variables (T2m, V10m, mean sea level pressure, RH2m, Q2m, total cloudiness, precipitation + profiles) needed in basic verification.

    • Input parameters: none.
    • Data: Forecast files.
    • Namelists: Station lists for surface data (ewglam.list) and radiosounding data (temp.list).
    • Executables: fldextr.
    • Output: Field extraction files (vfld${EXP}${DTG}), which are placed in EXTRARCH.

    Extract observations

The scr/FetchOBS script takes care of the observation extraction for verification. First, the observation BUFR file is fetched from MARS (ExtractVEROBSfromMARS), then all the needed data is extracted from the BUFR files.

    • Input parameters: none.
• Data: Station lists for surface data (ewglam.list) and radiosounding data (temp.list). These should be found in SCRDIR.
    • Executables: mars, obsextr.
    • Output: Field extraction files (vobs*), which are placed in EXTRARCH.

    A general input format

The file format for verification is a simple ASCII file with a header that allows an arbitrary number of different types of point data to be included in the model vfld or observation vobs files.

    The generalized input format is defined as

    nstation_synop nstation_temp version_flag  # written in fortran format '(1x,3I6)' )
# where version_flag == 4
# If ( nstation_synop > 0 ) we read the variables in the file, their descriptors and
# their accumulation time
...
pressure(nlev_temp) val(1:nvar_temp)
stid_2 lat lon hgt
...

    The accumulation time allows us to e.g. easily include different precipitation accumulation intervals.


    harp

harp is a set of R packages for manipulation, analysis, visualization and verification of data from regular grids. The most up-to-date information and tutorials can be found on the website for the 2024 training course.


    OBSMON

    In 2014 a new version of the observational monitoring system entered trunk. The first official release containing obsmon was cy38h1.2

The obsmon package consists of two components. The first is a Fortran-based code that is run, for all the active observation types (defined in scr/include.ass), at the post-processing stage of an experiment. It generates statistics from the ODB and stores the data in three SQLite tables (ECMA/CCMA/ECMA_SFC (CANARI)). In addition, the SQLite tables are concatenated into tables in the /ts directory at the end of the run.

    The second component is written in R using the Shiny web application framework. It allows the interactive visualization of the data contained in the SQLite tables produced by the first component of the package. This can be done either offline or via a server daemon (e.g. shiny.hirlam.org).

    For disambiguation, we will hereinafter use the terms "backend" and "frontend" to refer to the first and second components of obsmon, respectively.

    How to turn on backend obsmon?

Obsmon is enabled by default in ecf/config_exp.h via OBSMONITOR=obstat.

    Note

If you don't have any log files from the monitoring experiment, you should remove plotlog from the OBSMONITOR= string in ecf/config_exp.h.

    Note

    Make sure that the -DODBMONITOR pre-processor flag is active during compilation of util/monitor. This should only be an issue on untested platforms and is by default enabled on ECMWF.

    How to create statistics and SQLite tables offline/stand-alone:

If you are running a normal Harmonie experiment with OBSMONITOR=obstat active, the following step is not relevant.

Two new actions are implemented in the Harmonie script. Instead of start you can write obsmon and instead of prod you can write obsmonprod. This will use the correct definition file and only do post-processing. If you have your ODB files in another experiment you can add the variable OBSMON_EXP_ARCHIVE_ROOT to point to the archive directory of the experiment you are monitoring. This approach is used in the operational MetCoOp runs. If you set OBSMON_EXP=label the runs will be stored in $EXTRARCH/label/. This way you can use the same experiment to monitor all other experiments. The experiments do not need to belong to you as long as you have read permission on the experiment.

    1. as start:
     ${HM_REV}/config-sh/Harmonie obsmon DTG=YYYYMMDDHH DTGEND=YYYYMMDDHH OBSMON_EXP_ARCHIVE_ROOT=PATH-TO-ARCHIVE-DIRECTORY-TO-MONITOR OBSMON_EXP=MY-LABEL
    2. as prod:
     ${HM_REV}/config-sh/Harmonie obsmonprod DTGEND=YYYYMMDDHH OBSMON_EXP_ARCHIVE_ROOT=PATH-TO-ARCHIVE-DIRECTORY-TO-MONITOR OBSMON_EXP=MY-LABEL

    If you want to monitor an experiment stored on ECFS, you should specify OBSMON_EXP_ARCHIVE_ROOT with the full address (ectmp:/$USER/..... or ec:/$USER/...) e.g.

    OBSMON_EXP_ARCHIVE_ROOT=ectmp:/$USER/harmonie/MY-EXP OBSMON_EXP=MY-LABEL

You can also monitor other users' experiments as long as you have read access to the data.

    How to visualize the SQLite tables using frontend obsmon:

    Download the code from its git repo at github:

    git clone git@github.com:Hirlam/obsmon.git 

    Instructions on how to install, configure and run the code can be found in the file docs/obsmon_documentation.pdf that is shipped with the code.

    How to extend backend obsmon with new observation types

    Step 1: Extract statistics from ODB

    In the scripts you must enable monitoring of your observation type. Each observation type is monitored if active in:

    msms/harmonie.tdf

    The script which calls the obsmon binary, is:

    scr/obsmon_stat

This script sets the correct namelist based on how you define your observation below.

    After the information is extracted, the different SQLite bases are gathered into one big SQLite file in the script:

    scr/obsmon_link_stat

The observation types that the above script gathers are defined in obtypes in this script:

    util/monitor/scr/monitor.inc

    Then let us introduce the new observation in the obsmon binary. The source code is in

    harmonie/util/monitor

    There are two modules controlling the extraction from ODB:

    mod/module_obstypes.f90
mod/module_obsmon.F90

The first routine defines and initializes the observation type you want to monitor. The second calls the initialization defined in the first file. The important steps are to introduce namelist variables and a meaningful definition in the initialization of the observation type.

    The real extraction from ODB is done in

    cmastat/odb_extract.f90

At the moment there are two different SQL files used, one for conventional observations and one for satellites. Radar, for example, is handled as TEMP/AIRCRAFT.

    Step 2: Visualize the new observation in shiny (frontend obsmon)

The logic of which observation types to display is defined in:

    src/observation_definitions.R

If a new plot is added, the plotting is defined in the files under:

    src/plots

    EPyGrAM

    General

    Using EPyGrAM (version 1.4.13) at Atos AA (Bologna)

    Enjoy!


    da_graph.svg is created in .github/workflows/documentation.yml. It can be recreated locally by using dot which is part of graphviz

    sudo apt install graphviz
    dot -Tsvg da_graph.dot -o da_graph.svg

    Harmonie System Documentation

    Welcome to the Harmonie system documentation

    Github

    Contributing

    To update a page:

    • Click the "Edit on Github" button at the top right of the page
    • Edit the markdown file on github.com
    • commit (this creates a new branch in your fork) and start a pull request

    When adding new pages also add them to docs/pages.jl so they appear in the navigation bar.

    To add a reference:

• Update docs/references.bib using <Lastname><Year> as the citation key.
    • Cite paper in markdown using [<Lastname><Year>](@cite)

    Instructions how to build the system documentation locally are here.

z2nF<|5wqFc0oo&FqH<=5su?8bRid@yr`Lq^dK95>2zA{d!@K8j_cOh%W>LJG*E~U; zNAG~Bs#*#Jw-AB2;r#n-BAM8aR#^Zu|@ z)thG0#wsKAoAIh#9z*D)kxkt+vV^m@_1TZ2>bh)iHc8?(Qi%o0CYCAb<*D99Q84Fb zF?jVqMP8YPdT^jDFg2U7)~`w&JH$t6@q~lt^^;^*k!rqY*(q;8Gjq%=&c_Foo<44% zt~kXb>#Y};JkQb;2q3>1gZc#e+jK`-s+Cgd1%q-WJDtffH)|lRj7sktAO%3$%0y0T zZd1dpQ79c+Z_Ks2o`Y({x@2KZoxvQcH1+u4P%TxgD-NKpqG5!E@p7+9)zWBgo5{T}9si|PPB96z+Q!$op_8*P z3u&SmGoKsSq%+6&3;#}B3lVuv_pgcv&i%j8wLqWF#O>X1Oa&agyV##=Svp&xs(~D> zh)IUdR;2P;vl(!gI$KbUXaNiftD8l;CZ#BGu%N+3BaCe!c&HJE`wDU~dOR}+J(VJq z$mh_<^o3P<+LEm}_miq+j|bvb0pxOy#;pS=8Wpb{ z&gN<8Cv5qh8J~QTY4Em(Z8~5A!Gs0l8h@tw8=9Uk*yYziAH{(uS3Mpty=Lv%hd;|gM-+UiM9MP;jwF(EW`DSJY8(5 zR*z1O&W9LPR(iR@c=d*pz!{&L2G4pb1#|VmgfX$}6l(=$;%Z^hAc!zmsP!N8 z=fm+VNAT-TUR|R2{UWd4U0ydk==Ye2)g*!@5j2UoxbO~^5kzCf4)=^M_PX60jao=+HnN3#j%SC&aCP5eN&EaF|+|TgF-bncnY$=&aD0QJt)vy&>iP&@j+KKeop0e*ZZ2L8quGHGM0| znkrMAsqL?2Up@sXs6nt|V5v@HtlPtO7b=H0Gk!-~eJ5Anqsp&zBsv!V08-HxKq`<_ zC{kT2kV@9It?r@TH}K;SgaF05uWeUaAR`vZ8ouzRdgX1QQ$@ndwzAQ9SbTXRa9On?Q<81%ajkE@Yd=^PdsX9PSGpU6Cm#0J`cE z$hN$Z@$uR8XJo1&)da?}Bp8wYp~ACr2nGl`!WmDG&q>z?uB*l)+rT+7Ws}}E?eyT7 z(ozaRCKKiP3 z4s9;QlnxF>es!`ZM2(CC0#Gjn}?1jUHqY zt+zOhXG>VNAMg}FU>ldbfj7!YK6BXlw8{?iGnl|+2T8a{-Z)-W)POw5ng%GgDC7#e z^MoK+KHSNOl59b%!&NbO=)cvG6?KIbbUIck5~ywpi+eI<&@_c`C{2mr%vQ9Fa}3_7dhfBrlDpZ|_!Z%ZcTu`;98+ujuEN&EVw^~aR| zDL|+UjSRj}KYFMcQi~I+*V`Kn`uC30=u{nnbIw@DBNcZ*1V`i=F5ryn9V73jJTplx zl!E!&I-aUB((31he7aLI+aCJ&BU+4c+dWDYsC@yI6#@94M=3hJ1v&e5;|QOqyeMVmC6M*UoU zDF}xRqCJdRt?Aoy#GWR|Y}X#w^x3A$BtDi-m#3fGZ}%Jt*{qZ*AXpin+xX61yQnij z;7lH?q>r0z-fCWI5RjfKKq_a;I4n3rj(9Q(PbfTZuuEo-+hB3{64R*@7?<88)xcy< zaZ53iXP|QieYVT+0xMQyYkEmKchF~>46idP)P`lCnkrhV(}cFJmgkyQ^g?}O`b%iL zs!2WV1Z;?}kIOLLAb7l-?uR8tBv~<{m&Rg|O8G-tp=e!?X`?XAQII%2q>fQA6+%gU zIww`dpZ|52q$OVB1|jw~b;Uu+#ksEGA5{q#*ju^;?mc6MRTg~Guo`YQ;&qK8E=SL* zDTJCr$JTLe)aQ)pe*EXVFje-@L^%s{Ci@RHPb61&^@&>g@HYEVk!#x?UeNY?+F%4x9VwhS+5_BoR?vLFG^0abX(rHD)+Xv~IK z@4jls%(rD6>7VD}ma@-Y8XBsaT;0jl@AKD(g3D{K5RF}CdV$lc&O0bzvOPjQ9Kn+d z{cyyLY-DO`F0Ev$U1LTEPfb72)aqyZaJ-vO!(WdN(39Rn|8DrIPQ7~~82MzwFP(m% zCT@{yB~_IOtLC_>|8%B$=+aM?u<`DO?N;6tu%~E;q~@zHCSy!>8zsY3NrZf5k5P@H zYPG5!r=jK)x=172^gd3;b?0ZUL?c`6fX4cW*kU2?lnOa6&(kR}a2RJ0f2C?hy$FMV zuINn-9|C{}es1cFm~mut(?dd?3yR9-a+O8d93u{DZ-a`u$q*ja7c)buID3OgK5nj^ zjU0Scd+|f(+|x4W1T5|R>BL~!J=(e!n@_87s0l)3j`pR-yUwJiTHL1ED z3eCVw*(+i3>zcaonF;tp;fu`ODj$Hp+e}5HJ@tjKG1hFi#RN>Cacdcw{#nXLb7*a& zgC@>bH7ZW-Ryo>J;s`r+8R$%XBh$`H`H{{&ts7KTFh5E`(AofH6y;lHB;EslRmey> z{H&9@L;I^TWK_F$+q|N+oh`%XLcB?NR+_pFN-|d>RGm+!+L5mzh(G20J38SDkGftk zkGH;x%`HbSa8Q#l22Y6TRctOAMWQrxTfNZnsy>>NHNm=exe)yZXPgDs+1j`w)JJp9 zCRqJ07qXv?Wua&j6VB3W)xDU8u`C=Dqr+KxEs*BEhN(P#cYgW){N4Vzavl!bmI;uf z(N{tvVDd+TF9P3NS<+9_%XV-CQyt6e_*0EUM0~ zO}y5eZjGDx+Nhnl#N<;wCxfW|y|z38gLs`-hHO_4sY3dJr6Be=qdTz(*}Bf?HcDLO zN(5gQkJ!mM_5s?5g=OYbYK zm}8sDA^_(QaxvsZx+#^#%58lxT8msIRfEXCVyZ@epvuo&MXN2l_v4}vcGDS8My*Nr z_t#nX|Ar-7C=HJBZ(QY;Pcwzk;H^p#u<#gt9-Qtw4-x0 zAqc)k2X2jqgKpD}c9pN8K(W#kga`+A7PHzSfPe2c8|>NqI5eO7mfa8gqwZs;-+HLow@y~+c)43FqTpt?sN3jHRPZ=& zj;bz0nom@yV}2&PCZCSrb#y%mbt2l-^>rL3wT>2!urUDY@Uv;_1V91)O&1ec*9PNH zt!C#{MWtg(Ec_le5x6MG7{_{cj_S;Rc4^76?5>yShzIp`2V6PC{vorv>|w*nnO(NP#wvJ$B5A+a;K0D9<-h=4f`trH1VWP^s(=>Qy8!7`N z^jBV6m0aeVs;^{He-=B;!0Yo`N~Mmpo0r~ajbXW}LTpR5 z28*!3=HD5wpRM}dRNu7px&l7ado1xj8n;BTWo)zkch!+f^gWZf+m#2zO?#84 z3$_T>N9knK>J%t$sVZ1Z24Y~;%dR?F20s%S3O5i(Ux?Sx;C0GQ#`o9Dt3s!-M)tLa z^T4gRGvWO-G@0=g3U!5<%vYQ2OQ)n>sMXbC4!6y?xi(IUGV%kxoZT8TZcc1#GW`jWm&Pi8vDyi0r=N;sv~bk5ZCI$UxVZSI+)l6r`9HFy*S{c#w4Zf zOY2k_P2qL=y|-)&f9jH=JuB~ z^tPtm24Zx$yMus>oA4ZK+bg-*&)l zfl)Q*@?0*9$%Q4?4MU&8^g?ywEa@G(-v(%5uzi3Fq?(S}2~_`4t!v(n8`rzm>gDa2 
zjTW{E(>`VtIvtkO@~!Qpq%sLt@pEu7i#IRSp*R9F%=NBU*fs|wGQ*^!t|iL zLYxbp-SM;8JVSHY)C4{oB}D}q7|8h&*B%22xNO*$lvh+{Dp33g_!S4t>Ym2B2|W1bn;MV#zO5u0HkGMyda~1 z3KP$?71VtydzXPFW<#e-?DgASIjLi0<;?+2t$YwqrgDa+dfXIj`fI{ZOwD}Z0w~*| z*9*7UcRLO8$&MvuW@xOSvEK1bx@2`z=F%&uOxvz(oa{{s@qlFxp-ed@OoG93d6fHE z^H?)A^)mTynCgN*(oXh%{wn=MlAm6#!tb~#FD_Fyh&d#fXiU>MzkwPEvU4udfbS)I1g?7Ehx?O)%&yMX^)*ZJ6$ z0Z`GLT>Sy5;4L8?nn094-2qcEVj=4I$tIYg35{i447|bUg2JY%X%`uyzOP|ec z&Ba2qWXq+qzNwk||HGw_89Na)4?IDb$BDO3Fr(DYBh);?&Lh-3MVBNM$0hSJ-!i2b zz>7t-QDwz(r*sLte2(e0`<{wU9@z_o_k@W9@cvQdRD%TfWmkj5pS&+ipMt$GHF>*R zqaHz=v^Bt#IoOpO)Tf(XRWmfaG#rWR71NnvB&r(3f7M7qBjNgV-u$6CC(_14{kYb) zPKcGxjjFQcVkb(|m_eH*sE+vo=AR0yFZQW4?H9CFg0Q*r5p@u6;^-XSjEA4@OeNh9 zs+L`+EDd{q!*QvL!Tf*-Ib7GRhkog4P!?%hC6CBcj>(bvnww3G5Y?!_+BGiakXFet zLQO9&-+G;x;~~_;mc?F|RZoi629t9D5C=i%`o{?5weDJ-{(S?P$@r_;|L+ugx{|t^ z(ADL;=Bw0bPVD%13PfEA-DwB_QR7fec|*#~%!%?1W0U(O?7O01P>H5NWia z(Sk<{94&Zm$8)2^F6`s_U0IJg?Uy~yzU>~RrKytVUL%NMKUW2;N?ft4*|(`r8DMXG zKm2l7w4PJkSbC#jtMfy8ryx+KcS<96@w;o;2We=A5jS|=8~OPMbCKU&3n|P@C@+|q zVE3<$D~?(Lpz^>+ zA@lkbiT~Bk^uA5;@2-tgg#ml)i7u4q&StYwlXg%H_dTq?Hau9s5XB|F5Z%& z6P*5krPm<@te636hALp#F2u7sR%_9GeA0ZAWV=x6?O>H*?dtIE>NDM$kF*!*MX=V&pxYg zlMEEToT)Ygn%kq%v*ePb-C1ZTu~bv%D4#+2_Y(H(iA$3@2G$@E7O54nK`+$_VVCR*h{a|xs|7Py4RJ=pW3C>U62O2nx(>%8@?0>q@2)i%Y*%26V}f` zKf}FUBlbl63{~5RZtE5D{GjNREAR0jy$4cNv1#i*H@l7jg2$qhB2wV2#i+8Z>Ir}8 zj0YIbkz~{sjmlx=O>>;6gMv0{TOgdHkkW5`0{st^uN|h`Mv+_w(dMw4PGyj&1m-}g zb{PufYAA{~FjT171r8K6yAz3lF@=5FFt}B1nR2(Wp%hW5^c$7*&1BIQ|RxYPu;&)zO`g4*B zvFs!P*t#| zV+)0?N>i~pEZ@&dx_BUXo#0>hC>Z zA`qf#rpf@(x}f?7E)X3=`ig}MgU0~pFH#NMVEr6!<2AV%DwLjrI!|S99YROnQH5D_ z7ah1{-Ls~UAh9RO-}cUd3wA)7BtAZTuFHD-y`v>>Vw9{yKl&VFS(q?=X?jE)av2r* zkH60-&?T4BAQ|%Eb6qCn?_KS>IDu0#VTz|O+yy$lDb!PYmLJEXwvDZ8=$LhjDKN4? zWRCazsT^7zDmt{Zi6!{myUV7p>nB}{PX(DaF#rR6CNDw-S{|4Z!BigB8&X9(_p@yr ztSWLA19xV|4|-hejzeDv-?6``@SA+_{2`GyPm$aB3=iFr|4p_M?9Nk+nmV#Rfzu!j zk7AfX&(DARHns1Ao4cTY`SFL*Isy`u z%F2OJq4eNgC}E+DNvKi%daO7F99qBI^-c$nrhf7@F$JIxZ4#?Nxpd!72a*z~sr{^b z$2u_!p>8j7XpyZf28#+B!`9N9hU*d`(JZGM^)pE#I@naxc+aZnn0zYOWSiD0Gobnh zRW_Gf)_0r~oz*Ux;nSZL^oV_yg&q?b0iQ?fdZ!y_hXP7I+AiZFEwgqyhQfq+YPe0HWn45qC_p?*jVOd*X zZk*N>X8b&yR8O*vWD42Fiv=c6Ur_xYNqXb~C2|j#0Zp5B4yRNuV+bd$lq@QllDTD9 z>D^**AU0X0<}R3GD*qSudHdf*(WXfRQ3>41?{5(BOjUwFXyg*}HFb`lH%1%ce~Mx-woqZd;0rZR!{CZJ6ycGKGa|EKS*gnT$>eXEJR7XK91F3EIi>#k??l zTzN+h@uT4dU_3ehMidq? 
z*QZ6EI=wfr%@}BtD;4Qin}VdcV-5}8WHlao2(m3-#HrJ>1Dlq)vvbY}mx-dnClOXL zkrQJD!iLW%0S>qE;~?dlP*qA<*ZfS374j#sw)H_ zEPx;YVXs~AwR>#!%o{vm^72g9I>RShIP=C$teT*6SlPTDyuO+d)|#5sEf*U|@SE{& zgCVZJF}^{$@n>KC`kHneYk6{YrKsUk3BAEK9z>ykvnFqns17fvVGP?OLuJZqpz?Ix z`P>>^yvP5uHA;63swrP|ge}2L1%l3jlcAF4TqWDPbYr5LQb|V4(+ObK_YsoHsdV_q z;QQu$DL*tkM+>0~`=&8Q1vL?Og=J|{r=3}sNl2=0=F-FmQbFhxdBlu}qcjkQ+$bOw zQF)CvPur=X=r%1$GCQ@?+b5sw>120E59&ivqmCO3ud4FxI9HN$dEXkeZ-1%iKvv%YI|C#b$o3-Zui+3T zzj7q?c9d*mOfUfIK#kdf(~+Br>dKQ~UPbGZ*BX4@XAf$vo=J%vd}h0kNnrjn=A!~f zK#TeY-gk<{*HK4Gu}B+Qk0=ZIutb`0A^wahtgz(_F4kSwxFU<|VbKX4JzIG;J1GWO zQ$k0mX0~7+B zsw{V1N149KgefEf&rMtgr~X%8j$4!A&6)hz9e(NGcN&JM6crN0!*;nEk=*Z$Nzo~= zmT)@3(+NGD_oX%2+q$|gjB>11e3U9oB+wb2%HdY7K)lo9Fo#4@b0&^CH62cyX-efY z;e8%5mqroU>lK6{6b5im4#lE6H5?AB5xntuaxw0=k9=l84Tm|>!>PoPkfe~#4^1hf z4a~~B0r1)}iFqu*Z7bZgwWS^yK^O#Yll2tt=PFV)A<&7vN5~nr+`pDGT)?Py3~zpYv9q@pe2<&wl|^UOSqo*04TBPW1O*Zs5>F+Jir!| zwy@_cQoCihG2N3gHRLf)rLp_kU~^?`g^i{$-BO7{`7%K;-hGTSu1$X#Ob|8c{I1O` zacUGdT}23vE*}f-{dek5j|+;^Q9XNR`=53EOX-du=p09Zq9M3Om=XX z=3SP2(oWRYdX8g7Jxl3f%gUN#%YW--pFiACSS{BVVSL7oOGRrm0u1-W$R9W zIrV}`RM@p;*sx?*FHRB=)??X*e~(GR2FWtmZsM}wn9`%fl8OweTQh)o!v=$Uo&zoc zmI?IP>k-Wq4zT^Nc3=m#OaXv=kX^&d946QU z0W*pJPmwox$YlhTM!r!|EIjvONo`i?azIsSWIXE5BmyT9b`pv
    +
    diff --git a/previews/PR1129/search_index.js b/previews/PR1129/search_index.js index 9624b97df..ef19e0f8a 100644 --- a/previews/PR1129/search_index.js +++ b/previews/PR1129/search_index.js @@ -1,3 +1,3 @@ var documenterSearchIndex = {"docs": -[{"location":"ForecastModel/SingleColumnModel/Forcing/#musc-forcing","page":"Forcing","title":"MUSC Forcing","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"From Eric Bazile: The fields SXXXFORC0001 –> SXXXFORC00NN in the initial file for MUSC are the atmospheric forcing without any rules for variables or advection etc ...","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"XXX = vertical levels\nNN = number of forcing fields","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"So if you put for NN=1 the temperature and QV in 2, and the geostrophic wind ug (3) and Vg (4) and you want to force MUSC for 48h with a nudging for T and Q and a geostrophic wind you should add in the MUSC namelist ","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"NAMCT0\n LSFORC=T\n LSFROCS= FALSE ; default ONLY for surface forcing without SURFEX\n/ \n&NAMGFL\n NGFL_FORC=4 ; number of atmospheric forcing fields\n/\n&NAMLSFORC\n LGEOST_UV_FRC=.TRUE., ; geostrophic wind forcing\n LMUSCLFA=.TRUE.,\n NGEOST_U_DEB=3, ; Ug is in position 3 in GFL_FORC\n NGEOST_U_NUM=1, ; ONLY 1 Ug available \n NGEOST_V_DEB=4, ; Vg is in position 4 in GFL_FORC\n NGEOST_V_NUM=1, ; ONLY one Vg available\n LT_NUDG=.TRUE., : Nudging for T\n LQV_NUDG=.TRUE., ; Nudging for Qv\n RELAX_TAUT=43200. ; Relaxation time for Nudging for T\n RELAX_TAUQ=43200. ; same for Q\n NT_NUDG_NUM=1 ; Number of nudging profile for T \n NT_NUDG_DEB=1 ; Profile 1 used for the nudging of T\n NQV_NUDG_NUM=1 ; Number of nudging profile for Q\n NQV_NUDG_DEB=2 ; Profile 2 used for nudging Qv\n/","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"Here you can run MUSC for 1 day or 100 years with the same nudging profile and geostrophic wind !","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"So it is fully flexible BUT the user should know how the initial profile was created and which fields are in FORC00NN etc ....","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"After you can have several nudging profile (for several time) instead of one profile used for all the simulation. You just need to put the number of profile For ex you have 5 profiles for T for the nudging at 0, 6, 12 18 24. and if you put the T profile 0 in 1, etc ... the modified namelist","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"&NAMGFL\n NGFL_FORC=8 ; number of atmospheric forcing fields\n/\n&NAMLSFORC\n LGEOST_UV_FRC=.TRUE., ; geostrophic wind forcing\n LMUSCLFA=.TRUE.,\n NGEOST_U_DEB=7, ; Ug is in position 3 in GFL_FORC\n NGEOST_U_NUM=1, ; ONLY 1 Ug available \n NGEOST_V_DEB=8, ; Vg is in position 4 in GFL_FORC\n NGEOST_V_NUM=1, ; ONLY one Vg available\n LT_NUDG=.TRUE., : Nudging for T\n LQV_NUDG=.TRUE., ; Nudging for Qv\n RELAX_TAUT=43200. ; Relaxation time for Nudging for T\n RELAX_TAUQ=43200. 
; same for Q\n NT_NUDG_NUM=5 ; Number of nudging profile for T \n NT_NUDG_DEB=1 ; Profile 1 used for the nudging of T\n NQV_NUDG_NUM=1 ; Number of nudging profile for Q\n NQV_NUDG_DEB=6 ; Profile 2 used for nudging Qv\n NL_T_NUDG_TIME(1) = 0\n NL_T_NUDG_TIME(2) = 21600\nNL_T_NUDG_TIME(3) = 43200\nNL_T_NUDG_TIME(4) = 64800\nNL_T_NUDG_TIME(5) = 86400\n/","category":"page"},{"location":"ForecastModel/SingleColumnModel/Forcing/","page":"Forcing","title":"Forcing","text":"and now you can not run MUSC more than 1 day ... if the time between the forcing profile is the same you can use *_FREQ instead of TIME ...","category":"page"},{"location":"Overview/Content/#Harmonie-Content","page":"Content","title":"Harmonie Content","text":"","category":"section"},{"location":"Overview/Content/#Overview","page":"Content","title":"Overview","text":"","category":"section"},{"location":"Overview/Content/","page":"Content","title":"Content","text":"Harmonie is HIRLAM's adaptation of the LAM version of the IFS/ARPEGE project. The common code shared with the ALADIN program, Meteo France and ECMWF only contains the source code. Harmonie adds the build environment, scripts, support for a scheduler, and a number of diagnostics tools for file conversion and postprocessing. In summary a git clone of harmonie from github contains the following main directories","category":"page"},{"location":"Overview/Content/","page":"Content","title":"Content","text":"config-sh : Configuration and job submission files for different platforms.\nconst : A selected number of constant files for bias correction, assimilation and different internal schemes. A large number of data for climate generation and the RTTOV software is kept outside of the repository. See [wiki:HarmonieSystemDocumentation#Downloaddata].\necf : Directory for the main configuration file config_exp.h and the containers for the scheduler ECFLOW.\nsuites Scripts and suit definition files for ECFLOW, the scheduler for HARMONIE. \nnam : Namelists for different configurations.\nscr : Scripts to run the different tasks.\nsrc : The IFS/ARPEGE source code.\nutil : A number of utilities and support libraries.","category":"page"},{"location":"Overview/Content/#util","page":"Content","title":"util","text":"","category":"section"},{"location":"Overview/Content/","page":"Content","title":"Content","text":"The util directory contains the following main directories","category":"page"},{"location":"Overview/Content/","page":"Content","title":"Content","text":"auxlibs : Contains gribex, bufr, rgb and some dummy routines\nbinutils : https://www.gnu.org/software/binutils/\nchecknorms : Script for code norm checking\ngl_grib_api : Boundary file generator and file converter\nmakeup : HIRLAM style compilation tool\nmusc : MUSC scripts\nobsmon : Code to produce obsmon sqlite files\noffline : SURFEX offline code\noulan : Converts conventional BUFR data to OBSOUL format read by bator.\nRadarDAbyFA : Field alignment code","category":"page"},{"location":"Observations/Iasi/#IASI-radiances-(pre-)-processing","page":"IASI","title":"IASI radiances (pre-) processing","text":"","category":"section"},{"location":"Observations/Iasi/#Introduction","page":"IASI","title":"Introduction","text":"","category":"section"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"Typical IASI radiance data reception consists of a subset of 366 channels out of the full set of 8461. These cover the infrared absorption spectrum from 3.8 to 15.4 micrometers. 
In the context of NWP, the most useful IASI channels include (i) the temperature-sounding channels in the approximate channel index range 100-450, (ii) the humidity-sounding channels at 2800-3500 and 5000-5500 indices, and (iii) surface-sensing window channels at 500-1000. Most of the NWP impact from IASI is thought to come from group (i) and especially from the upper-tropospheric and lower-stratospheric channels in the range 200-300.","category":"page"},{"location":"Observations/Iasi/#Including-IASI-radiances-in-a-HARMONIE-run","page":"IASI","title":"Including IASI radiances in a HARMONIE run","text":"","category":"section"},{"location":"Observations/Iasi/#scr/include.ass","page":"IASI","title":"scr/include.ass","text":"","category":"section"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"scr/include.ass should be edited to \"switch on\" the use of AMSUA (AMSU-A), AMSUB (AMSU-B/MHS):","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"export IASI_OBS=1 # IASI\nexport ATOVS_SOURCE=mars # local: EUMETCast;\nexport IASI_SOURCE=ears # mars:MARS | else: file in $OBDIR\nexport IASI_RT_COEF=lblrtm # genln2|kcarta|lblrtm\n[[ $IASI_OBS -eq 1 ]] && types_BASE=\"$types_BASE iasi\"","category":"page"},{"location":"Observations/Iasi/#Loading-the-IASI-radiances","page":"IASI","title":"Loading the IASI radiances","text":"","category":"section"},{"location":"Observations/Iasi/#Data-extracted-from-MARS","page":"IASI","title":"Data extracted from MARS","text":"","category":"section"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":" elif [ \"$base\" = iasi ] ; then\n # IASI\n if [ \"$IASI_OBS\" -eq 1 ]; then\n echo \"iasi iasi BUFR iasi \">>batormap\n ln -sf \"${HM_LIB}\"/const/bator_param/param_bator.cfg.iasi param.cfg\n if [ \"$IASI_SOURCE\" = mars ] ; then\n ln -sf \"$WRK\"/splitObs/iasi ./BUFR.iasi\n else\n ln -sf $OBSDIR/iasi$DTG ./BUFR.iasi\n fi\n fi","category":"page"},{"location":"Observations/Iasi/#Locally-received-data","page":"IASI","title":"Locally received data","text":"","category":"section"},{"location":"Observations/Iasi/#Controlling-the-detection-of-cloud","page":"IASI","title":"Controlling the detection of cloud","text":"","category":"section"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"IASI radiances are strongly influenced by cloud. Because of inaccurate forward modelling, large background errors in cloud fields, and non-linear effects, success in the use of IASI requires careful screening and removal of cloud-affected data. In the HARMONIE data assimilation system, the screening for cloud follows the method of McNally and Watts (2003). The power of this method lies in the use of a large number of individual channels such that much of the disturbing instrument noise can be smoothed out and the cloud radiative effect is therefore more easily detected.","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"Even if the active use of IASI is limited to relatively small number of channels (such as the 55-channel subset in MetCoOp in early 2021), it is advisable to include more than 100 channels in the cloud detection channel list. Furthermore, it is important that all these channels are subjected to VarBC. To achieve the latter, one needs to make sure that blacklisting for the cloud detection channels uses the fail(EXPERIMENTAL) syntax rather than fail(CONSTANT). 
The following excerpt from src/blacklists/hirlam_blacklist.b.data_selection_after_20140601 illustrates the concept:","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":" if (SENSOR = iasi) then\n\n ! remove channels that are not used in either cloud detection\n ! or minimization\n if PRESS notin (38, 49, 51, 55, 57, 61, 63, 83, 85, 87,\n 104, 109, 111, 116, 122, 128, 135, 141, 146, 148,\n 154, 159, 161, 167, 173, 179, 180, 185, 187, 193,\n 199, 205, 207, 210, 212, 214, 217, 219, 222, 224,\n 226, 230, 232, 236, 239, 242, 243, 246, 249, 252,\n 254, 256, 258, 260, 262, 265, 267, 269, 275, 278,\n 280, 282, 284, 286, 288, 290, 292, 294, 296, 299,\n 306, 308, 310, 312, 314, 316, 318, 320, 323, 325,\n 327, 329, 331, 333, 335, 341, 345, 347, 350, 352,\n 354, 356, 358, 360, 362, 364, 366, 369, 371, 373,\n 375, 377, 379, 381, 383, 386, 389, 398, 401, 404,\n 407, 410, 414, 416, 426, 428, 432, 434, 439, 445,\n 457, 515, 546, 552, 559, 566, 571, 573, 646, 662,\n 668, 756, 867, 921, 1027, 1133, 1191, 1194, 1271, 1805,\n 1884, 1946, 1991, 2094, 2239, 2701, 2819, 2910, 2919, 2991,\n 2993, 3002, 3008, 3014, 3098, 3207, 3228, 3281, 3309, 3322,\n 3438, 3442, 3484, 3491, 3499, 3506, 3575, 3582, 3658, 4032)\n then fail(CONSTANT); endif;\n\n if PRESS notin (38, 51, 63, 85, 104, 109, 167, 173, 180, 185,\n 193, 199, 205, 207, 212, 224, 230, 236, 239, 242,\n 243, 249, 296, 333, 337, 345, 352, 386, 389, 432,\n 2701, 2819, 2910, 2919, 2991, 2993, 3002, 3008, 3014, 3098,\n 3207, 3228, 3281, 3309, 3322, 3438, 3442, 3484, 3491, 3499,\n 3506, 3575, 3582, 3658, 4032)\n then fail(EXPERIMENTAL);\n endif;\n\n endif; ! SENSOR = IASI","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"Here we provide two lists of IASI channels. The first list includes all those channels that are either used in the cloud detection, or are intended for active assimilation (or both). The second list is a subset of the first and includes just those intended for active assimilation. Only those channels included in the latter list will have a significant weight during the assimilation process.","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"The control for the cloud detection happens via namelist file at nam/IASI_CLDDET.NL. The format of the namelist file is illustrated below:","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"&NAMCLDDET\nN__Band_Size(1)=145\nN__Bands(1:145,1) =\n 38, 49, 51, 55, 57, 61, 63, 83, 85, 87,\n 104, 109, 111, 116, 122, 128, 135, 141, 146, 148,\n 154, 159, 161, 167, 173, 179, 180, 185, 187, 193,\n 199, 205, 207, 210, 212, 214, 217, 219, 222, 224,\n 226, 230, 232, 236, 239, 242, 243, 246, 249, 252,\n 254, 256, 258, 260, 262, 265, 267, 269, 275, 278,\n 280, 282, 284, 286, 288, 290, 292, 294, 296, 299,\n 306, 308, 310, 312, 314, 316, 318, 320, 323, 325,\n 327, 329, 331, 333, 335, 341, 345, 347, 350, 352,\n 354, 356, 358, 360, 362, 364, 366, 369, 371, 373,\n 375, 377, 379, 381, 383, 386, 389, 398, 401, 404,\n 407, 410, 414, 416, 426, 428, 432, 434, 439, 445,\n 457, 515, 546, 552, 559, 566, 571, 573, 646, 662,\n 668, 756, 867, 921, 1027, 1133, 1191, 1194, 1271, 1805,\n1884, 1946, 1991, 2094, 2239\n/","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"Here we specify a list of 145 channels to be included in \"band 1\" of the cloud detection, i.e., in the main cloud detection channel band. 
The setup of the cloud detection involves not just the channel list but several additional tuning parameters that can be modified to make the screening more or less conservative. The default settings are specified in src/arpifs/obs_preproc/cloud_detect_setup.F90. A comprehensive description of the cloud detection scheme, including explanations of the various tuning parameter values, is given at the NWPSAF web site https://nwp-saf.eumetsat.int/site/software/aerosol-and-cloud-detection/documentation/.","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"Log file of the Screening task will indicate whether the formatting of the namelist file is appropriate:","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":" READING CLOUD DETECTION FILE FOR IASI\n IASI CLOUD DETECTION FILE READ OK","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"In case of an error, the following is printed instead:","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":" READING CLOUD DETECTION FILE FOR IASI\n PROBLEM READING IASI CLOUD DETECTION FILE: Using Default Values","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"The third possibility is that the namelist file does not appear in the working directory, in which case the printout statement is this:","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":" READING CLOUD DETECTION FILE FOR IASI\n NO IASI CLOUD DETECTION FILE : Using Default Values","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"Please note that the use of the \"Default Values\" is generally not a desired outcome. This is because many of the cloud detection channels in the default list (see src/arpifs/obs_preproc/cloud_detect_setup.F90) are sensitive to higher stratosphere and therefore may be severely affected by the relatively low model top of limited-area HARMONIE systems.","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"References:","category":"page"},{"location":"Observations/Iasi/","page":"IASI","title":"IASI","text":"McNally, AP, and PD Watts, 2003: A cloud detection algorithm for high-spectral-resolution infrared sounders. Quarterly Journal of the Royal Meteorological Society, 129, 3411-3423, doi:10.1256/qj.02.208.","category":"page"},{"location":"Verification/Verification/#Monitor","page":"Verification","title":"Monitor","text":"","category":"section"},{"location":"Verification/Verification/","page":"Verification","title":"Verification","text":"monitor documentation","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#configure-your-experiment","page":"Experiment","title":"Experiment configuration","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Introduction","page":"Experiment","title":"Introduction","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"There are several levels on configuration available in HARMONIE. The highest level of configuration is done in ecf/config_exp.h. It includes the environment variables, which are used to control the experimentation. 
In the following we describe the meaning of the different variables and are described in the order they appear in ecf/config_exp.h.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Host specific paths and environment variables for your system are defined in Env_system. ","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Build-options","page":"Experiment","title":"Build options","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Build and bin paths ****\n# Definitions about Build, should fit with hm_rev\nBUILD=${BUILD-yes} # Turn on or off the compilation and binary build (yes|no)\nBUILD_WITH=${BUILD_WITH-makeup} # Which build system to use (makeup|cmake)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"BUILD is a switch for compiling HARMONIE code (yes|no) and BUILD_WITH controls which build system to use when compiling HARMONIE-AROME.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"BINDIR=${BINDIR-$HM_DATA/bin} # Binary directory","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"BINDIR is the location of where your HARMONIE binaries will be installed. You can use this to point to binaries outside of your experiment. A few other options for non default configurations exists as well:","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"COMPILE_ENKF=${COMPILE_ENKF-\"no\"} # Compile LETKF code (yes|no)\nCOMPILE_DABYFA=${COMPILE_DABYFA-\"no\"} # Compile FA/VC code (yes|no)\nSURFEX_OFFLINE_BINARIES=\"no\" # Switch to compile and use offline SURFEX binaries","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#General-settings","page":"Experiment","title":"General settings","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Misc, defined first because it's used later ****\n\nCNMEXP=HARM # Four character experiment identifier\nWRK=$HM_DATA/$CYCLEDIR # Work directory","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"CNMEXP: experiment identifier used by MASTERODB\nWRK is the work directory. The suggested path on ECMWF.atos is $SCRATCH/hm_home/${EXP}/$CYCLEDIR","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Archive-settings-(ECMWF)","page":"Experiment","title":"Archive settings (ECMWF)","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Since $SCRATCH is cleaned regularly on ECMWF some files are transferred to ECFS for a more permanent storage by the scripts scr/Archive_host1 and scr/Archive_logs. 
","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Paths to archive ****\n# We need to define ARCHIVE early since it might be used further down\n\nARCHIVE_ROOT=$HM_DATA/archive # Archive root directory\nECFSLOC=ectmp # Archiving site at ECMWF-ECFS: \"ec\" or ECFS-TMP \"ectmp\"\nECFSGROUP=accord # Group in which to chgrp the ECMWF archive, \"default\" or \"accord\"\nEXTRARCH=$ARCHIVE_ROOT/extract # Archive for fld/obs-extractions","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"ARCHIVE_ROOT is the path to forecast file archive. Note that at ECMWF this directory is not a permanent storage\nEXTRARCH is the path to field extraction archive. Note that at ECMWF this directory is not a permanent storage\nECFSLOC Archiving site at ECMWF-ECFS (ectmp|ec) Note that files archived on ectmp will be lost after 90 days. If you wish your files to stay longer you should set ECFSLOC=ec. \nECFSGROUP Group in which to chgrp the ECMWF archive, (accord|default)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Running-Mode","page":"Experiment","title":"Running Mode","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Running mode ****\nRUNNING_MODE=research # Research or operational mode (research|operational)\n # operational implies that the suite will continue even if e.g.\n # observations are missing or assimilation fails\n\nSIMULATION_TYPE=nwp # Type of simulation (nwp|climate)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"RUNNING_MODE can be research or operational. Operational is more forgiving in the error handling and e.g. the assimilation will be skipped if Bator doesn't find any observations. Exceptions handled by the operational mode are written to $HM_DATA/severe_warnings.txt\nSIMULATION_TYPE Switch between nwp and climate type of simulation. The climate simulations are still in an experimental stage. ","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Model-domain-settings","page":"Experiment","title":"Model domain settings","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Horizontal domain settings. Further information is available here","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"\n# **** Model geometry ****\nDOMAIN=DKCOEXP # See definitions in scr/Harmonie_domains.pm\nTOPO_SOURCE=gmted2010 # Input source for orography. Available are (gmted2010|gtopo30)\nGRID_TYPE=LINEAR # Type of grid (LINEAR|QUADRATIC|CUBIC)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"DOMAIN defines your domain according to the settings in scr/Harmonie_domains.pm (DKCOEXP). The spectral truncation for your domain is determined from NLON and NLAT by scr/Harmonie_domains.pm. Further information on model domains are available here\nTOPO_SOURCE: Defines input source for model orography (gmted2010|gtopo30). 
Further information available here: hi-res topography\nGRID_TYPE: This variable is used to define the spectral truncation used (LINEAR|QUADRATIC|CUBIC). GRID_TYPE is used in scr/Climate and scr/Forecast","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Vertical-levels","page":"Experiment","title":"Vertical levels","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Set the number vertical levels to use. Further information is available here","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"VLEV=65 # Vertical level definition name\n # HIRLAM_60, MF_60,HIRLAM_40, or\n # BOUNDARIES = same number of levs as on boundary file.\n # See the other choices from scr/Vertical_levels.pl","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"VLEV is the name of the vertical levels defined in scr/Vertical_levels.pl (65). Further information is available here. If you intend to run upper air assimilation you must select the same domain and level definition for which you have derived structure functions. Read more Structure Functions","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Forecast-model","page":"Experiment","title":"Forecast model","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Higher level forecast model settings.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** High level forecast options ****\nNAMELIST_BASE=\"harmonie\" # Input for namelist generation (harmonie|alaro1)\n # harmonie : The default HARMONIE namelist base nam/harmonie_namelists.pm\n # alaro1 : For ALARO-1 baseline with only a few configurations available\n # nam/alaro1_namelists.pm\nDYNAMICS=\"nh\" # Hydrostatic or non-hydrostatic dynamics (h|nh)\nVERT_DISC=vfd # Discretization in the vertical (vfd,vfe)\n # Note that vfe does not yet work in non-hydrostatic mode\nPHYSICS=\"arome\" # Main model physics flag (arome|alaro)\nSURFACE=\"surfex\" # Surface flag (old_surface|surfex)\nDFI=\"none\" # Digital filter initialization (idfi|fdfi|none)\n # idfi : Incremental dfi\n # fdfi : Full dfi\n # none : No initialization (AROME case)\nLSPBDC=no # Spectral boundary contions option off(no) | on(yes)\nLGRADSP=yes # Apply Wedi/Hortal vorticity dealiasing\nLUNBC=yes # Apply upper nested boundary condition","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"NAMELIST_BASE: Two different namelist sets are available (harmonie|alaro).\nDYNAMICS: Hydrostatic or non-hydrostatic dynamics (h|nh)\nVERT_DISC: Vertical discretization (vfd,vfe)\nPHYSICS: HARMONIE uses either AROME or ALARO for its forecast model physics (arome|alaro)\nSURFACE: Surface physics flag to use either the SURFEX or the ALADIN surface scheme(surfex|old_surface)\nDFI: Digital filter initialization switch (idfi|fdfi|none). idfi - incremental dfi, fdfi - full dfi, none - no initialization. 
See Digital filter for more information\nLSPBDC: Specify whether the boundary conditions are spectral or not (yes|no)\nLGRADSP: Switch to apply vorticity dealiasing (yes|no)\nLUNBC: Switch to apply upper boundary conditions (yes|no)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Physics","page":"Experiment","title":"Physics","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Physics options.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# Highlighted physics switches\nCISBA=\"3-L\" # Type of ISBA scheme in SURFEX. Options: \"3-L\" and \"2-L\".\nCROUGH=\"NONE\" # SSO scheme used in SURFEX \"NONE\"|\"'Z01D'\"|\"'BE04'\"\nSURFEX_SEA_ICE=\"none\" # Treatment of sea ice in surfex (none|sice)\nMASS_FLUX_SCHEME=edmfm # Version of EDMF scheme (edkf|edmfm)\n # Only applicable if PHYSICS=arome\n # edkf is the AROME-MF version\n # edmfm is the KNMI implementation of Eddy Diffusivity Mass Flux scheme for Meso-scale\nHARATU=\"yes\" # Switch for HARATU turbulence scheme (no|yes)\nALARO_VERSION=0 # Alaro version (1|0)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"CISBA: If SURFACE is set to surfex this selects the type of ISBA scheme to use in SURFEX. (3-L|2-L). See src/surfex_namelists.pm Namelists\nCROUGH: If SURFACE is set to surfex this selects the sub-grid scale orography scheme used in SURFEX. (NONE|Z01D|BE04). See src/surfex_namelists.pm Namelist\nSURFEX_SEA_ICE: Treatment of sea ice in surfex (none|sice). See nam/surfex_namelists.pm\nMASS_FLUX_SCHEME: If PHYSICS is set to arome choose the mass flux scheme to be used by AROME; edkf to use the AROME-MF scheme or edmfm to use the KNMI developed scheme\nHARATU: Switch to use the HARATU turbulence scheme\nALARO_VERSION: If PHYSICS is set to alaro select version of ALARO to use (0|1)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Assimilation","page":"Experiment","title":"Assimilation","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Data assimilation settings. More assimilation related settings, in particular what observations to assimilate, can be found in src/include.ass","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Assimilation ****\nANAATMO=3DVAR # Atmospheric analysis (3DVAR|4DVAR|blending|none)\nANASURF=CANARI_OI_MAIN # Surface analysis (CANARI|CANARI_OI_MAIN|CANARI_EKF_SURFEX|none)\n # CANARI : Old style CANARI\n # CANARI_OI_MAIN : CANARI + SURFEX OI\n # CANARI_EKF_SURFEX : CANARI + SURFEX EKF ( experimental )\n # none : No surface assimilation\nANASURF_MODE=\"before\" # When ANASURF should be done\n # before : Before ANAATMO\n # after : After ANAATMO\n # both : Before and after ANAATMO (Only for ANAATMO=4DVAR)\nINCV=\"1,1,1,1\" # Active EKF control variables. 1=WG2 2=WG1 3=TG2 4=TG1\nINCO=\"1,1,0\" # Active EKF observation types (Element 1=T2m, element 2=RH2m and element 3=Soil moisture) \n\nSST=BOUNDARY # Which SST fields to be used in surface analysis\n # BOUNDARY : SST interpolated from the boundary file. 
ECMWF boundaries utilize a special method.\n # HIRLAM and HARMONIE boundaries applies T0M which should be SST over sea.\nLSMIXBC=no # Spectral mixing of LBC0 file before assimilation\n[ \"$ANAATMO\" = 3DVAR] && LSMIXBC=yes\nJB_INTERPOL=no # Interpolation of structure functions from a pre-defined domain to your domain\n","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"ANAATMO: Atmospheric analysis (3DVAR|4DVAR|blending|none)\nANASURF: Surface analysis (CANARI|CANARIOIMAIN|CANARIEKFSURFEX|none). See nam/surfex_namelists.pm\nANASURF_MODE:When the surface should be called (before|after|both)\nINCV: Active EKF control variables. 1=WG2 2=WG1 3=TG2 4=TG1 (0|1)\nINCO: Active EKF observation types (Element 1=T2m, element 2=RH2m and element 3=Soil moisture) (0|1)\nSST: which sea surface temperature field to use in the surface analysis\nLSMIXBC Spectral mixing of LBC0 file before assimilation (no|yes)\nJB_INTERPOL Interpolation of structure functions from a pre-defined domain to your domain (no|yes). Note that this has to be used with some caution.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Observations","page":"Experiment","title":"Observations","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Observations ****\nOBDIR=$HM_DATA/observations # Observation file directory\nRADARDIR=$HM_DATA/radardata # Radar observation file directory\nSINGLEOBS=no # Run single obs experiment with observation created by scr/Create_single_obs (no|yes)\n\nUSE_MSG=no # Use MSG data for adjustment of inital profiles, EXPERIMENTAL! (no|yes)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"OBDIR: Defines the directory that your (BUFR) observation files (obYYYYMMDDHH) are to read from\nRADARDIR: Defines the directory that your (OPERA HDF5) radar observation files are to be read from. BALTRAD OPERA HDF5, MF BUFR and LOCAL files are treated in scr/Prepradar\nSINGLEOBS Run single obs experiment with synthetic observation created by scr/Create_single_obs scr/Create_single_obs (no|yes)\nUSE_MSG: Use MSG data for adjustment of inital profiles, EXPERIMENTAL! 
(no|yes), expects MSGcloudYYYYMMDDHH.grib in $OBDIR","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#4DVAR-settings","page":"Experiment","title":"4DVAR settings","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"4DVAR settings (experimental)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** 4DVAR ****\nNOUTERLOOP=1 # 4DVAR outer loops, need to be 1 at present\nILRES=2,2 # Resolution (in parts of full) of outer loops\nTSTEP4D=360,360 # Timestep length (seconds) of outer loops TL+AD\nTL_TEST=yes # Only active for playfile tlad_tests\nAD_TEST=yes # Only active for playfile tlad_tests","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"NOUTERLOOP: Number of outer loops, need to be 1 at present\nILRES: Resolution (in parts of full) of outer loops\nTSTEP4D: Timestep length (seconds) of outer loops TL+AD\nTL_TEST: Only active for playfile tlad_tests (yes|no)\nAD_TEST: Only active for playfile tlad_tests (yes|no)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#digital-filter","page":"Experiment","title":"Digital filter settings ","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Digital filter initialization settings if DFI is not equal to \"none\"","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** DFI setting ****\nTAUS=5400 # cut-off frequency in second\nTSPAN=5400 # 7200s or 5400s","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"TAUS cut-off frequency in seconds \nTSPAN length of DFI run in seconds","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Boundaries-and-initial-conditions","page":"Experiment","title":"Boundaries and initial conditions","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Settings for generation of lateral boundaries conditions for HARMONIE. 
Further information is available here","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Lateral boundary conditions ****\nHOST_MODEL=\"ifs\" # Host model (ifs|hir|ald|ala|aro)\n # ifs : ecmwf data\n # hir : hirlam data\n # ald : Output from aladin physics\n # ala : Output from alaro physics\n # aro : Output from arome physics\n\nHOST_SURFEX=\"no\" # yes if the host model is run with SURFEX\nSURFEX_INPUT_FORMAT=lfi # Input format for host model run with surfex (lfi|fa)\n\nNBDMAX=12 # Number of parallel interpolation tasks\nBDLIB=ECMWF # Boundary experiment, set:\n # ECMWF to use MARS data\n # RCRa to use RCRa data from ECFS\n # Other HARMONIE/HIRLAM experiment\n\nBDDIR=$HM_DATA/${BDLIB}/archive/@YYYY@/@MM@/@DD@/@HH@ # Boundary file directory,\n # For more information, read in scr/Boundary_strategy.pl\n\nSST_SOURCES=$HOST_MODEL # List of external SST sources like $HOST_MODEL|HIROMB|NEMO|ROMS|ECE\n # See util/gl/ala/merge_ocean.F90 for more details\nSST_IS_LSM=\"auto\" # Switch for using SST as LSM (lsm|sst|auto)\n\nINT_BDFILE=$WRK/ELSCF${CNMEXP}ALBC@NNN@ # Interpolated boundary file name and location\n\nBDSTRATEGY=simulate_operational # Which boundary strategy to follow\n # as defined in scr/Boundary_strategy.pl\n #\n # available : Search for available files in BDDIR, try to keep forecast consistency\n # This is ment to be used operationally\n # simulate_operational : Mimic the behaviour of the operational runs using ECMWF LBC,\n # i.e. 6 hour old boundaries\n # same_forecast : Use all boundaries from the same forecast, start from analysis\n # analysis_only : Use only analysises as boundaries\n # era : As for analysis_only but using ERA interim data\n # latest : Use the latest possible boundary with the shortest forecast length\n # RCR_operational : Mimic the behaviour of the RCR runs, ie\n # 12h old boundaries at 00 and 12 and\n # 06h old boundaries at 06 and 18\n # enda : use ECMWF ENDA data for running ensemble data assimilation\n # or generation of background statistic.\n # Note that only LL up to 9h is supported\n # with this you should set your ENSMSEL members\n # eps_ec : ECMWF EPS members (on reduced gaussian grid)\n # : Only meaningful with ENSMSEL non-empty, i.e., ENSSIZE > 0\n\nBDINT=1 # Boundary interval in hours\n\nSURFEX_PREP=\"yes\" # Use offline surfex prep facility (Alt. gl + Fullpos + prep )","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"HOST_MODEL defines the host model that provides the lateral boundaries conditions for your experiment\nhir for HIRLAM.\nald for ALADIN \nala for ALARO\naro for AROME\nifs for ECMWF-IFS. \nHOST_SURFEX Set to yes if host model runs with SURFEX. (no|yes)\nSURFEX_INPUT_FORMAT Input format for host model run with surfex (lfi|fa)\nBDLIB is the experiment to be used as boundaries. Possible values, ECMWF for IFS from MARS (default), RCRa for HIRLAM-RCR from ECFS or other HARMONIE experiment. \nBDDIR is the boundary file directory. The possible date information in the path must be given by using UPPER CASE letters (@YYYY@=year,@MM@=month,@DD@=day,@HH@=hour,@FFF@=forecast length). \nBDSTRATEGY Which boundary strategy to follow i.e. How to find the right boundaries with the right age and location. 
Read more\nBDINT is boundary interval in hours.\nBDCLIM is the path to climate files corresponding the boundary files, when nesting HARMONIE to HARMONIE.\nINT_BDFILE is the name and location of the interpolated boundary files. These files are removed every cycle, but if you wish to save them you can specify a more permanent location here. By setting INT_BDFILE=$ARCHIVE the interpolated files will be stored in your archive directory.\nNBDMAX Number of parallel boundary interpolation tasks in mSMS. The current default value is 12.\nSST_SOURCES defines the host model used for SST & SIC\nSST_IS_LSM in interpolation of SST/SIC from host to HARMONIE grid, use SST (with missing values above land) to derive LSM, or use the actual provided LSM. The default, auto, makes a smart guess based on host model.\nSURFEX_PREP Use SURFEX tool PREP instead of gl+FULLPOS to prepare SURFEX initial conditions. This is now the default. The gl+FULLPOS version is still working but will not be maintained in the future (no|yes)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Read more about the boundary file preparation here.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Ensemble-mode-settings","page":"Experiment","title":"Ensemble mode settings","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# *** Ensemble mode general settings. ***\n# *** For member specific settings use msms/harmonie.pm ***\nENSMSEL= # Ensemble member selection, comma separated list, and/or range(s):\n # m1,m2,m3-m4,m5-m6:step mb-me == mb-me:1 == mb,mb+1,mb+2,...,me\n # 0=control. ENSMFIRST, ENSMLAST, ENSSIZE derived automatically from ENSMSEL.\nENSINIPERT= # Ensemble perturbation method (bnd). Not yet implemented: etkf, hmsv.\nENSCTL= # Which member is my control member? Needed for ENSINIPERT=bnd. See harmonie.pm.\nENSBDMBR= # Which host member is used for my boundaries? Use harmonie.pm to set.\nENSMFAIL=0 # Failure tolerance for all members.\nENSMDAFAIL=0 # Failure tolerance for members doing own DA. Not implemented.\nSLAFK=1.0 # best set in harmonie.pm\nSLAFLAG=0 # --- \" ---\nSLAFDIFF=0 # --- \" ---\n\n# *** This part is for EDA with observations perturbation\nPERTATMO=none # ECMAIN : In-line observation perturbation using the default IFS way.\n \t\t\t# CCMA : Perturbation of the active observations only (CCMA content)\n\t \t\t# before the Minimization, using the PERTCMA executable.\n \t\t\t# none : no perturbation of upper-air observations\n\nPERTSURF=none # ECMA : perturb also the surface observation before Canari (recommended\n \t\t\t# : for EDA to have full perturbation of the initial state).\n # model : perturb surface fields in grid-point space (recursive filter)\n\t\t\t # none : no perturbation for surface observations.\n\nFESTAT=no # Extract differences and do Jb calculations (no|yes)\n","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"ENSMSEL Ensemble member selection, comma separated list, and/or range(s):\n # m1,m2,m3-m4,m5-m6:step mb-me == mb-me:1 == mb,mb+1,mb+2,...,me\n # 0=control. ENSMFIRST, ENSMLAST, ENSSIZE derived automatically from ENSMSEL.\nENSINIPERT Ensemble perturbation method (bnd). Not yet implemented: etkf, hmsv, slaf.\nENSMFAIL Failure tolerance for all members. 
Not yet implemented.\nENSMDAFAIL Failure tolerance for members doing own DA. Not yet implemented.\nENSCTL Which member is my control member? Needed for ENSINIPERT=bnd. See harmonie.pm.\nENSBDMBR Which host member is used for my boundaries? Use harmonie.pm to set.\nSLAFK Perturbation coefficients for SLAF, experimental\nSLAFLAG Time lag for boundaries in SLAG, experimental","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"For member dependent settings see msms/harmonie.pm.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"PERTATMO Observation perturbation with three options \nECMA : In-line observation perturbation using the default IFS way.\nCCMA : Perturbation of the active observations only (CCMA content) before the Minimization, using the PERTCMA executable.\nnone : no perturbation of upper-air observations\nPERTSURF Perturbation of surface observations before Canari (recommended for EDA to have full perturbation of the initial state) (no|yes).","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"FESTAT Extract differences and do Jb calculations (no|yes). Read more about the procedure here.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Aerosol-choices","page":"Experiment","title":"Aerosol choices","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"USEAERO influences import of near-real-time aerosol via boundaries and use of aerosol in the forecast model. It selects use of n.r.t, climatology or no aerosol. When USEAERO=climaero, CAERO selects data for monthly climate file generation from existing sources (4 species of Tegen or CAMS AOD@550nm or 11 species of CAMS vertically integrated mass). CAMS aerosol mass climatology [camscms] and MOCAGE n.r.t. [mocanrt] are currently not available for within HARMONIE forecast system. ","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Aerosol choices [USEAERO implies aerosol usage in physics via forecast_model_settings!] ****\nUSEAERO=camsnrt # Aerosol usage: camsnrt | climaero | noaero | [mocanrt]\nCAERO=tegenaod # Aerosol climatology generation: tegenaod | [camscms not yet available] ","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Climate-file-settings","page":"Experiment","title":"Climate file settings","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Climate file generation settings. 
Further information is available here","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Climate files ****\nCREATE_CLIMATE=${CREATE_CLIMATE-yes} # Run climate generation (yes|no)\nCLIMDIR=$HM_DATA/climate/$DOMAIN # Climate files directory\nBDCLIM=$HM_DATA/${BDLIB}/climate # Boundary climate files (ald2ald,ald2aro)\n # This should point to intermediate aladin\n # climate file in case of hir2aro,ifs2aro processes.\nECOCLIMAP_PARAM_BINDIR=$HM_DATA/climate # Binary cover param files directory\n\n# Physiography input for SURFEX\nECOCLIMAP_VERSION=SG # Version of ECOCLIMAP for surfex\n # Available versions are 1.1-1.5,2.0-2.2,2.2.1,2.5_plus and SG\n # FLake requires 2.5_plus or SG\nXSCALE_H_TREE=1.0 # Scale the tree height with this factor\n# Activate inclusion of fake trees for open land VEGTYPEs. The vector positions represent:\n# 1 NVT_BOGR, 2 NVT_GRAS, 3 NVT_TROG, 4 NVT_C3W, 5 NVT_C3S, 6 NVT_C4, 7 NVT_FLGR\nLFAKETREE=.F.,.F.,.F.,.F.,.F.,.F.,.F.\nLDB_VERSION=3.0 # Lake database version.\nSOIL_TEXTURE_VERSION=SOILGRID # Soil texture input data FAO|HWSD_v2|SOILGRID|SOILGRID_v2\n\n# Path to pre-generated domains, in use if USE_REF_CLIMDIR=yes set in Env_system\n# Saves time for quick experiments\nREF_CLIMDIR=ec:/hlam/harmonie_climdir/release-43h2.1.1/$DOMAIN/$GRID_TYPE/$ECOCLIMAP_VERSION\n","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"CREATE_CLIMATE: Run climate generation (yes|no). If you already have a full set of climate files generated in CLIMDIR you can set this flag to no for a faster run.\nCLIMDIR: path to the generated climate files for your specific domain. The input data for the climate generation is defined by HM_CLDATA defined in Env_system -> config-sh/config.YOURHOST\nBDCLIM: path to intermediate climate files\nECOCLIMAP_PARAM_BINDIR: Direcotry where the binray version of ECOCLIMAP 1st generation parameter files will be stored.\nECOCLIMAP_VERSION is the version of ECOCLIMAP to be used with SURFEX. Available versions are 1.1-1.5,2.0-2.2,2.2.1,2.5_plus,SG. See surfex_namelists.pm Namelist\nXSCALE_H_TREE: A factor that scales the original tree height that comes from the database.\nLFAKETREE: Only relevant for ECOCLIMAP_VERSION=SG. It activates the inclusion of fake trees for open land VEGTYPEs to increase the roughness length. The vector positions represent 1 NVT_BOGR, 2 NVT_GRAS, 3 NVT_TROG, 4 NVT_C3W, 5 NVT_C3S, 6 NVT_C4, 7 NVT_FLGR.\nLDB_VERSION: Specifies the version of the Global Lake Database used for FLake.\nSOIL_TEXTURE_VERSION Soil texture input data (FAO|HWSD_v2|SOILGRID|SOILGRID_v2). See surfex_namelists.pm more info.\nREF_CLIMDIR: Specifies the location of possible pre-generated domains. 
Is used if USE_REF_CLIMDIR=yes set in Env_system.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Archiving-settings","page":"Experiment","title":"Archiving settings","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Archiving settings ****\nARCHIVE_ECMWF=yes # Archive to $ECFSLOC at ECMWF (yes|no)\n# Archiving selection syntax, settings done below\n#\n# [fc|an|pp]_[fa|gr|nc] : Output from\n# an : All steps from upper air and surface analysis\n# fc : Forecast model state files from upper air and surfex\n# pp : Output from FULLPOS and SURFEX_LSELECT=yes (ICMSHSELE+NNNN.sfx)\n# in any of the formats if applicable\n# fa : FA files\n# gr : GRIB[1|2] files\n# nc : NetCDF files\n# sqlite|odb|VARBC|bdstrategy : odb and sqlite files stored in odb_stuff.tar\n# fldver|ddh|vobs|vfld : fldver/ddh/vobs/vfld files\n# climate : Climate files from PGD and E923\n# Some macros\n# odb_stuff=odb:VARBC:bdstrategy:sqlite\n# verif=vobs:vfld\n# fg : Required files to run the next cycle","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Forecast-output","page":"Experiment","title":"Forecast output","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Cycles to run, and their forecast length ****\n\nTFLAG=\"h\" # Time flag for model output. (h|min)\n # h = hour based output\n # min = minute based output\n\n\n# The unit of HWRITUPTIMES, FULLFATIMES, ..., SFXFWFTIMES should be:\n# - hours if TFLAG=\"h\"\n# - minutes if TFLAG=\"min\"\n\n# Writeup times of # history,surfex and fullpos files\n# Comma separated list, and/or range(s) like:\n# t1,t2,t3-t4,t5-t6:step tb-te == tb-te:1 == tb,tb+1,tb+2,...,te\n\nif [ -z \"$ENSMSEL\"] ; then\n # Standard deterministic run\n HH_LIST=\"00-21:3\" # Which cycles to run, replaces FCINT\n LL_LIST=\"12,3\" # Forecast lengths for the cycles [h], replaces LL, LLMAIN\n # The LL_LIST list is wrapped around if necessary, to fit HH_LIST\n HWRITUPTIMES=\"00-21:3,24-60:6\" # History file output times\n FULLFAFTIMES=$HWRITUPTIMES # History FA file IO server gather times\n PWRITUPTIMES=\"00-60:3\" # Postprocessing times\n PFFULLWFTIMES=-1 # Postprocessing FA file IO server gathering times\n VERITIMES=\"00-60:1\" # Verification output times, may change PWRITUPTIMES\n SFXSELTIMES=$HWRITUPTIMES # Surfex select file output times\n # Only meaningful if SURFEX_LSELECT=yes\n SFXSWFTIMES=-1 # SURFEX select FA file IO server gathering times\n SWRITUPTIMES=\"00-06:3\" # Surfex model state output times\n SFXWFTIMES=$SWRITUPTIMES # SURFEX history FA file IO server gathering times\n if [ \"$SIMULATION_TYPE\" == climate]; then #Specific settings for climate simulations\n HWRITUPTIMES=\"00-760:6\" # History file output times\n FULLFAFTIMES=\"00-760:24\" # History FA file IO server gather times\n PWRITUPTIMES=$HWRITUPTIMES # Postprocessing times\n VERITIMES=$HWRITUPTIMES # Verification output times, may change PWRITUPTIMES\n SFXSELTIMES=$HWRITUPTIMES # Surfex select file output times - Only meaningful if SURFEX_LSELECT=yes\n SWRITUPTIMES=\"00-760:12\" # Surfex model state output times\n SFXWFTIMES=$SWRITUPTIMES # SURFEX history FA file IO server gathering times\n fi\n\n ARSTRATEGY=\"climate:fg:verif:odb_stuff: \\\n [an|fc]_fa:pp_grb\" # Files to archive on ECFS, see above for syntax\n\nelse\n # EPS settings\n HH_LIST=\"00-21:3\" # Which cycles to run, 
replaces FCINT\n LL_LIST=\"36,3,3,3\" # Forecast lengths for the cycles [h], replaces LL, LLMAIN\n HWRITUPTIMES=\"00-06:3\" # History file output times\n FULLFAFTIMES=$HWRITUPTIMES # History FA file IO server gather times\n PWRITUPTIMES=\"00-48:1\" # Postprocessing times\n PFFULLWFTIMES=-1 # Postprocessing FA file IO server gathering times\n VERITIMES=\"00-60:3\" # Verification output times, may change PWRITUPTIMES\n SFXSELTIMES=$HWRITUPTIMES # Surfex select file output times\n # Only meaningful if SURFEX_LSELECT=yes\n SFXSWFTIMES=-1 # SURFEX select FA file IO server gathering times\n SWRITUPTIMES=\"00-06:3\" # Surfex model state output times\n SFXWFTIMES=$SWRITUPTIMES # SURFEX history FA file IO server gathering times\n\n ARSTRATEGY=\"climate:fg:verif:odb_stuff: \\\n an_fa:pp_grb\" # Files to archive on ECFS, see above for syntax\n\nfi\n","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"The writeup times of model output can be defined as a space separated list or as a fixed frequency for model history files, surfex files and postprocessed files respectively. The unit of the steps of WRITUPTIMES, SWRITUPTIMES, PWRITUPTIMES and OUTINT should be in hours or minutes depending on the TFLAG Regular output interval can be switched on by setting OUTINT>0. Consequently, OUTINT will override the WRITUPTIMES lists!","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"TFLAG: Time flag for model output. Hourly or minute-based output (h|min)\nHWRITUPTIMES: Output list for history files. Default is 00-21:3,24-60:6 which will output files every 3 hours for 00-21 and every 6 hours for 24-60.\nVERITIMES: Output list for verification files. Default is 00-60:1 which will produce file every 1 hour for 00-60\nSWRITUPTIMES Output list for surfex files. Default is 00-06:3 which output a SURFEX file every 3 hours for 00-06.\nPWRITUPTIMES Output list for fullpos (post-processed) files. Default is 00-21:3,24-60:6 which will output files every 3 hours for 00-21 and every 6 hours for 24-60.","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"SURFEX_LSELECT=\"yes\" # Only write selected fields in surfex outpute files. (yes|no)\n # Check nam/surfex_selected_output.pm for details.\n # Not tested with lfi files.\nINT_SINI_FILE=$WRK/SURFXINI.fa # Surfex initial file name and location\n\n# **** Postprocessing/output ****\nIO_SERVER=yes # Use IO server (yes|no). Set the number of cores to be used\n # in your Env_submit\nIO_SERVER_BD=yes # Use IO server for reading of boundary data\nPOSTP=\"inline\" # Postprocessing by Fullpos (inline|offline|none).\n # See Setup_postp.pl for selection of fields.\n # inline: this is run inside of the forecast\n # offline: this is run in parallel to the forecast in a separate task\n\nFREQ_RESET_TEMP=3 # Reset frequency of max/min temperature values in hours, controls NRAZTS\nFREQ_RESET_GUST=1 # Reset frequency of max/min gust values in hours, controls NXGSTPERIOD\n # Set to -1 to get the same frequency _AND_ reset behaviour as for min/max temperature\n # See yomxfu.F90 for further information.\n","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"SURFEX_LSELECT: Switch to write a selection of fields in SURFEX output files (yes|no). See surfex_selected_output.pm for more info. 
Namelist\nINT_SINI_FILE: name and location of the initial SURFEX file\nARCHIVE_ECMWF: archive files to ECFSLOC at ECMWF (yes|no)\nIO_SERVER: Use IO server (yes|no). If set to \"yes\" changes may be required in Env_submit -> config-sh/submit.YOURHOUST\nPOSTP: Postprocessing by Fullpos (inline|offline|none).\nFREQ_RESET_[TEMP|GUST]: Reset frequency of max/min values in hours, controls NRAZTS. Default is every 3/1 hours","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** GRIB ****\nCONVERTFA=yes # Conversion of FA file to GRIB/nc (yes|no)\nARCHIVE_FORMAT=GRIB1 # Format of archive files (GRIB1|GRIB2|nc). nc format yet only available in climate mode\nNCNAMES=nwp # Nameing of NetCDF files follows (climate|nwp) convention.\nRCR_POSTP=no # Produce a subset of fields from the history file for RCR monitoring\n # Only applicable if ARCHIVE_FORMAT=GRIB\nMAKEGRIB_LISTENERS=1 # Number of parallel listeners for Makegrib\n # Only applicable if ARCHIVE_FORMAT=GRIB\n","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"More options on fullpos postprocessing can be found in scr/Select_posp.pl","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"CONVERTFA: Conversion of FA files to GRIB or NetCDF (yes|no)\nARCHIVE_FORMAT: Format of archive files (GRIB1|nc). NetCDF format yet only available in climate mode\nRCR_POSTP: Produce a subset of fields from the history file for RCR monitoring (yes|no). This is only applicable if ARCHIVE_FORMAT=GRIB1|GRIB2\nMAKEGRIB_LISTENERS: Number of parallel listeners for Makegrib. Only applicable if ARCHIVE_FORMAT=GRIB1|GRIB2","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"More options on file conversion can be found in scr/Makegrib","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#DDH-files","page":"Experiment","title":"DDH files","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** DDH ****\nUSEDDH=\"yes\" # Use DDH. 
(yes|no)\n","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Permits to include the namelist NAMDDH in the forecast namelist.\nThe DDH files will be saved in the ARCHIVE directory.\nTo archive the DDH files to ECFSLOC at ECMWF, include the option \"ddh\" in ARSTRATEGY\nRead more in DDH in HARMONIE-AROME","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Verification-and-monitoring","page":"Experiment","title":"Verification and monitoring","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# **** Verification extraction ****\nOBSEXTR=yes # Extract observations from BUFR (yes|no)\nFLDEXTR=yes # Extract model data for verification from model files (yes|no)\nFLDEXTR_TASKS=1 # Number of parallel tasks for field extraction\nVFLDEXP=$EXP # Experiment name on vfld files\nSCREXTR=no # Use Screening (NCONF=002) to produce O-F data\nSCREXTR_TASKS=1 # Number of parallel tasks for O-F extraction\nFGREFEXP=${FGREFEXP-undef} # reference experiment name for FirstGuess\nOBREFEXP=${OBREFEXP-undef} # reference experiment name for ODBs","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"OBSEXTR: Extract observations for verification from BUFR (yes|no)\nFLDEXTR: Extract model data for verification from model files (yes|no)\n*FLDEXTR_TASKS: Number of parallel tasks for field extraction\nVFLDEXP: Change vfld file name to this string\nSCREXTR : Switch on extraction of O-F data for verification using Screening (NCONF=002)\nSCREXTR_TASKS : Number of parallel tasks for O-F extraction\nFGREFEXP=${FGREFEXP-undef} : reference experiment name for FirstGuess (useful with PLAYFILE=allobsver)\nOBREFEXP=${OBREFEXP-undef} : reference experiment name for ODBs (useful with PLAYFILE=allobsver)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Read more about the verification package here","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Field-verification","page":"Experiment","title":"Field verification","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# *** Field verification ***\nFLDVER=no # Main switch for field verification (yes|no)\nFLDVER_HOURS=\"06 12 18 24 30 36 42 48\" # Hours for field verification","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"FLDVER Main switch for field verification (yes|no). 
The field verification extracts some selected variables for calculation of bias, rmse, stdv and averages on the model grid.\nFLDVER_HOURS Hours for field verification","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"More options on field verification can be found in scr/Fldver and scr/AccuFldver","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Observation-monitoring-and-general-diagnostics","page":"Experiment","title":"Observation monitoring and general diagnostics","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# *** Observation monitoring ***\nOBSMONITOR=obstat # Create Observation statistics plots\n # Format: OBSMONITOR=Option1:Option2:...:OptionN\n # obstat: Daily usage maps and departures\n # no: Nothing at all\n #\n # obstat is # only active if ANAATMO != none\nOBSMON_SYNC=no # Sync obsmn sqlite tables from HOST1 (if set) to HOST0 (yes|no)","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"OBSMONITOR Selection for observation statistics plots","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"obstat Observations usage. Read more here.\nno No monitoring","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"Note that this is only active if ANAATMO != none","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Mail-addresses","page":"Experiment","title":"Mail addresses","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"# Recipient(s) to send mails to (you@work,you@home)\nMAIL_ON_ABORT= # when a task aborts\nMAIL_TESTBED= # testbed results summary","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"MAIL_ON_ABORT e-mail address to send a mail to if a task fails in ecFlow\nMAIL_TESTBED e-mail address to send a mail to with a summary of the testbed results","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/#Testbed","page":"Experiment","title":"Testbed","text":"","category":"section"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"export TESTBED_LIST=\"AROME AROME_1D AROME_3DVAR \\\n AROME_BD_ARO AROME_BD_ARO_IO_SERV \\\n HarmonEPS HarmonEPS_IFSENS \\\n AROME_CLIMSIM\"","category":"page"},{"location":"ExperimentConfiguration/ConfigureYourExperiment/","page":"Experiment","title":"Experiment","text":"TESTBED_LIST contains the configurations that will be run in the testbed","category":"page"},{"location":"DataAssimilation/MTEN/#Moist-Total-Energy-Norm-(MTEN)-diagnostic","page":"MTEN","title":"Moist Total Energy Norm (MTEN) diagnostic","text":"","category":"section"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"MTEN shows the sensitivity of the forecast model to different observations withdrawn from the full analysis system. There are two ways of computing the MTEN diagnostic: A special branch was created in CY40 (see below) where the MTEN diagnostic can be requested. 
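For orientation, the diagnostic is based on a moist total energy norm of the difference between a denial run and the reference run. A schematic form of such a norm (a sketch following common total-energy-norm definitions in the literature; the exact weights and reference values used by the MTEN binary are not reproduced here) is:

```math
E = \frac{1}{2}\int_{D}\int_{0}^{1}\left( u'^2 + v'^2 + \frac{c_p}{T_r}T'^2 + \frac{L^2}{c_p T_r}q'^2 \right) d\sigma\, dD + \frac{1}{2}\int_{D} R_d T_r \left(\frac{p_s'}{p_r}\right)^2 dD
```

where the primed quantities are differences between the two forecasts and T_r and p_r are reference temperature and surface pressure values.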
This approach uses Harmonie ensemble system to perform series of observation denial independent runs. This means that the following settings are used in msms/harmonie.pm","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":" 'ENSBDMBR' => [ 0 ],\n 'ENSCTL' => [ '000', '001', '002', '003', '004', '005', '006', '007' ],\n 'AIRCRAFT_OBS' => [ 0, 1, 1, 1, 1, 1, 1, 1],\n 'BUOY_OBS' => [ 1, 0, 1, 1, 1, 1, 1, 1],\n 'AMSUA_OBS' => [ 1, 1, 0, 1, 1, 1, 1, 1],\n 'AMSUB_OBS' => [ 1, 1, 1, 0, 1, 1, 1, 1],\n 'POL_OBS' => [ 1, 1, 1, 1, 0, 1, 1, 1],\n 'HRW_OBS' => [ 1, 1, 1, 1, 1, 0, 1, 1],\n 'TEMP_OBS' => [ 1, 1, 1, 1, 1, 1, 0, 1],\n 'IASI_OBS' => [ 1, 1, 1, 1, 1, 1, 1, 0],","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"In this particular example, we are interested in the impact of aircraft, Buoy, amsu-a, amsu-b/mhs, polar winds, high-resolution geowinds, radiosonde, and iasi observations. This setting is activated in config.exp with the following choice:","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"export REFEXP DOMTEN\nexport SYNOP_OBS=1 # All synop\nexport AIRCRAFT_OBS=1 # AMDAR, AIREP, ACARS\nexport BUOY_OBS=1 # Buoy\nexport POL_OBS=1 # Satob polar winds\nexport GEO_OBS=0 # Satob geo winds\nexport HRW_OBS=1 # Satob HRWind\nexport TEMP_OBS=1 # TEMP, TEMPSHIP\nexport PILOT_OBS=1 # Pilot, Europrofiler\nexport SEVIRI_OBS=0 # Seviri radiances\nexport AMSUA_OBS=1 # AMSU-A\nexport AMSUB_OBS=1 # AMSU-B, MHS\nexport IASI_OBS=1 # IASI\nexport PAOB_OBS=0 # PAOB not defined everywhere\nexport SCATT_OBS=0 # Scatterometer data not defined everywhere\nexport LIMB_OBS=0 # LIMB observations, GPS Radio Occultations\nexport RADAR_OBS=0 # Radar\nexport GNSS_OBS=0 # GNSS","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"Where REFEXP is the reference experiment (see below), and DOMTEN (yes,no) is activate the MTEN choice when fetching the First-guess and the VarBC files for the MTEN computation, as follows: in /scr/Fetch_assim_data:","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"if [ ${DOMTEN} = \"yes\" ]; then\n HM_REFEXP=/sbt/harmonie/$REFEXP\n adir=${ECFSLOC}:${HM_REFEXP}/$YY/$MM/$DD/$HH\nelse\n adir=$( ArchDir $HM_EXP $YY $MM $DD $HH )\nfi","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"in scr/FirstGuess (be careful this happens twice in the script)","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"if [ ${DOMTEN} = \"yes\" ]; then\n HM_REFEXP=/sbt/harmonie/$REFEXP\n adir=${ECFSLOC}:${HM_REFEXP}/$FGYY/$FGMM/$FGDD/$FGHH\nelse\n adir=$( ArchDir $HM_EXP $FGYY $FGMM $FGDD $FGHH )\nfi","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"The MTEN can be also computed using a deterministic system. In this case, you need to take care of the First-guess and the VarBC files, which should come from the reference experiment. You need to carefully set the choice of the observations to be tested in scr/include.ass. 
In this case, you need to adapt the above Fetch_assim_data and FirstGuess scripts accordingly.","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"The MTEN diagnostic, similarly to DFS, is case sensitive, so it is better to make the computation with times and dates sufficiently far apart (by 5 days or more).","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"The MTEN can be computed as in the example below:","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":" # DTG (YYYYMMDDHH), REFEXP and MTEN_BIN are assumed to be set in the environment\n for EXP in EXP1 EXP2; do\n for RANGE in 06 12 18 24 30 36 42 48;\n do\n\n YY=`echo $DTG | cut -c 1-4`\n mm=`echo $DTG | cut -c 5-6`\n dd=`echo $DTG | cut -c 7-8`\n hh=`echo $DTG | cut -c 9-10`\n # -- Get the FA files\n # ===================\n ecp ec:/$USER/harmonie/$REFEXP/$YY/$mm/$dd/$hh/ICMSHHARM+00$RANGE ./FAREF$RANGE\n ecp ec:/$USER/harmonie/${EXP}/$YY/$mm/$dd/$hh/ICMSHHARM+00$RANGE ./${EXP}$RANGE\n $MTEN_BIN/MTEN ./FAREF$RANGE ./${EXP}$RANGE\n\n done\n done\n","category":"page"},{"location":"DataAssimilation/MTEN/","page":"MTEN","title":"MTEN","text":"See (Storto and Randriamampianina, 2010) for more details.","category":"page"},{"location":"DataAssimilation/Surface/CANARI_EKF_SURFEX/#canari_ekf_surfex","page":"CANARI EKF SURFEX","title":"Surface variables assimilated / read in EKF_MAIN","text":"","category":"section"},{"location":"DataAssimilation/Surface/CANARI_EKF_SURFEX/","page":"CANARI EKF SURFEX","title":"CANARI EKF SURFEX","text":"From cycle 37 EKF is implemented in research/development mode. The following tiles and variables are modified:","category":"page"},{"location":"DataAssimilation/Surface/CANARI_EKF_SURFEX/#NATURE","page":"CANARI EKF SURFEX","title":"NATURE","text":"","category":"section"},{"location":"DataAssimilation/Surface/CANARI_EKF_SURFEX/#WG2/WG1/TG2/TG1","page":"CANARI EKF SURFEX","title":"WG2/WG1/TG2/TG1","text":"","category":"section"},{"location":"DataAssimilation/Surface/CANARI_EKF_SURFEX/","page":"CANARI EKF SURFEX","title":"CANARI EKF SURFEX","text":"The uppermost two levels of soil moisture and temperature in ISBA are assimilated: with CANARI/CANARI_OI_MAIN by an OI method, with CANARI_SURFEX_EKF by an Extended Kalman Filter (EKF).","category":"page"},{"location":"DataAssimilation/Surface/CANARI_EKF_SURFEX/","page":"CANARI EKF SURFEX","title":"CANARI EKF SURFEX","text":"For 2012 it is planned to re-write OI_MAIN/EKF_MAIN into the same binary in order to be able to apply the work done for OI_MAIN in EKF_MAIN and thus reduce the maintenance costs.","category":"page"},{"location":"DataAssimilation/StructureFunctions/#structure-functions","page":"Structure functions","title":"Derivation of Structure Functions","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/#General","page":"Structure functions","title":"General","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"For each new model domain, in order to carry out upper air data assimilation (3DVAR or 4DVAR) one needs to generate background error covariances (generally referred to as structure functions). The recommended procedure is to use a two step approach. 
In step one you generate background error statistics by downscaling (this is needed since you do not have have statistics for your domain setup for this forecast model version and physics options, so that you cannot run data-assimilation (unless you use statistics from old system possibly derived from a slighthly different domain and with a different model version, which is not recommended). In step 2 you then use the statistics derived in step 1 to generate the final background error statistics files by applying ensemble data assimilation within the HARMONIE-AROME modelling system.","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"In step 1 structure functions are generated from difference fields from ensemble members of HARMONIE-AROME forecast. These are obtained from downscaling of ECMWF EDA ensemble forecast. To alleviate spin-up issues, these phase 1 downscaled HARMONIE-AROME forecasts are run up to 6 hours, and differences are generated from these. Using the ECMWF LBC data, 6h HARMONIE ensemble forecasts are initiated from ECMWF 6h forecasts daily from 00 UTC and 12 UTC, with ECMWF forecasts as initial and lateral boundary conditions. To obtain stable statistics, it is recomended to run 4 ensembles for two chosen one-month episode (s). The episodes should sample different seasons. Therefore it is recommended to run for one winter month and one summer month, for example June 2016 and January 2017. These periods are chosen so as to benefit from the latest upgrade to ECMWF's EDA system. Thereby we sample both seasonal (January, July) and daily (00 UTC and 12 UTC) variations. After running of the ensembles the archived results (6h forecasts) are processed to generate structure functions by running a program called 'festat'. Festat will be run automatically within the SMS system when DTGEND is approached by an experiment and the statistics will be based on difference files generated by intermediate program femars and stored on ecfs in ec:/$uid/harmonie/$exp/femars (software to generate binary GRIB files of forecasts differences after each cycle). This will mean that if you start by running a one month experiment for January the structure functions generated when you reach DTGEND will be for January. When you use the same experiment name and launch also an experiment for July you will when you reach DTGEND have background error statistics based on both January and July differences files (since both of those are now found in ec:/$uid/harmonie/$exp/femars). These combined winter/summer background error statistics files from phase one are final product from step 1 and can are the intermediate background error statistics files to plug into the HARMONIE-AROME data assimilation of step 2. It should be mentioned that there is a possibility for the more advanced user to run festat off-line and with any combinations of January-July forecast difference files from ec:/$uid/harmonie/$exp/femars. That will be described in ore detail further below and is something you might want to do with forecasts difference files generated from step 2 to produce monthly background error statistics files by combining in different ways.","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"In step 2 we run again two one-month ensemble experiments for the same Januar and July months again utilizing ECMWF EDA forecasts as lateral boundary conditions. 
Again you use 4 ensemble members. The important difference as compared to step 1 is that you now carry out ensemble-data assimilation also within the HARMONIE-AROME framework. You use the background error statistics from phase 1 and do the EDA within a data assimilation cycle. This has the important advantage that you significantly reduce spinup caused by the HARMONIE-AROME model adjustments to ECMWF EDA starting initial states. Because of this we can in step 2 derive the statistics from +3h forecast differences (rather than the +6h used in step 1). ","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"Note that there are methods to circumvent step 1 and to technically run 3/4DVAR using structure functions derived from another HARMONIE model domain. Such existing methods include aspects such as horizontal truncation or extrapolation of horizontal spectra and possibly vertical interpolation in between vertical level geometries. Since the recommended procedure is to use the two step approach described above, these alternative methods are not described in detail. Furthermore it should be noted that there are background error covariance related tuning coefficients REDNMC and REDZONE. Settings of these values are not covered here. If you have a new domain you will use the default value 0.6 for REDNMC and 100 for REDZONE, which are considered appropriate values for the derivation of structure functions. If you re-derive your statistics for an existing domain you will use the REDNMC and REDZONE values as assigned in scr/include.ass. ","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"There are various existing tools for investigating your newly derived structure functions, and at the end of this page there is some documentation of existing tools and how to use them. ","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"The procedure for generating structure functions from an ensemble of forecasts is described below for an AROME setup with 2.5 km horizontal resolution and 65 vertical levels. The experiment is run for a one month winter period followed by a one month summer period on the ECMWF computing system. Forecast differences are derived twice a day (from the 00 UTC and 12 UTC forecasts) from combinations of the four ensemble members. Besides the scientific recommendation to cover many different weather situations there is also a mathematical constraint that the number of forecast difference files provided to festat needs to be larger than the number of vertical levels used in the forecast model integration. In the section below, detailed instructions on how to generate the structure functions are given. 
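As a quick sanity check of the constraint on the number of difference files (a worked example, assuming that differences are formed from all pairs of the four members): four members give C(4,2) = 6 differences per cycle, and two cycles per day over a 30-day month give 6 × 2 × 30 = 360 difference files, comfortably more than the 65 vertical levels. This is consistent with the 360 appearing in example file names such as stab_METCOOPD_65_20200601_360.cv later on this page.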
The other sections deal with how to diagnose the structure functions, recent and ongoing work, and future development plans.","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"For future enhancements regarding the handling and diagnostics of B statistics, it is recommended to save all generated forecast difference files as well as stabal.cv, stabal.cvt and stabal.bal and the generated .xy and .y files (.cvt, .xy and .y for diagnostic purposes):","category":"page"},{"location":"DataAssimilation/StructureFunctions/#Generating-background-error-statistics-(using-43h2.2)","page":"Structure functions","title":"Generating background error statistics (using 43h2.2)","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"The following instructions are valid for trunk and any 43h2.2 tags that have been created. These instructions will only work at ECMWF. If you do have a new domain (or are not sure) you should follow that route in step 1 below. New domain creation is described in ModelDomain which links to the useful Domain Creation Tool ","category":"page"},{"location":"DataAssimilation/StructureFunctions/#STEP-1-Downscaling","page":"Structure functions","title":"STEP 1 Downscaling","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"Create a new experiment on ECMWF:\nIn case you do have an existing domain setup do:\nmkdir -p $HOME/hm_home/jbdownexp\ncd $HOME/hm_home/jbdownexp\n~hlam/Harmonie setup -c JBDSC -r ~hlam/harmonie_release/git/tags/harmonie-43h2.2.1 -d DOMAIN # where DOMAIN is the name of your domain\nIn case you are creating structure functions for a new domain (or you are not sure):\nmkdir -p $HOME/hm_home/jbdownexp\ncd $HOME/hm_home/jbdownexp\n~hlam/Harmonie setup -c JBDSC -r ~hlam/harmonie_release/git/tags/harmonie-43h2.2.1\n~hlam/Harmonie co scr/Harmonie_domains.pm\nThen edit scr/Harmonie_domains.pm and add your new domain definition.\nThe ensemble that will be used to generate the structure functions needs to be defined in suites/harmonie.pm. 
An edited ensemble configuration file should define a four member ensemble that only varies the boundary memeber input (ENSBDMBR) as follows:\n%env = (\n# 'ANAATMO' => { 0 => '3DVAR' },\n# 'HWRITUPTIMES' => { 0 => '00-21:3,24-60:6' },\n# 'SWRITUPTIMES' => { 0 => '00-06:3' },\n# 'HH_LIST' => { 0 => '00-21:3' },\n# 'LL_LIST' => { 0 => '36,3' },\n# 'LSMIXBC' => { 0 => 'no' },\n# 'ANASURF' => { 0 => 'CANARI_OI_MAIN' },\n 'ENSCTL' => [ '001', '002', '003', '004'],\n# 'OBSMONITOR' => [ 'obstat'],\n# SLAFLAG: Forecast length to pick your perturbation end point from\n# SLAFDIFF: Hours difference to pick your perturbation start point from\n# SLAFLAG=24, SLAFDIFF=6 will use +24 - +18\n# SLAFDIFF=SLAFLAG will retain the original SLAF construction\n# SLAFK should be tuned so that all members have the same perturbation size\n 'ENSBDMBR' => [ 1,2,3,4],\n# 'SLAFLAG' => [ 0, 6, 6, 12, 12, 18, 18, 24, 24, 30, 30],\n# 'SLAFDIFF' => [ 0, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6],\n# 'SLAFK' => ['0.0','1.75','-1.75','1.5','-1.5','1.2','-1.2','1.0','-1.0','0.9','-0.9'],\n# When using ECMWF ENS the members should be defined\n# # 'ENSBDMBR' => [ 0, 1..10],\n\n### Normally NO NEED to change the settings below\nRun for two one-month (30 day) periods:\ncd $HOME/hm_home/jbdownexp\n~hlam/Harmonie start DTG=2016060100 DTGEND=2016070100\n#\n#~hlam/Harmonie start DTG=2017010100 DTGEND=2017013100\nGenerate the statistics using festat standalone:\nPlace yourself at $TEMP on ECMWF\nCopy Festat.standalone to $TEMP on ECMWF\nEdit the script to reflect your user and experiment details (in particular copy femars data ec:/$uid/harmonie /jbdownexp/femars/ to your femars-directory on $TEMP)\nsubmit with\nqsub ./Festat.standalone\nyou will get a log-file festat.log on $TEMP and results in directory festat_wrk. when the program has finished do:\ncd festat_wrk\nemkdir ec:/$uid/jbdata\ngzip stab_your_exp.cv\ngzip stab_your_exp.bal\necp stab_your_exp.cv.gz ec:/$uid/jbdata/. (with your own filename and directory)\necp stab_your_exp.bal.gz ec:/$uid/jbdata/. (with your own filename and directory)\n(also create a tar.file with all *.xy *.y *.cv, *.bal and *.cvt and put on ecfs for future diagnostical purposes) ","category":"page"},{"location":"DataAssimilation/StructureFunctions/#STEP-2-Generating-background-error-statistics-with-EDA-cycling-(instructions-under-testing)","page":"Structure functions","title":"STEP 2 Generating background error statistics with EDA cycling (instructions under testing)","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"Create a new experiment on ECMWF:\nIn case you do have an existing domain setup do:\nmkdir -p $HOME/hm_home/jbedaexp\ncd $HOME/hm_home/jbedaexp\n~hlam/Harmonie setup -c AROME_JBEDA -r ~hlam/harmonie_release/git/tags/harmonie-43h2.2.1 -d DOMAIN # where domain is the name of your domain\nIn case you are creating structure functions for a new domain (or you are not sure):\nmkdir -p $HOME/hm_home/jbedanexp\ncd $HOME/hm_home/jbedaexp\n~hlam/Harmonie setup -c AROME_JBEDA -r ~hlam/harmonie_release/git/tags/harmonie-43h2.2.1\n~hlam/Harmonie co scr/Harmonie_domains.pm\nThen edit scr/Harmonie_domains.pm and add your new domain definition.\nThe ensemble that will be used to generate the structure functions needs to be defined in suites/harmonie.pm. 
An edited ensemble configuration file should define a four member ensemble that only varies the boundary memeber input (ENSBDMBR) as follows:\n%env = (\n# 'ANAATMO' => { 0 => '3DVAR' },\n# 'HWRITUPTIMES' => { 0 => '00-21:3,24-60:6' },\n# 'SWRITUPTIMES' => { 0 => '00-06:3' },\n# 'HH_LIST' => { 0 => '00-21:3' },\n# 'LL_LIST' => { 0 => '36,3' },\n# 'LSMIXBC' => { 0 => 'no' },\n# 'ANASURF' => { 0 => 'CANARI_OI_MAIN' },\n 'ENSCTL' => [ '001', '002', '003', '004'],\n# 'OBSMONITOR' => [ 'obstat'], \n# SLAFLAG: Forecast length to pick your perturbation end point from\n# SLAFDIFF: Hours difference to pick your perturbation start point from\n# SLAFLAG=24, SLAFDIFF=6 will use +24 - +18\n# SLAFDIFF=SLAFLAG will retain the original SLAF construction\n# SLAFK should be tuned so that all members have the same perturbation size\n 'ENSBDMBR' => [ 1,2,3,4],\n# 'SLAFLAG' => [ 0, 6, 6, 12, 12, 18, 18, 24, 24, 30, 30],\n# 'SLAFDIFF' => [ 0, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6],\n# 'SLAFK' => ['0.0','1.75','-1.75','1.5','-1.5','1.2','-1.2','1.0','-1.0','0.9','-0.9'],\n# When using ECMWF ENS the members should be defined\n# # 'ENSBDMBR' => [ 0, 1..10],\n\n### Normally NO NEED to change the settings below\nLink to your newly generated Jb statistics from STEP1 :\nEdit in $HOME/hm_home/jbedaexp/scr/include.ass as follows (example for DOMAIN=METCOOP25D): In the section for your relevant domain point to the structure function stored in STEP one as follows:\n elif [ \"$DOMAIN\" = YOUR DOMAIN]; then\n JBDIR=${JBDIR-\"ec:/hirlam/harmonie_jbdata\"}\n JBDIR=ec:/$uid/jbdata\n f_JBCV=stabfiltn_your_exp.cv_jbconv.cv (without .gz)\n f_JBBAL=stabfiltn_your_exp.bal_jbconv.bal (without.gz)\nRun for two one-month (30 day) periods:\ncd $HOME/hm_home/jbedaexp\n~hlam/Harmonie start DTG=2016060100 DTGEND=2016070100\n#\n#~hlam/Harmonie start DTG=2017010100 DTGEND=2017013100\nGenerate the statistics using festat standalone: \nPlace yourself at $TEMP on ECMWF\nCopy Festat.standalone to $TEMP at ECMWF\nEdit the script to reflect your user and experiment details (in particular copy femars data ec:/$uid/harmonie/jbdownexp/femars/ to femars-directory on $TEMP)\nMake sure you have removed old femars_wrk directory and only have forecast differences from you EDA experiment in your femars directory As well preferably name files differently than in STEP 1 downscaling. ","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":" submit with \n\n ```bash\n qsub ./Festat.standalone\n ```\n \n you will get a log-file `festat.log` on `$TEMP` and results in directory `festat_wrk`\n when the program has finished do:\n \n ```bash\n cd festat_wrk\n emkdir ec:/smx/jbdata (with smx replaced with your own user id) \n gzip stab_your_eda_exp.cv\n gzip stab_your_eda_exp.bal\n ecp stab_your_eda_exp.cv.gz ec:/smx/jbdata/. (with your own filename and directory)\n ecp stab_your_eda_exp.bal.gz ec:/smx/jbdata/. 
(with your own filename and directory)\n ```\n\n also create a tar-file with all `*.xy`, `*.y`, `*.cv`, `*.bal` and `*.cvt` and put on ecfs for future diagnostical purposes) These new files are you final background error statistics to be diagnosed (compared with STEP 1 ones perhaps) and inserted to your data assimilation by modyfying `include.ass` (as in bullet 3 above) to point to your new files.","category":"page"},{"location":"DataAssimilation/StructureFunctions/#Diagnosis-of-background-error-statistics","page":"Structure functions","title":"Diagnosis of background error statistics","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"Diagnosis of background error statistics is a rather complicated task. To get an idea of what the correlations and covariances should look like take a look in the article: Berre, L., 2000: Estimation of synoptic and meso scale forecast error covariances in a limited area model. Mon. Wea. Rev., 128, 644-667. Software for investigating and graphically illustrate different aspects of the background error statistics has been developed and statistics generated for different domains has been investigated using the AccordDaTools package. With this software you can also compare your newly generated background error statistics with the one generated for other HARMONIE domains. This will give you and idea if your statistics seems reasonable. For diagnosing the newly derived background error statistics follow these instructions:\nGet the code and scripts:\nDownload and install AccordDaTools following instructions in the README\nDon't forget to add the package tools directory to your PATH: \nexport PATH=/path/to/da_tools:$PATH\nRun Jb diagnostics script:\nFor example for a new domain using horizontal grid-spacing of 2500 m and (Harmonie) 65 vertical levels:\njbdiagnose -b jb_data/stab_IRELAND25_064_480.bal -c jb_data/stab_IRELAND25_064_480.cv -g 2500 -l harmL65 -e jbdiag_IRELAND25_064\nThe output will be made written to jbdiag_IRELAND25_064","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"The AccordDaTools package also provides two tools for plotting the data produced by jbdiagnose, plotjbbal and plotjbdiag. plotjbbal plots Jb balances for different parameters. plotjbdiag produces spectral density (spdens) and vertical correlation (vercor) diagnostic plots for your structure funtions. For example:\nplotjbbal:\nplotjbbal -t stdv -p QQ -r jbdiag_ -e IRELAND25_064\nplotjbdiag:\nplotjbdiag -l 50 -t vercor -p QQ -r jbdiag_ -e IRELAND25_064","category":"page"},{"location":"DataAssimilation/StructureFunctions/#Run-3DVAR/4DVAR-with-the-new-background-error-statistics","page":"Structure functions","title":"Run 3DVAR/4DVAR with the new background error statistics","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"create hm_home/jb_da. 
Then cd $HOME/hm_home/jb_da.\ncreate experiment by typing\n~hlam/Harmonie setup -r ~hlam/harmonie_release/git/tags/harmonie-43h2.2.1\nIn scr/include.ass set JBDIR=ec:/$uid/jbdata (uid being your userid, in this example 'ec:/smx/jbdata') and f_JBCV is name of your .cv file in ec:/$uid/jbdata (without .gz) and f_JBBAL is 'name of your .bal file in ec:/$uid/jbdata (without .gz) (in this example, f_JBCV=stab_METCOOPD_65_20200601_360.cv, stab_METCOOPD_65_20200601_360.bal). Add these three lines instead of the three lines in include.ass that follows right after the elif statement: elif [ \"$DOMAIN\" = METCOOP25D]; then. If domain is other than METCOOP25D one has to look for the alternative name of the domain. \nFrom $HOME/hm_home/jb_da launch experiment by typing\n~hlam/Harmonie start DTG=2021010100 DTGEND=2021010103\nThe resulting analysis file be found under $TEMP/hm_home/jb_da/archive/2021/01/01/03 and it will be called MXMIN1999+0000 and on and ec:/$uid/harmonie/2021/01/01/03. To diagnose the 3D-VAR analysis increments of the jb_da-experiment, copy the files MXMIN1999+0000 (analysis) and ICMSHHARM+0003 (fg) to $SCRATCH. The first guess (background) file can be found on $TEMP/hm_home/jb_da/archive/2021/01/01/00 and ec:/$uid/harmonie/jb_da/2021/01/01/00. Convert from FA-file format to GRIB with the gl-software ($SCRATCH/hm_home/jb_da/bin/gl) by typing ./gl -p MXMIN1999+0000 and ./gl -p ICMSHANAL+0000. Then plot the difference between files file with your favorite software. Plot horizontal and vertical cross-sections of temperature and other variables using your favourite software (epygram for example).\nNow you have managed to insert the newly generated background error statistics to the assimilation system and managed to carry out a full scale data assimilation system and plot the analysis increments. The next natural step to further diagnose the background error statistics is to carry out a single observation impact experiment, utilizing your newly generated background error statistics. Note the variables REDNMC and REDZONE in include.ass. REDNMC is the scaling factor for the background error statistics (default value 0.6/0.9) for METCOOP25D/NEW_DOMAIN). REDZONE described how far from the lateral boundaries (in km) the observations need to be located to be assimilated (default value 150/100) for METCOOP25D/NEW_DOMAIN.","category":"page"},{"location":"DataAssimilation/StructureFunctions/#In-line-Interpolation-and-Extrapolation-of-Jb-statistics","page":"Structure functions","title":"In-line Interpolation and Extrapolation of Jb-statistics","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"In case you do not have existing background error statistics derived for your domain there is a built technical possibility to use Jb-files from another domain derived with the same number of vertical levels. From this host Jb-files background error statistics are then interpolated or extrapolated to the current domain configuration. The assumption is then (which is in general questionable) that the statistics derived derived on the host domain is as well valid for the current domain. If the longest side of the host domain is shorter than the longest side of the current domain an extrapolation of background error covariance spectra is needed. Such extrapolation should be avoided over a wide range of wavenumbers. 
Therefore it is recommended that the longest side of the host Jb-file is as long or longer than the longest side of the current domain. The interpolation is invoked by setting JB_INTERPOL=yes and JB_REF_DOMAIN=$HOST_JB in ecf/config_exp.h, where $HOST_JB is for example METCOOP25B. These settings will activate running of the script jbconv.sh (in case no Jb files are present for the current domain), called from Fetch_assim_data. ","category":"page"},{"location":"DataAssimilation/StructureFunctions/#On-going-work-and-future-developments","page":"Structure functions","title":"On-going work & future developments","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"Recent and on-going work as well as plans for future developments:","category":"page"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"Ongoing work regarding structure functions concerns investigations of the effects on B statistics and data assimilation of the upper-level relaxation towards ECMWF at the upper boundary condition through LUNBC=.true. Longer term research is directed towards flow dependent background error statistics and a closer link between the data assimilation and the ensemble forecasting system. Plans for future work also include adopting the cy46 Festat.standalone, which reads FA-files rather than femars-files. There is also a newly developed stand-alone tool for interpolation of Jb-statistics between different vertical levels (not recommended); it is not yet publicly available and documented. Finally it should be mentioned that there are alternative methods to EDA for carrying out STEP 2 of the background error statistics derivation. Such alternatives are BRAND and BREND and these have been tested and compared with EDA in various contexts, such as in reanalysis frameworks. The conclusion is that there are both pros and cons with BRAND as compared with EDA. The main conclusion is that both EDA and BRAND are hampered by the homogeneity and isotropy assumptions in the 3DVAR/4DVAR framework, so that differences are smaller than in hybrid DA frameworks. Therefore continued EDA/BRAND comparisons are carried out within hybrid ensemble/DA frameworks. Nevertheless we aim to also include here instructions for optionally replacing STEP 2 EDA in the procedure above with STEP 2 BRAND. We also aim to introduce instructions for using extended complementary diagnosis tools for Jb statistics based on the fediacov tool and associated plotting scripts. Such tools do exist, but they are not yet publicly available and documented. ","category":"page"},{"location":"DataAssimilation/StructureFunctions/#References","page":"Structure functions","title":"References","text":"","category":"section"},{"location":"DataAssimilation/StructureFunctions/","page":"Structure functions","title":"Structure functions","text":"festat_guidelines, Ryad El Katib, Meteo France, 2014\nfestatforfa_guidelines, Ryad El Katib, Meteo France, 2016","category":"page"},{"location":"PostProcessing/gl/#gl","page":"GL","title":"Post processing with gl","text":"","category":"section"},{"location":"PostProcessing/gl/#Introduction","page":"GL","title":"Introduction","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl (as in griblist) is a multi-purpose tool for file manipulation and conversion. 
It uses ECMWF's ecCodes library, and can be compiled with and without support for HARMONIE FA/LFI or NETCDF files. The gl package also includes software for extraction for verification, fldextr, and field comparison, xtool.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" USAGE: gl file [-n namelist_file] [-o output_file] -[lfgmicp(nc)sdtq] [-lbc CONF]\n\n gl [-f] file, list the content of a file, -f for FA/lfi files \n -c : Convert a FA/lfi file to grib ( -f implicit ) \n -p : Convert a FA file to grib output without extension zone\n (-c and -f implicit ) \n -nc : Convert a FA/lfi file to NetCDF ( -f implicit ) \n -musc : Convert a MUSC FA file ASCII ( -c implicit ) \n -lbc ARG : Convert a CONF file to HARMONIE input \n where CONF is ifs or hir as in ECMWF/HIRLAM data \n climate_aladin assumed available \n -d : Together with -lbc it gives a (bogus) NH boundary file \n climate_aladin assumed available \n -s : Work as silent as possible \n -g : Prints ksec/cadre/lfi info \n -m : Prints min,mean,max of the fields \n -i : Prints the namelist options (useless) \n -tp : Prints the GRIB parameter usage \n -t : Prints the FA/lfi/GRIB table (useful) \n -wa : Prints the atmosphere FA/NETCDF/GRIB table in wiki fmt \n -ws : Prints the surfex FA/NETCDF/GRIB table in wiki fmt \n -q : Cross check the FA/lfi/GRIB table (try) \n -pl X : Give polster_projlat in degrees \n\n gl file -n namelist_file : interpolates file according to \n namelist_file \n gl -n namelist_file : creates an empty domain according to \n specifications in namelist_file \n -igd : Set lignore_duplicates=T \n -igs : Set lignore_shortname=T. Use indicatorOfParameter \n instead of shortName for selection \n","category":"page"},{"location":"PostProcessing/gl/#ecCodes-definition-tables","page":"GL","title":"ecCodes definition tables","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Since ecCodes has replaced grib_api as the ECMWF primary software package to handle GRIB, we will hereafter only refer to ecCodes but same similar settings applies for grib_api as well. With the change to ecCodes we heavily rely on the shortName key for identification. To get the correct connection between the shortnames and the GRIB1/GRIB2 identifiers we have defined specific tables for harmonie. These tables can be found in /util/gl/definitions. 
To use these tables you have to define the ECCODES_DEFINITION_PATH environment variable as ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"export ECCODES_DEFINITION_PATH=SOME_PATH/gl/definitions:PATH_TO_YOUR_ECCODES_INSTALLATION","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"If this is not set correctly the interpretation of the fields may be wrong.","category":"page"},{"location":"PostProcessing/gl/#GRIB/FA/LFI-file-listing","page":"GL","title":"GRIB/FA/LFI file listing","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Listing of GRIB/ASIMOF/FA/LFI files.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" gl [-l] [-f] [-m] [-g] FILE","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where FILE is in GRIB/ASIMOF/FA/LFI format","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Option Description\n-l input format is LFI\n-f input format is FA\n -l and -f are equivalent\n-g print GRIB/FA/LFI header\n-m print min/mean/max values","category":"page"},{"location":"PostProcessing/gl/#GRIB/FA/LFI-file-conversion","page":"GL","title":"GRIB/FA/LFI file conversion","text":"","category":"section"},{"location":"PostProcessing/gl/#Output-to-GRIB1","page":"GL","title":"Output to GRIB1","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl [-c] [-p] FILE [ -o OUTPUT_FILE] [ -n NAMELIST_FILE]","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" \n-c converts the full field (including extension zone) from FA to GRIB1\n-p converts field excluding the extension zone (\"p\" as in physical domain) from FA to GRIB1","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The FA/LFI to GRIB mapping is done in a table defined by a util/gl/inc/trans_tab.h","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"To view the table:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl -t\ngl -tp","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"To check for duplicates in the table:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl -q","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The translation from FA/LFI to GRIB1 can be changed through a namelist like this one:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" &naminterp\n user_trans%full_name ='CLSTEMPERATURE',\n user_trans%t2v = 253,\n user_trans%pid = 123,\n user_trans%levtype = 'heigthAboveGround',\n user_trans%level = 002,\n user_trans%tri = 000,\n /","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"or for the case where the level number is included in the FA name","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" &naminterp\n user_trans%full_name='SNNNEZDIAG01',\n user_trans%cpar='S'\n user_trans%ctyp='EZDIAG01',\n ...\n /","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Conversion can be refined to convert a selection of fields. 
Below is and example that will write out ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"T (shortname='t',pid=011), u (shortname='u',pid=033) andv (shortname='v',pid=034) on all (level=-1) model levels (levtype='hybrid')\nT (shortname='t',pid=011) at 2m (lll=2) above the ground (levtype='heightAboveGround') [T2m]\nTotal precipitation (shortname='tp',pid=061,levtype='heightAboveGround',level=000)","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" &naminterp\n readkey%shortname= 't', 'u', 'v', 't', 'tp', 'fg',\n readkey%levtype='hybrid','hybrid','hybrid','heightAboveGround','heightAboveGround','heightAboveGround',\n readkey%level= -1, -1, -1, 2, 0, 10,\n readkey%tri = 0, 0, 0, 0, 4, 2,\n /","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"shortname is the ecCodes shortname of the parameter \nlevtype is the ecCodes level type\nlevel is the GRIB level\ntri means timeRangeIndicator and is set to distinguish between instantaneous, accumulated and min/max values.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The first three ones are well known to most users. The time range indicator is used in HARMONIE to distinguish between instantaneous and accumulated fields. Read more about the options here Note that for levtype hybrid setting level=-1 means all. ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"We can also pick variables using their FA/lfi name:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" &naminterp\n readkey%faname = 'SPECSURFGEOP','SNNNTEMPERATURE',\n /","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Where SNNNTEMPERATURE means that we picks all levels.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Fields can be excluded from the conversion by name","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" &naminterp\n exclkey%faname = 'SNNNTEMPERATURE'\n /","category":"page"},{"location":"PostProcessing/gl/#Output-to-GRIB2","page":"GL","title":"Output to GRIB2","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"To get GRIB2 files the format has to be set in the namelist as ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" &naminterp\n output_format = 'GRIB2'\n /","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The conversion from FA to GRIB2 is done in gl via the ecCodes tables. 
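As an illustration, a complete GRIB2 conversion could look like the sketch below, which only combines the pieces documented above (the namelist file name nam_grib2, the input file ICMSHHARM+0012 and the output file name are placeholders):

```bash
# Use the HARMONIE ecCodes definition tables shipped with gl (paths are examples)
export ECCODES_DEFINITION_PATH=SOME_PATH/gl/definitions:PATH_TO_YOUR_ECCODES_INSTALLATION

# Namelist requesting GRIB2 output
cat > nam_grib2 << EOF
 &naminterp
 output_format = 'GRIB2'
 /
EOF

# Convert an FA history file, excluding the extension zone, to GRIB2
gl -p ICMSHHARM+0012 -n nam_grib2 -o ICMSHHARM+0012.grib2
```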
All translations are defined in util/gl/scr/harmonie_grib1_2_grib2.pm where we find all settings required to specify a parameter in GRIB1 and GRIB2.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"\n tmax => {\n editionNumber=> '2',\n comment=> 'Maximum temperature',\n discipline=> '0',\n indicatorOfParameter=> '15',\n paramId=> '253015',\n parameterCategory=> '0',\n parameterNumber=> '0',\n shortName=> 'tmax',\n table2Version=> '253',\n typeOfStatisticalProcessing=> '2',\n units=> 'K',\n },\n","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"To create ecCodes tables from this file run","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" cd gl/scr\n ./gen_tables.pl harmonie_grib1_2_grib2","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"and copy the grib1/grib2 directories to gl/definitions.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Note that there are no GRIB2 transations yet defined for the SURFEX fields!","category":"page"},{"location":"PostProcessing/gl/#Output-to-NetCDF","page":"GL","title":"Output to NetCDF","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl -nc [-p] FILE [ -o OUTPUT_FILE] [ -n NAMELIST_FILE]","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" \n-p converts field excluding the extension zone (\"p\" as in physical domain) from FA to NetCDF\n-o output file name\n-n namelist file to be used in conversion","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The FA/LFI to NetCDF mapping is done using tables defined by util/gl/inc/trans_tab.h and util/gl/inc/nc_tab.h","category":"page"},{"location":"PostProcessing/gl/#Namelist-options-for-NetCDF","page":"GL","title":"Namelist options for NetCDF","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The translation from FA/LFI to NetCDF can be changed through a namelist like this one:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" &naminterp\n user_nctrans%full_name ='SFX.SIC',\n user_nctrans%s_name = \"\" \n user_nctrans%l_name = \"Sea-Ice Area Percentage (Atmospheric Grid)\",\n user_nctrans%unit = \"%\" ,\n /","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The unit entry can be used to do a limited set of unit conversions, in the example above SIC will be converted from the original units (fraction) to a percentage.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Other specific naminterp options for converting to netcdf:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"variable description default\nlacc2flux T: Convert accumulated fields (tri=4) to fluxes by dividing by the length of the interval FF-F0. Names, units and tri are adapted. 
.FALSE.\nlmergelevs T: write all levels of a variable to the same file; F: each level in a separate file .FALSE.\nlclimate_fields T: don't add a time dimension and variable, useful for climate fields like land mask .FALSE.\nlvertices T: add vertices (corner points) to netcdf file, only possible for newly created files .FALSE.\nlhistory T: add history global attribute to netcdf file .FALSE.\nikindnc NetCDF version (3 4), 3: larger files, but faster; 4: compressed, but slow\nref_date Reference date, used to generate relative time axis 19500101\nref_hour Reference hour, used to generate relative time axis 0\nctimeis time refers to \"start\", \"middle\", or \"end\" of interval for non-instantaneous fields. If writing several variables to 1 file, that don't have the same timing (e.g. accumulated vs. instantaneous), then \"end\" is probably the only safe option! end\ncsep separator in derived netcdf file name _\ncdatefname format for date in derived netcdf file name, if not recognized as format, use whatever is passed YYYYMMDDHH\ncfiden used in derived netcdf file name to indicate origin (e.g. his, sfx, fp) \ncfreq used in derived netcdf file name and as \"frequency\" global attribute (e.g. 1hr, 3hr, day, mon) \nchm_rev HARMONIE version, used as \"model_id\" global attribute \ncdomain domain name, used in derived netcdf file name and as \"domain\" global attribute \ncexperiment experiment id, used in derived netcdf file name and as \"experiment_id\" global attribute \ncinstitute used as \"institute_id\" global attribute \nchostmod used as \"drivingmodelid\" global attribute ","category":"page"},{"location":"PostProcessing/gl/#Setting-fstart-for-min-max-fields","page":"GL","title":"Setting fstart for min-max fields","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Min and max fields, with tri=2 (time range indicator) are valid for a certain period. By default the period is 3h, but this can be changed via variables FREQ_RESET_TEMP and FREQ_RESET_GUST in ecf/config_exp.h, for example to 1 to store min/max temperature over an hour. By default gl doesn’t have info on this frequency and it is assumed they are valid since the start of the run. Use the namelist option fstart to assign the appropriate starting value, e.g.:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"fstart(15) = 3","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"for max t2m (parameter code 15). The fstart value is then used in the time_bnds. This value needs to be updated with FREQ_RESET. In Makegrib_gribex and convertFA there are examples of how to do this. 
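As a minimal sketch (an assumption on my part that fstart can be combined with a readkey selection in the same &naminterp block; the tmax shortname and the heightAboveGround level settings are taken from the translation table above and should be adapted to your own output), selecting maximum 2 m temperature valid over the default 3 h period could look like:\n &naminterp\n readkey%shortname = 'tmax',\n readkey%levtype = 'heightAboveGround',\n readkey%level = 2,\n readkey%tri = 2,\n fstart(15) = 3,\n /\nAdjust the parameter code in fstart and its value so that they match your own FREQ_RESET_TEMP setting. 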
Note that this works in the same way for NetCDF and GRIB.","category":"page"},{"location":"PostProcessing/gl/#Derived-file-name","page":"GL","title":"Derived file name","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"If no output file name is supplied (-o flag) an output file name is derived from available info:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"nc_fld_name[_levinfo][_cfiden][_cdomain][_cexperiment][_cfreq][_timeinfo].nc","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"with: | | | | –- | –- | | ncfldname | name of netcdf variable as defined in nctab.h | | levinfo | indication of level info, with *lev if all levels are written to the same file (lsplitlev), or a level number/height otherwise | | cfiden | identifier of input file (e.g. his, sfx), set via namelist | | cdomain | domain name, set via namelist | | cexperiment | experiment name, set via namelist | | cfreq | frequency, set via namelist | | timeinfo | indicator of file date/time, format controlled via cdatefname namelist variable, not used if lclimatefields |","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"If elements are not set, either via the namelist or by a default value, they are excluded from the name. The separator is a _ by default but can be changed via the csep namelist variable. ","category":"page"},{"location":"PostProcessing/gl/#Time-axis","page":"GL","title":"Time axis","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"A relative time axis (days since …) is created using the refdate and refhour as reference. In existing files the current time step is looked up, and if it already exists it is overwritten. If it doesn’t exist yet, it is appended to the end of the file. Note that if time steps are not converted in order the time axis will not be consecutive. For non-instantaneous fields a time bounds variable is added. The start of the interval is taken from fstart (from tri=2) or outkey%F0, or just from the input file (usually 0). The end of the interval is the current time step. Whether the time variables refers to the beginning, middle or end of the interval can be controlled with the ctimeis namelist variable. If instantaneous and non-instantaneous files are written to the same file, it is best to use ctimeis=end. Start may also work, but this should be tested first. With the namelist variable cdatefname you can write output from multiple cycles to the same file. For example by setting it to YYYYMM the derived file name will contain year and month info, but not day and hour, so all cycles from a month are written to the same file. Be careful with the first time step of a cycle when using cdatefname as gl will overwrite the last time step of the previous cycle with those of the first step of the new cycle. You can decide to skip the first time step, or multiple steps, if cycles overlap more than 1 step. ","category":"page"},{"location":"PostProcessing/gl/#Multilevel-fields-and-fields-on-heights-or-pressure-levels","page":"GL","title":"Multilevel fields and fields on heights or pressure levels","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"All levels of multilevel fields can be written to one file (lmergelevs=.TRUE.) or to separate files (default). 
This is possible for model levels, pressure levels and height levels.","category":"page"},{"location":"PostProcessing/gl/#All-levels-in-one-file","page":"GL","title":"All levels in one file","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"If all levels are written to the same file and additional dimension is needed: lev for model levels, height for height levels. For model level fields, named SNNN, the number of levels is derived from the input file (glistnlev). For height level fields, HNNNNN, currently the heights must be set via the hlevlist namelist variable. The heights in this list are used to expand the HNNNNN (to H00010, H00250 etc) and are also used as coordinate variable. For pressure levels PNNNNN is used in the same way.","category":"page"},{"location":"PostProcessing/gl/#Single-level-fields-on-height","page":"GL","title":"Single level fields on height","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"For single level fields on a specific height such as 2m temperature and 10m wind a height variable is added with that height. This is done if level type = 105 and level ≠ 0. This may not be appropriate in all cases. Note that for some fields level is abused (e.g. level 760 for the sea tile), which gives useless height. The same approach is used when outputting multilevel fields with lsplitlev=.TRUE. (default).","category":"page"},{"location":"PostProcessing/gl/#Don’t-mix-fields","page":"GL","title":"Don’t mix fields","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"There is a check that all fields on height levels in one file have the same height specification, because only 1 height variable can be specified at the moment. So t2m and w10m cannot be in the same file. Not sure if the check is foolproof. It may be possible to define multiple heights in the code, e.g. height, height2 etc., but this has not been implemented yet. ","category":"page"},{"location":"PostProcessing/gl/#NetCDF-3-or-NetCDF-4","page":"GL","title":"NetCDF 3 or NetCDF 4","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"With the ikindnc namelist options the netcdf format can be set. NetCDF 4 files are compressed in gl, however, this makes the conversion much slower. At the moment it seems better to let gl use the NetCDF 3 format and then convert them to NetCDF 4 after creation of the file has finished. This can be done with the following command:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"nccopy -k 4 -d 1 -s $nc3_file $nc4_file","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":" \n-d deflate level\n-s shuffling (can improve compression, speed and size)","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"In scr/convertFA this can be done by setting nc3to4=yes (default). At the end of the script the files are then converted from netcdf3 to netcdf4-classic with compression.","category":"page"},{"location":"PostProcessing/gl/#Direction-of-fluxes","page":"GL","title":"Direction of fluxes","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"A new element positive was added to the nctrans derived type in moduletypez, and in nctab.h positive, can be empty, 'd' or 'u'. 
If it is not empty a positive attribute is added to the variable in the NetCDF file. If it has value 'u', the values of the variable are multiplied by −1 to change the direction from towards the surface to away from the surface.","category":"page"},{"location":"PostProcessing/gl/#Fill-value","page":"GL","title":"Fill value","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"By default no missing value is added for atmospheric fields. For SURFEX fields either 999 (version ≤ 4) or 1e+20 (version ≥ 5) is used. It is possible to add a missing value via the namelist. To do so, in the namelist set the variable lcheck_misval to .TRUE. and set rmisval to the correct value.","category":"page"},{"location":"PostProcessing/gl/#Adding-new-netcdf-variables","page":"GL","title":"Adding new netcdf variables","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"If you get messages like:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"No NETCDF conversion for ....","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"then you need to add the field to util/gl/inc/nc_tab.h, which contains the translation from FA to netcdf names. The file util/gl/inc/trans_tab.h contains the conversion from FA names to GRIB codes. If the field you would like to add is absent there, it is probably best to add it in that file as well, as for example GRIB level types are used for functionality in the netcdf conversion as well. Remember to recompile.","category":"page"},{"location":"PostProcessing/gl/#postprocessing","page":"GL","title":"postprocessing","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl can be used to produce postprocessed parameters possibly not available directly from the model. ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Postprocessed parameters are defined in util/gl/grb/postprocess.f90 and util/gl/grb/postp_pressure_level.f90. Some of the more popular parameters are listed:\nPseudo satellite pictures\nTotal precipitation and snow\nWind (gust) speed and direction\nCloud base, cloud top, cloud mask and significant cloud top","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"For a comprehensive list please check the output information for each cycle. NOTE that not all parameters may be implemented in gl.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"To produce \"postprocessed\" MSLP, accumulated total precipitation and visibility use the following namelist, nam_FApp:\n&naminterp\n pppkey(1:3)%shortname='pres','tp','vis',\n pppkey(1:3)%levtype='heightAboveSea','heightAboveGround','heightAboveGround'\n pppkey(1:3)%level= 0, 0, 0,\n pppkey(1:3)%tri= 0, 4, 0,\n lwrite_pponly= .TRUE.,\n/\ngl -p ICMSHHARM+0003 -o output_pp.grib -n nam_FApp\nNote:\nSet lwrite_pponly to true to write only the postprocessed fields to file\nSet lwrite_pponly to false to write all fields to the file, input fields as well as the postprocessed fields.","category":"page"},{"location":"PostProcessing/gl/#Vertical-interpolation","page":"GL","title":"Vertical interpolation","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl can be used to carry out vertical interpolation of parameters. 
Four types are available","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"HeightAboveSea, give height above sea in meters\nHeightAboveGround, give height above ground in meters\nHeightAboveGroundHighPrecision, give height above ground in centimeters\nisobaricInHpa, give height above sea in hPa\nTo interpolation temperature to 1.40m (level 140 in cm) use the following namelist, nam_hl:\n&naminterp\n pppkey(1:1)%shortname='t',\n pppkey(1:1)%levtype='heightAboveGroundHighPrecision',\n pppkey(1:1)%level= 140,\n pppkey(1:1)%tri= 0,\n vint_z_order=1,\n lwrite_pponly= .TRUE.,\n/\ngl -p ICMSHHARM+0003 -o output_hl.grib -n nam_hl\nNote:\nVertical interpolation to z levels is controlled by VINTZORDER: 0 is nearest level, 1 is linear interpolation\nTo height interpolation (Levls 500, 850 and 925 in hPa, type=100) use the following namelist, nam_pl:\n&naminterp\n pppkey(1:3)%shortname='t','t','t',\n pppkey(1:3)%levtype='isobaricInhPa','isobaricInhPa','isobaricInhPa',\n pppkey(1:3)%level= 500, 850, 925,\n pppkey(1:3)%tri= 0, 0, 0,\n vint_z_order=1,\n lwrite_pponly= .TRUE.,\n/\ngl -p ICMSHHARM+0003 -o output_pl.grib -n nam_pl","category":"page"},{"location":"PostProcessing/gl/#Horizontal-interpolation","page":"GL","title":"Horizontal interpolation","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Interpolation/resampling between different geometries such as regular lat lon, Lambert conformal, Polar steregraphic, rotated lat lon and rotated Mercator is possible with gl\nThe interpolation methods available are:\nnearest grid-point (order=-2)\nmost representative grid-point (order=-1)\nnearest grid-point (order=0)\nbi-linear (order=1)\nbi-quadratic (order=2, mask not respected)\nbi-cubic (order=3, mask not respected)\nExample of (an Irish) rotated lat lon domain, nam_FArotll:\n&naminterp\n outgeo%nlon=50,\n outgeo%nlat=50,\n outgeo%nlev=-1,\n outgeo%gridtype='rotated_ll',\n outgeo%west=-2.5,\n outgeo%south=-2.5,\n outgeo%dlon=0.1,\n outgeo%dlat=0.1,\n outgeo%polon=-6.7,\n outgeo%polat=-36.2,\n order= 1,\n/","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where DLON/DLAT are in degrees.The HIRLAM Domain Tool may be of use for viewing rotated lat lon domains.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl -p ICMSHHARM+0003 -n nam_FArotll -o output.grib","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Example of a lambert domain\n&naminterp\n outgeo%nlon = 50 ,\n outgeo%nlat = 50,\n outgeo%nlev = -1,\n outgeo%gridtype = 'lambert',\n outgeo%west = 15.0\n outgeo%south = 50.0\n outgeo%dlon = 10000.\n outgeo%dlat = 10000.\n outgeo%projlat = 60.\n outgeo%projlat2 = 60.\n outgeo%projlon = 15.\n/\nwhere DLON/DLAT are in meters.The HIRLAM Domain Tool may be of use for viewing rotated lat lon domains.\nExample polar stereographic projection\n&naminterp\n outgeo%nlon = 50 ,\n outgeo%nlat = 50,\n outgeo%nlev = -1,\n outgeo%gridtype = 'polar_stereographic',\n outgeo%west = 15.0\n outgeo%south = 50.0\n outgeo%dlon = 10000.\n outgeo%dlat = 10000.\n outgeo%projlat = 60.\n outgeo%projlon = 15.\n/\nwhere DLON/DLAT are in meters.Note: the GRIB1 standard assumes that the projection plane is at 60 degrees north whereas HARMONIE assumes it is at 90 degrees north.\nExample rotated Mercator\n&naminterp\n outgeo%nlon = 50 ,\n outgeo%nlat = 50,\n outgeo%nlev = -1,\n outgeo%projection = 11,\n outgeo%west = 15.0\n outgeo%south = 50.0\n 
outgeo%dlon = 10000.\n outgeo%dlat = 10000.\n outgeo%projlat = 60.\n outgeo%projlon = 15.\n/\nwhere DLON/DLAT are in meters. Note: rotated Mercator is not supported in GRIB1.\nGeographical points are a special case of projection 0; use the namelist file nam_FAgp:\n&naminterp\n outgeo%nlon=3 ,\n outgeo%nlat=1,\n outgeo%nlev=-1,\n outgeo%gridtype='regular_ll',\n outgeo%arakawa= 'a',\n order = 0,\n readkey(1:3)%shortname='t','u','v',\n readkey(1:3)%levtype='heightAboveGround','heightAboveGround','heightAboveGround',\n readkey(1:3)%level= 2, 10, 10,\n readkey(1:3)%tri= 0, 0, 0,\n linterp_field = f,\n gplat = 57.375,57.35,57.60\n gplon = 13.55,13.55,14.63\n/\nThe result will be written to an ASCII file with the name gpYYYYMMDDHHLLL.\ngl -p ICMSHHARM+0003 -n nam_FAgp \ncat gp20140702_1200+003","category":"page"},{"location":"PostProcessing/gl/#Extract-(crop)-a-sub-domain","page":"GL","title":"Extract (crop) a sub-domain","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl can be used to \"cut out\" a sub-domain from an input file using the namelist namCUT:","category":"page"},{"location":"PostProcessing/gl/#Crop-using-lower-left-and-upper-right-coordinates","page":"GL","title":"Crop using lower left and upper right coordinates","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"&naminterp\nistart = 150\njstart = 150\nistop = 350\njstop = 350\n/","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Use this command:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"gl input.grib -n namCUT -o cutout.grib","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Another way of specifying your sub-domain is to define how many points to exclude at the end","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"&naminterp\nistart = 150\njstart = 150\nistop = -10\njstop = -10\n/","category":"page"},{"location":"PostProcessing/gl/#Crop-using-SW,NE-corner-and/or-number-of-points","page":"GL","title":"Crop using SW,NE corner and/or number of points","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Here, you specify any of the SouthWest, NorthEast corners and/or the number of gridpoints","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"&naminterp\noutgeo%gridtype = 'crop',\noutgeo%nlat = 200,\noutgeo%nlon = 300,\noutgeo%south = 50.155,\noutgeo%west = -12.88,\n/","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Or ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"&naminterp\noutgeo%gridtype = 'crop',\noutgeo%north = 58.277,\noutgeo%east = 12.3,\noutgeo%south = 50.155,\noutgeo%west = -12.88,\n/","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"If you specify outgeo%gridtype as 'crop', the SouthWest corner will be translated to lower left grid coordinates and Nlat,Nlon will translate to upper right coordinates. You may specify any of SW, NE, nlat/nlon. Priority is given to SW, NE. The behaviour is as follows:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"SW and NE have priority, these will anchor either corner. 
If a corner is not specified, Nlat/Nlon will extend from the other corner.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"If only one coordinate is specified, the other corner becomes the corner of the input domain. So: ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Specify only SW and you get a crop from there to the NE corner of the input domain.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Specify only NE and you get a crop from the SW corner of the input domain.\nSpecify SW and NE and you get a crop between these corners","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Specify SW and Nlat/Nlon and you get Nlat x Nlon from SW corner.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Specify NE and Nlat/Nlon and you get Nlat x Nlon south and west of NE corner.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Specify SW, NE and Nlat/Nlon and you get a crop between SW/NE corners. Nlat/Nlon are ignored.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Specify only Nlat/Nlon and you get Nlat x Nlon from SW corner of the input domain.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The crop must be within the original domain unless you set","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"ldemand_inside = .FALSE.","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"in the namelist. In this case, the crop will be adjusted to lie within the original domain and the output will be smaller than Nlat x Nlon. In the case where the requested crop lies entirely outside the original domain, the program will abort.","category":"page"},{"location":"PostProcessing/gl/#Rotating-wind-components","page":"GL","title":"Rotating wind components","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"HARMONIE produces u and v wind components relative to the model grid. gl by default always outputs u and v relative to the output grid. So if no regridding is done and the output is still on the LCC grid, u and v will also still be relative to the LCC grid. But if the output is regridded to a regular lat-lon grid, then u and v will be rotated and will be relative to the regular lat-lon grid. Wind (from) direction (parameter 31), however, is always relative to a regular lat-lon grid. To rotate u and v to regular lat-lon, while retaining the data on the LCC grid set uvrelativetogrid=0 in the namelist. All u and v vectors that will be processed will be rotated to geographical E and N directions. ","category":"page"},{"location":"PostProcessing/gl/#Output-to-several-files","page":"GL","title":"Output to several files","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"It is possible to let gl read data once and do processing loops with these data. 
Let us look at an example namelist:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"&naminterp\n OUTPUT_FORMAT='MEMORY'\n/\n&naminterp\n INPUT_FORMAT='MEMORY'\n OUTPUT_FORMAT='GRIB'\n OUTFILE='test1.grib'\n/\n&naminterp\n INPUT_FORMAT='MEMORY'\n OUTPUT_FORMAT='GRIB'\n OUTFILE='test2.grib'\n READKEY%FANAME='SNNNTEMPERATURE'\n/\n&naminterp\n INPUT_FORMAT='MEMORY'\n OUTPUT_FORMAT='GRIB'\n READKEY%FANAME='CLSTEMPERATURE'\n outgeo%nlon = 50 ,\n outgeo%nlat = 50,\n outgeo%nlev = -1,\n outgeo%gridtype = 'polar_stereographic',\n outgeo%west = 15.0\n outgeo%south = 50.0\n outgeo%dlon = 10000.\n outgeo%dlat = 10000.\n outgeo%projlat = 60.\n outgeo%projlon = 15.\n OUTFILE='test3.grib'\n/","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"In the first loop we read data and store it in memory. In the second loop we read the data from memory and output it to the file test1.grib. Then we make two more loops: in the first we only output a subset, and in the last we also do an interpolation to a new grid. The data in memory is however still untouched.","category":"page"},{"location":"PostProcessing/gl/#Input-from-several-files","page":"GL","title":"Input from several files","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"It's also possible to read several files and write them into one. This is used to gather the various FA fields written from the IO-server. A typical namelist would look like:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"&naminterp\n maxfl=28,\n output_format='MEMORY',\n output_type = 'APPEND',\n input_format='FA',\n infile='forecast/io_serv.000001.d/ICMSHHARM+0003.gridall',\n/\n&naminterp\n output_format='MEMORY',\n output_type = 'APPEND',\n input_format='FA',\n infile='forecast/io_serv.000002.d/ICMSHHARM+0003.gridall',\n/\n...\n&naminterp\n input_format = 'MEMORY',\n output_format= 'GRIB'\n output_type = 'NEW',\n outfile = 'test.grib'\n/","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"where maxfl tells how many files will be read.","category":"page"},{"location":"PostProcessing/gl/#domain_prop","page":"GL","title":"domain_prop","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop is used to extract various properties from a file. ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Climate: $MPPGL $BINDIR/domain_prop -DOMAIN_CHECK $LCLIMDIR/m$M1 -f || \\","category":"page"},{"location":"PostProcessing/gl/#Check-an-existing-domain-with-a-namelist-specification","page":"GL","title":"Check an existing domain with a namelist specification","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -DOMAIN_CHECK -f CLIMATE_FILE","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"The geometry is read from fort.10 and the program aborts if the new and old geometries differ. 
See scr/Climate for an example.","category":"page"},{"location":"PostProcessing/gl/#Check-if-Q-is-in-gridpoint-or-spectral-representation","page":"GL","title":"Check if Q is in gridpoint or spectral representation","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -f -QCHECK FAFILE","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"returns 1 if Q is spectral and 0 if Q is in gridpoint space.","category":"page"},{"location":"PostProcessing/gl/#Check-if-a-specific-field-is-present","page":"GL","title":"Check if a specific field is present","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -f -CHECK_FIELD S001CLOUD_FRACTI","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"returns 1 if S001CLOUD_FRACTI is found, 0 otherwise","category":"page"},{"location":"PostProcessing/gl/#Check-the-number-of-levels-in-a-file","page":"GL","title":"Check the number of levels in a file","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -f -NLEV FAFILE ","category":"page"},{"location":"PostProcessing/gl/#Check-the-geographical-extension-of-the-domain","page":"GL","title":"Check the geographical extension of the domain","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -f -MAX_EXT FAFILE ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"This is used in several places to determine the domain to be extracted from MARS or limit the observations sample. Another way is to provide the projection parameters of your domain as input","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -MAX_EXTR \\\n-NLON $NLON -NLAT $NLAT \\\n-LATC $LATC -LONC $LONC \\\n-LAT0 $LAT0 -LON0 $LON0 \\\n-GSIZE $GSIZE","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"To get the geographical position of the lower left corner use","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -f -LOW_LEFT FAFILE ","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"To print out the important projection parameters in a file use:","category":"page"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -f -4JB FAFILE","category":"page"},{"location":"PostProcessing/gl/#Get-time-information-from-a-file","page":"GL","title":"Get time information from a file","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"domain_prop -f -DATE FAFILE","category":"page"},{"location":"PostProcessing/gl/#fldextr-and-obsextr","page":"GL","title":"fldextr and obsextr","text":"","category":"section"},{"location":"PostProcessing/gl/","page":"GL","title":"GL","text":"Read about the verification extraction programs here","category":"page"},{"location":"System/MFaccess/#Using-Météo-France-Servers","page":"MF Access","title":"Using Météo-France Servers","text":"","category":"section"},{"location":"System/MFaccess/#Introduction","page":"MF Access","title":"Introduction","text":"","category":"section"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"The procedure to get access to MF servers and their read-only git repository is outlined 
here","category":"page"},{"location":"System/MFaccess/#First-steps","page":"MF Access","title":"First steps","text":"","category":"section"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"Discuss your requirements for access to MF servers with the HIRLAM System project leader, Daniel Santos (dsantosm@aemet.es).\nDownload two forms \"Undertaking for the use of Météo-France computer resources\" and \"Demande d'authorisation de conexion au résau de Météo Franc\" from http://www.cnrm.meteo.fr/aladin/spip.php?article157. \nThe \"Undertaking for the use of Météo-France computer resources\" form is to be signed by you only\nThe \"Demande d'authorisation de conexion au résau de Météo France\" must be signed by you and your department head. It must also include an institute stamp. You should enter details in Contacts, Compte d'accesés aux machines du Centre de Cacul and at the bottom with authorization from you institute manager with institute stamp. - A scan of both forms with a brief introductory note should be sent to Eric Escaliere (eric.escaliere@meteo.fr) and cc'ed to Daniel Santos (dsantosm@aemet.es) and Claude Fischer (claude.fischer@meteo.fr).\nBe careful with the \"Machine du client\". I had to specify the name and IP address of my institute's Firewall server as this is what the outside world sees when I access external servers from my PC.\nMétéo-France will send (by post) your username (Identificateur) and password (Mot de passe) for log in.\nThe authentication process itself remains in two steps (first “parme”, then target), as before. \nA few specific examples follow (see MF's instructions for full details):\nbeaufix:","category":"page"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"ewhelan@realin23:gcc-8.3.1:.../~> which beaufix\nalias beaufix='telnet beaufix.meteo.fr'\n\t/usr/bin/telnet\newhelan@realin23:gcc-8.3.1:.../~> beaufix \nTrying 137.129.240.110...\nConnected to beaufix.meteo.fr.\nEscape character is '^]'.\nCheck Point FireWall-1 authenticated Telnet server running on mascarpone\nUser: whelane\npassword: your_parme_password\nUser whelane authenticated by FireWall-1 authentication\n\nConnected to 137.129.240.110\nRed Hat Enterprise Linux Server release 6.9 (Santiago)\nKernel 2.6.32-696.6.3.el6.x86_64 on an x86_64\nbeaufixlogin0 login: whelane\nPassword: your_ldap_password\nLast login: Tue Oct 13 10:15:53 from gw2.met.ie\n _ __ _ \n| | / _|(_) \n| |__ ___ __ _ _ _ | |_ _ __ __\n| '_ \\ / _ \\ / _` || | | || _|| |\\ \\/ /\n| |_) || __/| (_| || |_| || | | | > < \n|_.__/ \\___| \\__,_| \\__,_||_| |_|/_/\\_\\ \n\n[whelane@beaufixlogin0 ~]$ ","category":"page"},{"location":"System/MFaccess/#What-next?-**TO-BE-CONFIRMED**","page":"MF Access","title":"What next? TO BE CONFIRMED","text":"","category":"section"},{"location":"System/MFaccess/#Access-to-MF-servers-via-parme","page":"MF Access","title":"Access to MF servers via parme","text":"","category":"section"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"Once you are happy that you can access PARME from your PC you should once again contact Eric Escaliere (eric.escaliere@meteo.fr) and request login details for merou (Eric will send you a temporary password) and LDAP login details to front-id to enable access to COUGAR, YUKI, BEAUFIX and ID-FRONT\nAn automatic e-mail will be sent from expl-identites@meteo.fr with you LDAP repository password.\nfront-id requires certain criteria for your password. These are detailed in French below. 
When you have received LDAP login details for front-id:\newhelan@eddy:~> telnet parme.meteo.fr\nTrying 137.129.20.1...\nConnected to parme.meteo.fr.\nEscape character is '^]'.\nCheck Point FireWall-1 authenticated Telnet server running on parmesan\nUser: whelane\npassword: ********\nUser whelane authenticated by FireWall-1 authentication\nHost: front-id\n\nConnected to id-front\nRed Hat Enterprise Linux AS release 4 (Nahant Update 5)\nKernel 2.6.9-55.ELsmp on an x86_64\nlogin: whelane\nPassword: \nLast login: Mon Nov 4 05:14:22 from gw2.met.ie\nBienvenue EOIN WHELAN\nVous pouvez changer votre mot de passe\n-------------------------------------------------------------------------\n- Controle de validite sur les mots de passe avant de poster la demande -\n- Le OLD doit etre fourni. -\n- Au moins 8 car, au plus 20 car. -\n- Au moins 2 car. alpha et 2 car. non-alpha. -\n- Ne pas ressembler a UID NAME et OLD sur une syllabe de + de 2 car. -\n-------------------------------------------------------------------------\n-------------------------------------------------------------------------\nHello EOIN WHELAN\nYou may change your password\n-------------------------------------------------------------------------\n- Validity control before demand acceptation -\n- You must enter the old password first -\n- The new password must contain: -\n- At least 8 characters, 20 characters maximum -\n- At least 2 alphanumeric characters and 2 non-alphanumeric characters -\n- The passwd must contain a part of UID NAME -\n-------------------------------------------------------------------------\nChanging password for user 'whelane(56064)'.\nEnter login(LDAP) password: \nNew password: \nRe-enter new password: \nVotre mot de passe a ete change\nWhen you have received login details for merou from Eric:\newhelan@eddy:~> telnet parme.meteo.fr\nTrying 137.129.20.1...\nConnected to parme.meteo.fr.\nEscape character is '^]'.\nCheck Point FireWall-1 authenticated Telnet server running on parmesan\nUser: whelane\npassword: ********\nUser whelane authenticated by FireWall-1 authentication\nHost: merou\n\nConnected to merou\nRed Hat Enterprise Linux Server release 5.6 (Tikanga)\nKernel 2.6.18-238.el5 on an x86_64\nlogin: whelane\nPassword: \nLast login: Tue Nov 5 10:06:35 from gw2.met.ie\n[whelane@merou ~]$ passwd\nChanging password for user whelane.\nChanging password for whelane\n(current) UNIX password: \nNew UNIX password: \nRetype new UNIX password: \npasswd: all authentication tokens updated successfully.\n[whelane@merou ~]$ ","category":"page"},{"location":"System/MFaccess/#Access-to-(read-only)-MF-git-arpifs-git-repository","page":"MF Access","title":"Access to (read-only) MF git arpifs git repository","text":"","category":"section"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"MF use ssh keys to allow access to their read-only git repository. 
If approved by the HIRLAM System PL you should request access to the repository by sending a request e-mail to Eric Escaliere, cc'ed to Daniel Santos and Claude Fischer, with your ssh public key attached.","category":"page"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"Once you have been given access you can create a local clone by issuing the following commands:","category":"page"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"cd $HOME\nmkdir arpifs_releases\ncd arpifs_releases\ngit clone ssh://reader054@git.cnrm-game-meteo.fr/git/arpifs.git","category":"page"},{"location":"System/MFaccess/","page":"MF Access","title":"MF Access","text":"Happy gitting!","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Data-assimilation-algorithms","page":"Algorithms","title":"Data assimilation algorithms","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/#3D-Var","page":"Algorithms","title":"3D-Var","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"The default upper-air data assimilation algorithm is three-dimensional variational assimilation (3D-Var). To use 3D-Var no changes to ecf/config_exp.h should be required assuming structure function data files are available for your domain. Structure function input is defined in scr/include.ass.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Settings","page":"Algorithms","title":"Settings","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"List of 3D-Var settings that the user should be concerned about.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#4D-Var","page":"Algorithms","title":"4D-Var","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"With 43h2.2 four-dimensional variational assimilation (4D-Var) is available as a non-default option. In order to set up an experiment to use 4D-Var one should issue the following commands:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"mkdir -p /path/to/home/hm_home/MY_EXP\ncd /path/to/home/hm_home/MY_EXP\n/path/to/Harmonie/config-sh/Harmonie setup -r /path/to/Harmonie -c AROME_4DVAR","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"To use 4D-Var no changes to ecf/config_exp.h should be required assuming suitable structure function data files are available for your domain (see also ILRES in the settings section). Structure function input is defined in scr/include.ass.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Settings-2","page":"Algorithms","title":"Settings","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"List of 4D-Var settings that the user should be concerned about.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"With the following settings the working of the 4D-Var can be changed. Default values are given:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"NOUTERLOOP=2 # number of 4DVAR outer loops\nILRES=6,3 # Resolution (in parts of full) of outer loops as compared to the forecast resolution. 
The domain NLATxNLON should have the property that for the settings of ILRES both NLON/ILRES and NLAT/ILRES are of the form 5^c^ 3^d^ 2^e^, where c, d and e are integers >= 0 and e>=1.\nTSTEP4D=300,150 # Timestep length (seconds) of outer loops TL+AD\nTSTEPTRAJ=300,300 # How often the model state is saved for linearization\nNITER4D=10,15 # Maximum number of inner loop iterations in 4D-Var outer loops\nNSIMU4D=15,20 # Maximum number of cost function simulations in 4D-Var outer loops\nCH_RES_SPEC=yes # yes => change of resolution of the increment spectrally; no => by FULLPOS\nFORCE1=no # yes => tendency increment; no => analysis increment in loop 1\nFORCE2=no # yes => tendency increment; no => analysis increment in loop 2","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Schematic-work-flow-of-4D-Var","page":"Algorithms","title":"Schematic work flow of 4D-Var","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"(Image: )","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"After the screening 4DVscreen for the default 3h observation window (2h before and 1h after the start of the main forecast), \n4DVprolog prepares the initial conditions at the appropriate resolution (ILRES settings) for the forecasts used in minimization. The input here is the background BGHR (ICMSHHARM+0001 fields of the former cycle supplemented with some surface fields). \nSubsequently 4DVminim produces initially the low resolution at the beginning of the observation window (an_lowres_$loop), which is then (CH_RES_SPEC=yes in config_exp.h) transformed to the field at forecast resolution (an_hr_begwin). \nThis field is complemented in Blendhr with necessary surface fields and the resulting field (anb_hr_begwin) acts as the initial condition for the trajectory run 4DVtraj. \nFrom the 2h forecast of 4DVtraj (ICMSHTRAJ+0002 fields) the main forecast is started.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Cloudingest-Cloud-Initialization","page":"Algorithms","title":"Cloudingest - Cloud Initialization","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"The cloud initialization modifies temperature and humidity fields with help of MSGcloudYYYYMMDDHH.grib file, which contains 2-d fields of cloudtop-temperature [K], cloudmask [0-1] and cloudbase [m]. Pre cy46h the Cloudingest happened within MASTERODB in src/arpifs specifically in src/arpifs/phys_dmn/msginit.F90 routine. Since cy46h the Cloudingest uses pysurfex and gl to do the job. ","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Settings-3","page":"Algorithms","title":"Settings","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"Enable Cloudingest in ecf/config_exp.h:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"USE_MSG=yes # Use MSG data for adjustment of inital profiles, EXPERIMENTAL! 
(no|yes), expects MSGcloudYYYYMMDDHH.grib in $OBDIR","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"And in src/include.ass:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"export CLOUD_BASE=1 # 1 and USE_MSG=true (ecf/config_exp.h) => msginit","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"to replace the cloudbase field in MSGcloudYYYYMMDDHH.grib with an OI interpolated field of SYNOP observations of CloudBaseHeights and a postprocessed field of cloud bases from the first-guess file.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Ecflow-and-schematic-work-flow-of-Cloudingest","page":"Algorithms","title":"Ecflow and schematic work flow of Cloudingest","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"In the case of 3DVar assimilation, the ecflow AnUA family should look like this:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"(Image: )","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"And a schematic work-flow of tasks and files involved:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"(Image: )","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"\nusing Graphviz_jll\nrun(`$(dot()) ../assets/cloudingest.dot -Tsvg -o ../assets/cloudingest.svg`)","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Incremental-Analysis-Updates-IAU","page":"Algorithms","title":"Incremental Analysis Updates - IAU","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"This is a modified and experimental version of the default implemented Forward-Facing Incremental Analysis Update, which is described elsewhere (and mainly controlled via namelist settings). This algorithm uses the same code but is changed in a few fundamental ways logistically. Instead of introducing the innovations gradually in the forecast from the analysis point (as in default 3Dvar), the analysis is done at \"t=0\" and then the forecast is started at an earlier point, so that the centre of the introduction of the innovations is located at the above mentioned analysis point.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"This is done through shifting the forecast start to 1h before \"t=0\" and activating a namelist change in the forecast namelist:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"NAMIAU=>{\n 'LIAU' => '.TRUE.,',\n 'TSTARTIAU' => '1800.0,',\n 'TSTOPIAU' => '5400.0,',\n 'ALPHAIAU' => '1.0,',","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"Hence the forecast runs freely for 30 min (TSTARTIAU=1800 s after the shifted start) and then adds the increments during 1 h (up to TSTOPIAU=5400 s), centered around the \"t=0\" point. 
If these times are changed (for example if a larger window is desired), note that you also need to change which files are linked to the various processes, such as the forecast, so that the correct start files are linked to each process.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Settings-4","page":"Algorithms","title":"Settings","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"If you want to try this method, or want to test it in order to develop it further, there are a few things to keep in mind. Before doing anything else you have to modify the write-up times in ecf/config_exp.h so that the model has access to the start files that it needs in order to start.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"From the DEFAULT:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":" LL_LIST=\"12,3\"                  # Forecast lengths for the cycles [h], replaces LL, LLMAIN\n HWRITUPTIMES=\"00-21:3,24-60:6\"  # History file output times\n FULLFAFTIMES=$HWRITUPTIMES      # History FA file IO server gather times\n PWRITUPTIMES=\"00-60:3\"          # Postprocessing times\n PFFULLWFTIMES=$PWRITUPTIMES     # Postprocessing FA file IO server gathering times\n VERITIMES=\"00-60:1\"             # Verification output times, changes PWRITUPTIMES/SFXSELTIMES\n SFXSELTIMES=$PWRITUPTIMES       # Surfex select file output times","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"to:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":" LL_LIST=\"12,6\"                  # Forecast lengths for the cycles [h], replaces LL, LLMAIN\n HWRITUPTIMES=\"00-06:1,06-21:3,24-60:6\"  # History file output times\n FULLFAFTIMES=$HWRITUPTIMES      # History FA file IO server gather times\n PWRITUPTIMES=\"00-06:1,06-60:3\"  # Postprocessing times\n PFFULLWFTIMES=$PWRITUPTIMES     # Postprocessing FA file IO server gathering times\n VERITIMES=\"00-60:1\"             # Verification output times, changes PWRITUPTIMES/SFXSELTIMES\n SFXSELTIMES=$PWRITUPTIMES       # Surfex select file output times\n                                 # Only meaningful if SURFEX_LSELECT=yes\n SFXSWFTIMES=$SFXSELTIMES        # SURFEX select FA file IO server gathering times\n SWRITUPTIMES=\"00-06:1\"          # Surfex model state output times\n SFXWFTIMES=$SWRITUPTIMES        # SURFEX history FA file IO server gathering times","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"Then run one cycle for at least 6h so that all the prerequisite files are generated before turning on IAUVAR.\nThe second thing, when the previous run has completed, is to set IAUVAR in ecf/config_exp.h to yes and keep running. It should be automatic to continue as usual. 
Make sure that you do not manually set LL to be shorter than 6h.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"The surface assimilation is moved to the start of the forecast and hence it is only the upper air assimilation that is involved in the IAU.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"note: Note\nThere is a difference between the first run with IAUVAR and all the following, because the first run couples to a run done without IAU and the others couple to a run that has done IAU, so the files used as start files differ in valid times. To this effect the first run saves a semaphore file in the $SCRATCH/hm_home/exp_name/ directory for the following runs to react to! So if you need to rerun the first run, for some reason, that semaphore file (named is_iauvar) needs to be manually removed!!","category":"page"},{"location":"DataAssimilation/DaAlgorithms/#Flow-diagram-of-IAU-(Magnus-will-help)","page":"Algorithms","title":"Flow diagram of IAU (Magnus will help)","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/#Variational-Constraints-(VC)","page":"Algorithms","title":"Variational Constraints (VC)","text":"","category":"section"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"Balances among analysed meteorological variables in the data assimilation process can be effectively introduced by means of the well-known method of variational constraints (VC). In the case of the ALADIN-NH dynamics, its semi-implicit linear system for the non-hydrostatic fully compressible Euler equations (SI) appears as a convenient way of giving a precise definition to these constraints. An interesting feature of this method is the integration in the analysis of the vertical velocity field, which clearly must be important in convection-permitting NWP. In this on-line doc resource only practical aspects on how to use this option will be mentioned; comprehensive information on this method can be found, for instance, in \"Variational Constraints for Data Assimilation in ALADIN-NH Dynamics\", available from: https://www.researchgate.net/publication/326479446_Variational_Constraints_for_Data_Assimilation_in_ALADIN-NH_Dynamics.","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"The VC algorithm is activated by setting the LVARCONS switch to TRUE in ecf/config_exp.h:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"# **** Assimilation ****\nLOOPS=\"no\"\nANAATMO=3DVAR # Atmospheric analysis (3DVAR|4DVAR|blending|none)\nLVARCONS='.TRUE.' # apply VC to analysis increments for a better balanced initial state\nIAUVAR=no\n","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"Obviously this will only have an effect on the analysis when 3DVAR is active and other initialization schemes (e.g. DFI) are off. The VC algorithm is applied after the 3DVAR minimization to the analysis increments. When any of the other options for ANAATMO are active, VC will not be applied. Application of DFI or other filters on the analysis will be redundant as VC is supposed to remove much of the noise in principle present in the t=0 dynamical fields: horizontal and vertical divergence, temperature, pressure departure and surface pressure. 
Note that vorticity is not part of the SI system, and therefore is not processed by VC. ","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"The whole VC run configuration is controlled by means of a single namelist, NEMVARCONS, to be found in nam/harmonie_namelists.pm. The default values are:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"\n%minimization=(\n...\n\nNEMVARCONS=>{\n'VCVERBOSELEV' => '0,',\n'LVCTESTID' => '.FALSE.,',\n'lVCPLOTGF' => '.FALSE.,',\n'LVCFHD' => '.TRUE.,',\n'LVCFT' => '.TRUE.,',\n'LVCFPS' => '.TRUE.,',\n'LVCFGW' => '.FALSE.,',\n'VCWEIGHT' => '10.0,',\n'VCWEIGHTHD' => '-1.50,',\n'VCWEIGHTT' => '-1.50,',\n'VCWEIGHTPS' => '-1.50,',\n'VCWEIGHTGW' => '-1.50,',\n'LVCFGWSCALE' => '.TRUE.,',\n'VCLOGPS' => '0,',\n'VCTSTEP' => '60.0,',\n'VCPSREF' => '90000.0,',\n'VCTREF' => '350.0,',\n'VCTREFA' => '70.0,',\n},\n)\n","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"Modification of these values should be done in nam/harmonie_namelists.pm itself. To use VC effectively, the user has to be aware of only a subset of these parameters:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"'VCVERBOSELEV' => '0,', # for more detailed printouts regarding VC, raise this value (up to 4)\n\n'VCWEIGHT' => '10.0,', # Critical parameter of the algorithm. Gives the relative weight between increments to be\n # filtered and dynamical constraints. Values should be between 0.5 (zero not allowed !) and 10.\n # The higher the value, the less constrained is the analysis. This default value of 10\n # guarantees that final analysis increments are very close to 3DVAR increments.\n # In the many different tests done during validation and verification experiments,\n # a value of about 1.0 has given best results.\n\n'VCTSTEP' => '60.0,', # These parameters correspond to the SI parameters and to the time step. Although the \n'VCPSREF' => '90000.0,', # SI in forecast mode is different from the SI used by VC ( in particular the numerical \n'VCTREF' => '350.0,', # method is different ) these values should agree with those used in forecast mode for \n'VCTREFA' => '70.0,', # optimal performance\n},\n","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"Maybe the only other information that is required to use this VC algorithm effectively concerns the parameters:","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"'VCWEIGHTHD' => '-1.50,',\n'VCWEIGHTT' => '-1.50,',\n'VCWEIGHTPS' => '-1.50,',","category":"page"},{"location":"DataAssimilation/DaAlgorithms/","page":"Algorithms","title":"Algorithms","text":"They enable the flexibility of considering different weights for different 3DVAR analysis increments. When VC operates with a low VCWEIGHT value (strongly constrained mode), it can remove some overfitting to wind, temperature and/or surface pressure observations, and this may produce an apparent degradation in operational verification curves close to t=0. These three parameters permit adjusting each variable individually. Note that negative values (as in the default) automatically revert to the VCWEIGHT value. 
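As a minimal sketch of such an adjustment (using the value of about 1.0 reported above from the validation experiments, purely as an illustration and not as a recommended default), only the weight entry in the NEMVARCONS block of nam/harmonie_namelists.pm would be changed:\nNEMVARCONS=>{\n ...\n 'VCWEIGHT' => '1.0,',\n ...\n},\nAll other entries keep the default values listed above. 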
","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#MUSC","page":"MUSC","title":"MUSC","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/#MUSC-using-the-develop-branch-(CY46)-in-the-git-repository","page":"MUSC","title":"MUSC using the develop branch (CY46) in the git repository","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"If you find any issues with any of the instructions or scripts, feel free to notify Emily Gleeson (emily.gleesonATmet.ie) and Eoin Whelan (eoin.whelanATmet.ie)","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Currently a \"reference\" test case, called musc_ref, works on ATOS, as well as the ARMCU cases (with and without SURFEX for both AROME and HARMONIE namelists) and the two microphysics-related cases (supercooled liquid) developed by Bjorg Jenny Engdahl in cycle 40. ","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Some instructions on how to use MUSC are included below. See here for some information on HARMONIE-AROME experiments using MUSC but note that the scripts have changed somewhat since that paper was written.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Set-up-MUSC","page":"MUSC","title":"Set up MUSC","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Get the code:\nmkdir -p $SCRATCH/harmonie_releases/git/HCY46\ncd $SCRATCH/harmonie_releases/git/HCY46\ngit clone git@github.com:Hirlam/Harmonie.git\ncd Harmonie\ngit checkout dev-CY46h1 \n# If you already have a clone of the code but want to update it to the latest version, \nuse \"git pull\" rather than \"git branch\".\nSet up a MUSC experiment using HARMONIE scripting \nIn this example the ECMWF.atos config file is used. \nmkdir -p $HOME/hm_home/test_0001\ncd $HOME/hm_home/test_0001\n$SCRATCH/harmonie_releases/git/HCY46/Harmonie/config-sh/Harmonie setup -r $SCRATCH/harmonie_releases/git/HCY46/Harmonie/ -h ECMWF.atos \nCompile your experiment (still in $HOME/hm_home/test_0001)\n$SCRATCH/harmonie_releases/git/HCY46/Harmonie/config-sh/Harmonie install BUILD_WITH=cmake\n# Note that for the ARMCU cases cmake needs FFT modifications (not yet committed by Yurii)\nSome MUSC specific settings including copying over scripts and a check that Harmonie setup has been run\n$SCRATCH/harmonie_releases/git/HCY46/Harmonie/util/musc/scr/musc_setup.sh -r $SCRATCH/harmonie_releases/git/HCY46/Harmonie/\nGenerate your namelist, unless you're using an idealised case with pre-defined namelists (so for ARMCU* you do not generate the namelists for example). If you wish to change the radiation scheme (RADSCHEME- RAYFM (IFS) or RAY (ACRANEB2)) or how you use aerosols (BDAER - cams or none), you need to edit ecf/configexp.h in your expt before running muscnamelist.sh. For using NRT aerosols, they need to be included in your input files already e.g. 
the MUSCIN* files should come from a 3D NRT aerosol expt.\ncd $HOME/hm_home/test_0001\n./musc_namelist.sh -h\n./musc_namelist.sh -l -i \n -[N nudging - optional]\nGet a copy of the input files\ncd $SCRATCH\nretrieve the input files from https://github.com/Hirlam/HarmonieMuscData","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Note that if you need to do experiments with 2 patches etc, ensure you derive some MUSC input files yourself using 3D HARMONIE-AROME files run with 2 patches. MUSC*REFL65* input files have only 1 patch. Changing MUSC namelists won't enable 2 patch output from a MUSC run.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Run-MUSC","page":"MUSC","title":"Run MUSC","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Musc_ref","page":"MUSC","title":"Musc_ref","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"The reference test is a X-hr experiment (change CSTOP in musc_namelist.sh if you wish to change the run length) and produces Out *lfa files for each model time-step of the time period. ICM* files are produced at each hour.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Run your experiment\ncd $HOME/hm_home/test_0001\n./musc_run.sh -h\n./musc_run.sh -d $SCRATCH/muscCY46InputData/musc_ref -n REFL65 -i DEF [ -e ECOCLIMAP_PATH]\n# optional path for ECOCLIMAP data may be given. For musc_ref -i must be given as no \n# namelists are provided with this experiment and must be generated before musc_run.sh \n# is executed. For the idealised cases, if -i is not specified -i becomes the name of \n# the idealised case once the namelist files are copied to $HOME/hm_home/test_0001 e.g. \n# for armcu the namelist files become namelist_atm_armcu etc.\n","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#ARMCU","page":"MUSC","title":"ARMCU","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"** Note that these will not work until we can compile with FFTW.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"This is an idealized SCM test case, the \"Sixth GCSS WG-1 case (ARM—Atmospheric Radiation Measurement)\", focussing on the diurnal cycle of cumulus clouds over land (Brown et al, 2002, Lenderink et al, 2004. The input files and namelist settings have been taken from /src/validation/mitraille/namelist/L1ARO but the atmospheric namelist needed editing for use in our environment. Atmospheric and surface forcings are included in the MUSC input files and the namelists in the util/musc/test/armcu directory are set up specifically for this case and are hence not edited by musc_run.sh.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Run your experiment\ncd $HOME/hm_home/test_0001\n./musc_run.sh -h\n./musc_run.sh -d $HOME/muscCY46InputData/ARMCU_HAR -n ARMCU (There are now 4 ARMCU experiments to chose from e.g. ARMCU_EB and ARMCUs_EB are ones that use AROME namelists, ARMCU_Har and ARMCUs_Har use HARMONIE-AROME namelists. 
Currently, the results are a bit different.)","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#MUSC-Output","page":"MUSC","title":"MUSC Output","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/#DDH-Toolbox","page":"MUSC","title":"DDH Toolbox","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"The outputs from a MUSC run are small files in lfa format. DDH tools can be used to handle these files.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"To download the DDH toolbox, go to https://www.umr-cnrm.fr/gmapdoc/spip.php?article19 and download the tarball. Untar it and within the tools folder run ./install. Now the various \"tools\" are compiled. For example lfaminm $file shows you the max, min and mean of all the output variables in a file. lfac $file $var shows the value(s) of $var in $file e.g. lfac Out.000.0000.lfa PTS shows you surface temperature. In order to be able to use the plotting scripts below, you'll need the lfac tool in your path. ","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"For example on ATOS, I set the following paths (may differ a bit for you depending on where you downloaded the ddhtools to). Perhaps add to your .bashrc file:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"export PATH=$HOME/ddhtoolbox/tools/lfa/:$PATH\nexport DDHI_BPS=$HOME/ddhtoolbox/ddh_budget_lists/\nexport DDHI_LIST=$HOME/ddhtoolbox/ddh_budget_lists/conversion_list","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Plot-output-time-series-from-the-MUSC-output-lfa-files","page":"MUSC","title":"Plot output time-series from the MUSC output lfa files","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"cd $HOME/hm_home/test_0001\n./musc_plot1Dts.sh -d \n\n## python based plotting scripts and \"default\" png plots \n## will be produced in $HOME/hm_home/test_0001/plots1Dts\n","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Extract-output-from-the-MUSC-output-ICM*-fa-files-and-plot-time-series-using-these","page":"MUSC","title":"Extract output from the MUSC output ICM* fa files and plot time-series using these","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"By default you get ICM* files on the hour - you can change the namelist should you require a higher frequency.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"cd $HOME/hm_home/test_0001\n./musc_convertICM2ascii.sh -l -f \n\n## Generates an OUT ascii file for each atm and sfx ICM* input file\n## ICM files have additional input not in lfa files e.g. TKE which is useful - also similar to 3D outputs\n\n./musc_plot_profiles_ICMfiles.sh -d -p -l \n","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Creating-your-own-input-files","page":"MUSC","title":"Creating your own input files","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"A converter script, musc_convert.sh, is available to extract a MUSC column from a model state file (ICMSHHARM+HHHH). 
musc_convert.sh is a Bash script that calls gl_grib_api to carry the data conversions.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Extract-a-MUSC-input-file","page":"MUSC","title":"Extract a MUSC input file","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"cd $HOME/hm_home/test_0001\n./musc_convert.sh -d $HOME/muscCY46InputData/harm_arome/ -c extr3d -n REFIRL -l 53.5,-7.5 -t 6\nmkdir $HOME/muscCY46InputData/musc_refirl\ncp MUSCIN_REFIRL_atm.fa MUSCIN_REFIRL_sfx.fa MUSCIN_REFIRL_pgd.fa $HOME/muscCY46InputData/musc_refirl/","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Convert-MUSC-FA-to-MUSC-ASCII","page":"MUSC","title":"Convert MUSC FA to MUSC ASCII","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"cd $HOME/hm_home/test_0001\n./musc_convert.sh -c fa2ascii -d $HOME/muscCY46InputData/musc_refirl -n REFIRL\nls -ltr\ncp MUSCIN_REFIRL_atm.ascii MUSCIN_REFIRL_sfx.ascii $HOME/muscCY46InputData/musc_refirl/","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Convert-MUSC-ASCII-to-MUSC-FA","page":"MUSC","title":"Convert MUSC ASCII to MUSC FA","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"cd $HOME/hm_home/test_0001\n./musc_convert.sh -c ascii2fa -d $HOME/muscCY46InputData/musc_refirl -n REFIRL\nls -ltr","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Forcing-in-MUSC","page":"MUSC","title":"Forcing in MUSC","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"musc_convert.sh includes forcing for temperature (11), humidity (51) and wind speed (32) . 
","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"You may edit the following lines to include other forcing:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":" PPPKEY(1:4)%shortname = 'ws','#','#','#',\n PPPKEY(1:4)%faname = '#','SNNNFORC001','SNNNFORC002','SNNNFORC003'\n PPPKEY(1:4)%levtype = 'hybrid','hybrid','hybrid','hybrid',\n PPPKEY(1:4)%level = -1,-1,-1,-1,\n PPPKEY(1:4)%pid = 32,-1,-1,-1,\n PPPKEY(1:4)%nnn = 0,0,0,0,\n PPPKEY(1:4)%lwrite = F,T,T,T,\n IFORCE = 11,51,32,","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Further information on forcing is available here: MUSC/Forcing","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#MUSC-local-adaptation","page":"MUSC","title":"MUSC local adaptation","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/#KNMI-workstations","page":"MUSC","title":"KNMI workstations","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"The following files were added to make it possible to run MUSC on KNMI workstations:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"config-sh/config.LinuxPC-MPI-KNMI\nconfig-sh/submit.LinuxPC-MPI-KNMI\nutil/makeup/config.linux.gfortran.mpi-knmi ","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"for use with the setup script:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"./musc_setup.sh [...] -c LinuxPC-MPI-KNMI","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"In addition, the following workaround has to be applied to be able to run the REFL65 test case:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"$ git diff src/ifsaux/utilities/echien.F90\ndiff --git a/src/ifsaux/utilities/echien.F90 b/src/ifsaux/utilities/echien.F90\nindex 55d5ce94e..694c87d83 100644\n--- a/src/ifsaux/utilities/echien.F90\n+++ b/src/ifsaux/utilities/echien.F90\n@@ -532,7 +532,7 @@ IF((KINF == 0).OR.(KINF == -1).OR.(KINF == -2).OR.(KINF == -3)) THEN\n & 'LEVEL ',JFLEV,' : ',&\n & 'FILE = ',ZVALH(JFLEV), ' ; ARGUMENT = ',PVALH(JFLEV)\n IERRA=1\n- IERR=1\n+! 
IERR=1\n ENDIF\n IF(ABS(ZVBH(JFLEV)-PVBH(JFLEV)) > PEPS) THEN\n WRITE(KULOUT,*) ' VERTICAL FUNCTION *B* MISMATCH ON ',&","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"Then you are ready to compile:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"remove the file experiment_is_locked from the experiment directory.\nremove the directory with your previous build (if any).\nstart the compile with the musc_compile.sh script","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"When starting the MUSC run, add the PATH to mpirun and the libraries:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"export PATH=$PATH:/usr/lib64/openmpi/bin\nexport LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/lib64/openmpi/lib\n./musc_run.sh [...]","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#MUSC-FAQ","page":"MUSC","title":"MUSC FAQ","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"If there is an error, what files do I look in? NODE.001_01 and lola in your output directory.\nHow do I handle the output files? The output files are of the form Out.XXX.XXXX and appear in your output directory. They are in lfa format and can be handled using ddh tools. See the bash script musc_plot1Dts.sh for ideas. There are also ICM* fa output files which are handy for plotting profiles - use musc_convertICM2ascii.sh to convert these files to ASCII and musc_plot_profiles_ICMfiles.sh to plot some profiles e.g. TKE, cloud liquid etc.\nI ran a different idealised case but did not get different results? The likely reason for this is that you did not delete the namelists from your experiment directory. If the namelists are there, the musc_run.sh script neither creates them nor copies them from the repository.\nHow do I create a new idealised case? This is not straightforward but the following was used to create the ASTEX cases in cy43 using info from cy38: https://www.overleaf.com/7513443985ckqvfdcphnng\nHow can I access a list of MUSC output parameters? Ensure you have the ddhtoolbox compiled. Then use lfaminm $file on any of your output files and it will show what is there. To look at a particular variable try lfac $file $parameter e.g. lfac $file PTS (for surface temperature). You can redirect the values to an ASCII file for ease of use (e.g. lfac $file PTS > $ASCIIfile). \nIs MUSC similar to the full 3D model version - is the physics the same? Yes, if you check out develop then you have MUSC up-to-date with that branch.\nDo I need to recompile the model if I modify code? Yes, if you modify code in a single file you must recompile the code but do not delete the original compiled model first. This will recompile relatively quickly. If you modify code in multiple files and you change what variables are passed between files, then you must delete your original compiled model and recompile the code. This will take longer to recompile. ","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#MUSC-variable-names","page":"MUSC","title":"MUSC variable names","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"A list of variable names found in the MUSC lfa output files can be found here. 
Please note that this is not a complete list of MUSC output parameters (yet). The variables in regular ICMSH... fa output are documented here","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#Outstanding-Issues","page":"MUSC","title":"Outstanding Issues","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"ARMCU and Jenny's cases run without surface physics, radiation etc and hence return NANs in apl_arome. To circumvent this on ECMWF, we needed to compile less strictly. This needs to be investigated further.\nThe ASTEX cases currently do not run on ECMWF but work perfectly at Met Eireann - debugging needed.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC/#MUSC-using-EMS","page":"MUSC","title":"MUSC using EMS","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC/","page":"MUSC","title":"MUSC","text":"These instructions have moved to MUSC EMS","category":"page"},{"location":"EPS/SPPImplementation/#The-SPP-implementation-in-IAL-and-HARMONIE","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"","category":"section"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"The Stochastically Perturbed Parameterizations scheme (SPP) introduces stochastic perturbations to values of chosen closure parameters representing efficiencies or rates of change in parameterized atmospheric (sub)processes. See here for more information. See the main SPP documentation for selection of settings.","category":"page"},{"location":"EPS/SPPImplementation/#Controling-routines","page":"The SPP implementation in IAL and HARMONIE","title":"Controling routines","text":"","category":"section"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"The SPP data structure and logics is controlled by the following routines","category":"page"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"Routine Description\nsrc/arpifs/module/spp_mod.F90 Defines SPP scheme types TSPP_CONFIG_PAR and TSPP_CONFIG for the parameter config and the overall config respectively\nsrc/arpifs/module/spp_mod_type.F90 Harmonie specific data types TSPP_CONFIG_TYPE, ATM_SPP_VARS, SFX_VARS, control and the methods CLEAR_SSP_TYPE, SET_SPP_TYPE, APPLY_SPP, APPLY_SPP_SURFEX, DIA_SPP, SET_ALL_ATM_SPP, SET_ALL_SFX_SPP, CLEAR_ALL_ATM_SPP, CLEAR_ALL_SFX_SPP\nsrc/surfex/SURFEX/modd_sfx_spp.F90 SURFEX specific data types, control and methods CLEAR_SFX_SPP, SET_SFX_SPP, APPLY_SFX_SPP, CLEAR_ALL_SFX_SPP, SPP_MASK, SPP_DEMASK, PREP_SPP_SFX. Partly duplicates spp_mod_type.F90\nsrc/arpifs/namelist/namspp.nam.h The SPP namelist\nsrc/arpifs/setup/get_spp_conf.F90 Setup defaults and read the SPP namelist. 
Initialises the SPG parameters\nsrc/arpifs/phys_dmn/ini_spp.F90 Initialises the pattern used for SPP\nsrc/arpifs/phys_dmn/evolve_spp.F90 Control routine for pattern propagation\nsrc/mse/internals/aroset_spp.F90 Initialises the SURFEX part of SPP","category":"page"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"Note that the control routines shared with IFS will be totally rewritten, and made much neater, with the introduction of CY49T1. See e.g. spp_def_mod.F90, spp_gen_mod.F90 ","category":"page"},{"location":"EPS/SPPImplementation/#SPG-routines","page":"The SPP implementation in IAL and HARMONIE","title":"SPG routines","text":"","category":"section"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"The pattern used for SPP within HARMONIE is SPG and the code for this is found under src/utilities/spg. For the propagation of the pattern we find the routine EVOLVE_ARP_SPG in src/arp/module/spectral_arp_mod.F90","category":"page"},{"location":"EPS/SPPImplementation/#Applying-the-patterns","page":"The SPP implementation in IAL and HARMONIE","title":"Applying the patterns","text":"","category":"section"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"In apl_arome.F90 the HARMONIE specific data types are initialised with SET_ALL_ATM_SPP and SET_ALL_SFX_SPP. These routines group the different parameters and connect them to a pattern and to the correct diagnostic field EZDIAG if requested.","category":"page"},{"location":"EPS/SPPImplementation/#Applying-the-patterns-in-the-upper-air-part","page":"The SPP implementation in IAL and HARMONIE","title":"Applying the patterns in the upper air part","text":"","category":"section"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"In the routine where a specific parameter is used, the pattern is applied by calling APPLY_SPP. 
This is done for each parameter according to the table below.","category":"page"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"Perturbation Routine\nRADGR src/arpifs/phys_dmn/apl_arome.F90\nRADSN src/arpifs/phys_dmn/apl_arome.F90\nRFAC_TWOC src/arpifs/phys_dmn/vdfexcuhl.F90\nRZC_H src/arpifs/phys_dmn/vdfexcuhl.F90\nRZL_INF src/arpifs/phys_dmn/vdfexcuhl.F90\nRZMFDRY src/arpifs/phys_dmn/vdfhghtnhl.F90\nRZMBCLOSURE src/arpifs/phys_dmn/vdfhghtnhl.F90\nCLDDPTHDP src/arpifs/phys_dmn/vdfhghtnhl.F90\nRLWINHF src/arpifs/phys_radi/recmwf.F90\nRSWINHF src/arpifs/phys_radi/recmwf.F90\nPSIGQSAT src/mpa/micro/internals/condensation.F90\nICE_CLD_WGT src/mpa/micro/internals/condensation.F90\nICENU src/mpa/micro/internals/rain_ice_old.F90\nKGN_ACON src/mpa/micro/internals/rain_ice_old.F90\nKGN_SBGR src/mpa/micro/internals/rain_ice_old.F90\nALPHA src/mpa/micro/internals/rain_ice_old.F90\nRZNUC src/mpa/micro/internals/rain_ice_old.F90","category":"page"},{"location":"EPS/SPPImplementation/#Applying-the-patterns-in-SURFEX","page":"The SPP implementation in IAL and HARMONIE","title":"Applying the patterns in SURFEX","text":"","category":"section"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"As SURFEX should have no dependencies on external modules, the data is copied into the internal SURFEX SPP data structure in AROSET_SPP, called from ARO_GROUND_PARAM.","category":"page"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"For SURFEX the parameter table looks like","category":"page"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"Perturbation Routine\nCV src/surfex/SURFEX/coupling_isban.F90\nLAI src/surfex/SURFEX/coupling_isban.F90\nRSMIN src/surfex/SURFEX/coupling_isban.F90","category":"page"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"In SURFEX we also have to pack/unpack the data arrays to only use the active points for a specific tile or patch. This is done in the SPP_MASK and SPP_DEMASK routines found in src/surfex/SURFEX/modd_sfx_spp.F90 and called from src/surfex/SURFEX/coupling_surf_atmn.F90. At the time of writing, returning the diagnostics of the pattern does not work satisfactorily.","category":"page"},{"location":"EPS/SPPImplementation/","page":"The SPP implementation in IAL and HARMONIE","title":"The SPP implementation in IAL and HARMONIE","text":"The additional code changes done for SPP in SURFEX can be viewed here","category":"page"},{"location":"DataAssimilation/NWECHKEVO/#NWECHKEVO","page":"NWECHKEVO","title":"NWECHKEVO","text":"","category":"section"},{"location":"DataAssimilation/NWECHKEVO/#Introduction","page":"NWECHKEVO","title":"Introduction","text":"","category":"section"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"The new utility NWECHKEVO was introduced to make the generation of diagnostics for studying spin-up effects in the dynamics more efficient. The utility CHKEVO turned out to slow down the forecast run to impractical times. NWECHKEVO produces timeseries for the variables log(Ps), horiz. vorticity, horiz. 
divergence, vertical divergence, pressure departure and temperature for the first 180 timesteps of integration at timestep resolution. These timeseries are produced at selected points within the domain and at all levels for the last five upper-air variables (HVor, HDiv, VDiv, PD and T). ","category":"page"},{"location":"DataAssimilation/NWECHKEVO/#Preparations.-NAMCHK-namelist","page":"NWECHKEVO","title":"Preparations. NAMCHK namelist","text":"","category":"section"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"The user must select the list of points at which the diagnostics are to be generated. The coordinates are given in GPx and GPy coordinates, not geographical coordinates. These co-ordinates are then introduced in the namelist NAMCHK as in the following example ","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"NAMCHK=>{\n 'NGPCHK' => '17,',\n 'NXCHK(1:17)' => '263,335,447,525,606,390,420,540,644,333,509,329,388,480,266,259,271,',\n 'NYCHK(1:17)' => '462,472,469,398,388,406,325,284,300,293,243,215,167,178,358,279,200,',\n },","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"NB: These points correspond to the locations of 17 different weather radars in the domain ","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"'IBERIAxxm_2.5'=>{\n 'TSTEP' => '60',\n 'NLON' => '800',\n 'NLAT' => '648',\n 'LONC' => '-4.5',\n 'LATC' => '40.0',\n 'LON0' => '-4.5',\n 'LAT0' => '40.0',\n 'GSIZE' => '2500.',\n },","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"The default value for LNWECHKEVO is set to FALSE in suechk.F90. To enable this option, set LNWECHKEVO=.TRUE. in ecf/config_exp.h. This setting will be carried over to scr/Forecast at the time of namelist_forecast specification. NWECHKEVO takes priority over ECHKEVO, that is, when LNWECHKEVO=.TRUE., LECHKEVO is set to FALSE no matter what is specified at the namelist level (a warning message will appear in the logs). This is done in order to avoid conflicts between the previous and the new utilities. If the user wants the previous method, just activate LECHKEVO and make sure not to activate LNWECHKEVO.","category":"page"},{"location":"DataAssimilation/NWECHKEVO/#Important-Info","page":"NWECHKEVO","title":"Important Info","text":"","category":"section"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"NWECHKEVO speeds up the generation of diagnostics by minimizing MPI overhead. During the set-up, it is determined which MPI-task processes each point given via NAMCHK. The internal arrays that contain this info are given a size such that no more than 10 points can be handled by each MPI-task. Therefore, when running with a small number of MPI-tasks it is possible that some of the points in NAMCHK are ignored. This situation however is very unlikely because the usual number of MPI-tasks is quite big. 
Nonetheless, the parameter NWJPGPCHK (in module YOMCHK) can be given a bigger value if necessary.","category":"page"},{"location":"DataAssimilation/NWECHKEVO/#Modifications-in-scr/Forecast","page":"NWECHKEVO","title":"Modifications in scr/Forecast","text":"","category":"section"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"For ease of use, NWECHKEVO dumps the results in text format to the standard output (the NODE* files). The size of these files remains manageable unless the number of points in NAMCHK is very big. As the diagnostics are generated avoiding MPI communications, the standard output for all tasks must be activated by using the NOUTPUT parameter in NAMPAR0. This is the reason why scr/Forecast must be modified. ","category":"page"},{"location":"DataAssimilation/NWECHKEVO/#Results","page":"NWECHKEVO","title":"Results","text":"","category":"section"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"The results can easily be obtained by grepping them out of the NODE files. First, in order to know which NODE file contains a given point, we can run the following on NODE.001_01","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"grep SETUPNEWECHKEVO NODE.001_01","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"obtaining a table like this","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"SETUPNEWECHKEVO: JPROC 120 ISETA= 8 ISETB= 8 ISETW= 8 ISETV= 8\nSETUPNEWECHKEVO: IGP= 13 (nychk,nxchk)= 167 388 CC in chunk= 1032\nSETUPNEWECHKEVO: JPROC 138 ISETA= 9 ISETB= 10 ISETW= 9 ISETV= 10\nSETUPNEWECHKEVO: IGP= 14 (nychk,nxchk)= 178 480 CC in chunk= 527","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"which indicates that GP number 13 (co-ordinates 167,388) in the NAMCHK list is allocated to MPI-task 120. The point has ordinate number 1032 within the domain chunk assigned to this MPI-task. It outputs to NODE.008_08","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"Once we know the NODE file for this GP, we grep out again the results. 
For PS","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"grep NWECHKEVO:PS NODE.008_08","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"NWECHKEVO:PS 13 000 0.11505382282124E+02 0.99248519555856E+05\nNWECHKEVO:PS 13 001 0.11505380867601E+02 0.99248379166582E+05\nNWECHKEVO:PS 13 002 0.11505354069965E+02 0.99245719580317E+05\nNWECHKEVO:PS 13 003 0.11505387331977E+02 0.99249020747545E+05\nNWECHKEVO:PS 13 004 0.11505409155285E+02 0.99251186713122E+05\nNWECHKEVO:PS 13 005 0.11505330599600E+02 0.99243390274322E+05\nNWECHKEVO:PS 13 006 0.11505214514108E+02 0.99231870225232E+05\nNWECHKEVO:PS 13 007 0.11505186261496E+02 0.99229066705312E+05\nNWECHKEVO:PS 13 008 0.11505301938506E+02 0.99240545891039E+05\nNWECHKEVO:PS 13 009 0.11505222637195E+02 0.99232676297636E+05\nNWECHKEVO:PS 13 010 0.11505173119706E+02 0.99227762666337E+05","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"the second column is the GP number, the third the time step, the fourth is log(Ps) and the fith is fo Ps (Pa)","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"For the upper-air variables we do","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"grep 'NWECHKEVO:UA 13' NODE.008_08 ","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"NWECHKEVO:UA 13 000 001 0.14364086697030E-04 -0.50805426278410E-05 0.22599560795742E+03 0.00000000000000E+00 0.80040376159261E-07\nNWECHKEVO:UA 13 000 002 0.33741231171738E-05 0.97134353537265E-05 0.21290057631960E+03 0.00000000000000E+00 0.29001061928223E-06\nNWECHKEVO:UA 13 000 003 0.69003223306262E-05 -0.10070828988605E-04 0.21384249855800E+03 0.00000000000000E+00 0.47109613483406E-06\nNWECHKEVO:UA 13 000 004 -0.30497765593290E-06 0.82334865627784E-05 0.21374844679188E+03 0.00000000000000E+00 -0.26145222191521E-06\nNWECHKEVO:UA 13 000 005 0.26070697062412E-04 -0.12291493034515E-04 0.21545103834209E+03 0.00000000000000E+00 0.39088530137662E-05\nNWECHKEVO:UA 13 000 006 0.15371311363862E-04 0.52048057489868E-05 0.21791733622236E+03 0.00000000000000E+00 0.81362351433664E-05\nNWECHKEVO:UA 13 000 007 0.51032970986510E-04 -0.13111824709755E-04 0.22004808804352E+03 0.00000000000000E+00 0.15756687491581E-04\nNWECHKEVO:UA 13 000 008 -0.19648225032526E-04 -0.10918787276801E-04 0.22163556827039E+03 0.00000000000000E+00 0.64289394144994E-05\nNWECHKEVO:UA 13 000 009 -0.17176145579185E-05 0.45899398495276E-04 0.22275608584458E+03 0.00000000000000E+00 -0.10037800019117E-04\n....\nNWECHKEVO:UA 13 000 056 0.13986028555310E-02 -0.66909930879866E-03 0.28628543762220E+03 0.00000000000000E+00 0.54829685721891E-03\nNWECHKEVO:UA 13 000 057 0.14038615170298E-02 -0.75779689206785E-03 0.28663735655175E+03 0.00000000000000E+00 0.59806781225837E-03\nNWECHKEVO:UA 13 000 058 0.13261896477891E-02 -0.74616001898282E-03 0.28698938510509E+03 0.00000000000000E+00 0.49588753382099E-03\nNWECHKEVO:UA 13 000 059 0.14489950905375E-02 -0.74227210464472E-03 0.28734520941903E+03 0.00000000000000E+00 0.27710629383431E-03\nNWECHKEVO:UA 13 000 060 0.17744491137974E-02 -0.61965914425958E-03 0.28761932093752E+03 0.00000000000000E+00 0.40814002634823E-03\nNWECHKEVO:UA 13 000 061 0.19899360944093E-02 -0.54623343124852E-03 0.28790566719217E+03 0.00000000000000E+00 0.47915020836391E-03\nNWECHKEVO:UA 13 000 
062 0.23402553668209E-02 -0.78436980306753E-03 0.28827960078154E+03 0.00000000000000E+00 0.37896218758797E-03\nNWECHKEVO:UA 13 000 063 0.20467568555637E-02 -0.11207508904165E-02 0.28854880261461E+03 0.00000000000000E+00 0.46291124700252E-03\nNWECHKEVO:UA 13 000 064 0.16656728750091E-02 -0.10495856132860E-02 0.28883596008959E+03 0.00000000000000E+00 0.25911350987737E-03\nNWECHKEVO:UA 13 000 065 0.14631444722784E-02 -0.94533311737612E-03 0.28903858779818E+03 0.00000000000000E+00 0.64914638106075E-05","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"which gives at timestep 0 ( third col.) the profile (model level is fourth column) for HVor(s-1), Hdiv (s-1), T (K), PD (Pa for NPDVAR=2) and VD (units depend on NVDVAR definition) ","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"Consecutive timesteps follow","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"NWECHKEVO:UA 13 001 001 0.14391950814966E-04 -0.10126750142077E-04 0.22601300427477E+03 0.26233794693211E-05 0.13011272170719E-05\nNWECHKEVO:UA 13 001 002 0.35975280461999E-05 0.30191811529161E-05 0.21293465156805E+03 -0.29989147375432E-04 0.10063954562282E-06\nNWECHKEVO:UA 13 001 003 0.79264193785264E-05 -0.15031046611816E-04 0.21385134119954E+03 -0.33856415073342E-04 0.42661347477312E-05\nNWECHKEVO:UA 13 001 004 0.21090675053822E-05 0.31713133370971E-05 0.21377935010403E+03 -0.40445121858208E-04 -0.54989449665528E-05\nNWECHKEVO:UA 13 001 005 0.30451493480920E-04 -0.18284403001908E-04 0.21545646796919E+03 -0.42130887042681E-04 0.14684047934687E-04\n....","category":"page"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"up to timestep 180 (hard-coded,the first 3 hours if timestep 1 minute)","category":"page"},{"location":"DataAssimilation/NWECHKEVO/#Plotting","page":"NWECHKEVO","title":"Plotting","text":"","category":"section"},{"location":"DataAssimilation/NWECHKEVO/","page":"NWECHKEVO","title":"NWECHKEVO","text":"The results are easily plotted with any graphs utility (e.g. gnuplot)","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#ecmwfatos","page":"Running on Atos","title":"Running Harmonie on Atos","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Before-you-start","page":"Running on Atos","title":"Before you start","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"New Harmonie users will require membership of the accord user group at ECMWF. Please contact the HIRLAM System Manager, Daniel Santos, to make this request on your behalf. Futhermore ECMWF will have to setup a virtual machine for you to run the ecFlow server on (see here). 
Finally, make sure that your login shell is set to /bin/bash.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"tip: Tip\nTo share your experiments with the members of the accord group do: chmod 755 $HOME $SCRATCH $PERM $HPCPERM\nchgrp -R accord $HOME/hm_home $SCRATCH/hm_home $PERM/HARMONIE $HPCPERM/hm_home\nchmod g+s $HOME/hm_home $SCRATCH/hm_home $PERM/HARMONIE $HPCPERM/hm_homeThe chmod g+s sets the SGID bit which will ensure that new experiments created in hm_home will automatically be in the accord group","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Configure-your-experiment-(option-1)","page":"Running on Atos","title":"Configure your experiment (option 1)","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Create an experiment directory under $HOME/hm_home and use the master script Harmonie to set up a minimum environment for your experiment. \nmkdir -p $HOME/hm_home/my_exp\ncd $HOME/hm_home/my_exp\nln -sf /path/to/git/repository/config-sh/Harmonie\n./Harmonie setup -r /path/to/git/repository -h ECMWF.atos\nwhere \n-r Specifies the path to the git repository. Make sure you have checkout-ed the correct branch. \n-h tells which configuration files to use. At ECMWF config.ECMWF.atos is the default one. For harmonie-43h2.2 use -h config.aa\ntip: Tip\nAn Atos tagged versions of Harmonie are available in ~hlam/harmonie_release/git/tags/ln -sf ~hlam/harmonie_release/git/tags//config-sh/Harmonie \nHarmonie setup -r ~hlam/harmonie_release/git/tags/ -h ECMWF.atos","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"This would give you the default setup which currently is AROME physics with CANARI+OI_MAIN surface assimilation and 3DVAR upper air assimilations with 3h cycling on a domain covering Denmark using 2.5km horizontal resolution and 65 levels in the vertical.\nNow you can edit the basic configuration file ecf/config_exp.h to configure your experiment scenarios. Modify specifications for domain, data locations, settings for dynamics, physics, coupling host model etc. Read more about the options here. You can also use some of the predefined configurations by calling Harmonie with the -c option:\n./Harmonie setup -r PATH_TO_HARMONIE -h YOURHOST -c CONFIG -d DOMAIN\nwhere CONFIG is one of the setups defined in scr/Harmonie_configurations.pm. If you give -c without an argument or a non existing configuration a list of configurations will be printed.\nIn some cases you might have to edit the general system configuration file config-sh/config.ECMWF.atos. See here for further information. \nThe rules for how to submit jobs on Atos are defined in config-sh/submit.ECMWF.atos. See here for further information\nIf you experiment in data assimilation you might also want to change settings in scr/include.ass.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Configure-your-experiment-using-github-repo-(option-2)","page":"Running on Atos","title":"Configure your experiment using github repo (option 2)","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Disadvantage of option 1 for version control in git is that code is located in two places. 
Instead you can : ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Make a fork of the Harmonie repository. From now we assume your fork will be located at https://github.com//Harmonie.\nLog in to ATOS as usual and perform the following commands:","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"mkdir -p $PERM/hm_home && cd $PERM/hm_home/\ngit clone -b git@github.com:/Harmonie.git \ncd \ngit checkout -b \nexport PERL5LIB=$(pwd)\nconfig-sh/Harmonie setup -r $(pwd) -h ECMWF.atos","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Where the git clone command clones a specific branch into a directory called . git checkout with the -b flag, then creates a new branch for you to work on. Call it something meaningful. Then the experiment is set up as usual, but using your local repository as reference to itself.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Then you do some work and when ready to commit something you do","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"git add \ngit commit --author \"Name \" -m \"Commit message\"\ngit push --set-upstream origin ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Specifying --set-upstream origin to git push is only necessary the first time you push your branch to the remote. When ready you can now go to GitHub and make a pull-request to the Harmonie repository from your fork.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Start-your-experiment","page":"Running on Atos","title":"Start your experiment","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Launch the experiment by giving start time, DTG, end time, DTGEND","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"./Harmonie start DTG=YYYYMMDDHH DTGEND=YYYYMMDDHH\n# e.g., ./Harmonie start DTG=2022122400 DTGEND=2022122406","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"If successful, Harmonie will identify your experiment name and start building your binaries and run your forecast. If not, you need to examine the ECFLOW log file $HM_DATA/ECF.log. $HM_DATA is defined in your Env_system file. At ECMWF $HM_DATA=$SCRATCH/hm_home/$EXP where $EXP is your experiment name. 
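For example, a quick way to follow the progress of a running experiment is to tail this log (a minimal sketch; my_exp is a placeholder experiment name):
cd $SCRATCH/hm_home/my_exp    # this is $HM_DATA at ECMWF
tail -f ECF.log               # follow job submission and task progress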
Read more about where things happen further down.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Continue-your-experiment","page":"Running on Atos","title":"Continue your experiment","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"If your experiment has successfully completed and you would like to continue for another period you should write","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"./Harmonie prod DTGEND=YYYYMMDDHH","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"By using prod you tell the system that you are continuing the experiment and using the first guess from the previous cycle. The start date is taken from a file progress.log created in your $HOME/hm_home/my_exp directory. If you had used start, the initial data would have been interpolated from the boundaries: a cold start, in other words.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Start/Restart-of-ecflow_ui","page":"Running on Atos","title":"Start/Restart of ecflow_ui","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"To start the graphical window for ECFLOW","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"./Harmonie mon","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"The graphical window runs independently of the experiment and can be closed and restarted again with the same command. With the graphical interface you can control and view logfiles of each task. ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Making-local-changes","page":"Running on Atos","title":"Making local changes","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Very soon you will find that you need to make changes in a script or in the source code. Once you have identified which file to edit you put it into the current $HOME/hm_home/my_exp directory, with exactly the same subdirectory structure as in the reference. E.g., if you want to modify a namelist setting ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"./Harmonie co nam/harmonie_namelists.pm # retrieve default namelist harmonie_namelists.pm\nvi nam/harmonie_namelists.pm # modify the namelist","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Next time you run your experiment the changed file will be used. You can also make changes in a running experiment. Make the change you wish and rerun the InitRun task from the viewer. The InitRun task copies all files from your local experiment directory to your working directory $HM_DATA. Once your InitRun task is complete you can rerun the task you are interested in. 
If you wish to recompile something you will also have to rerun the Build tasks.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Issues","page":"Running on Atos","title":"Issues","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Harmonie exp stop at ECMWF(Atos) due $PERM mounting problem https://github.com/Hirlam/Harmonie/issues/628","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Account","page":"Running on Atos","title":"Account","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"In order to change the billing account, open Env_submit and find the definition of scalar_job. Then add a line like","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"'ACCOUNT' => $submit_type.' --account=account_name' to the definition of the dictionary.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Directory-structure","page":"Running on Atos","title":"Directory structure","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#SCRATCH","page":"Running on Atos","title":"$SCRATCH","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"In $SCRATCH/hm_home/$EXP you will find ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Directory Content\nbin Binaries\nlib Source code synced from $HM_LIB and compiled code\nlib/src Object files and source code (if you build with makeup, set by MAKEUP_BUILD_DIR)\nlib/util Utilities such as makeup, gl_grib_api or oulan\nclimate Climate files\nYYYYMMDD_HH Working directory for the current cycle. If an experiment fails it is useful to check the IFS log file, NODE.001_01, in the working directory of the current cycle. The failed job will be in a directory called something like Failed_this_job.\narchive Archived files. A YYYY/MM/DD/HH structure for per cycle data. ICMSHHARM+NNNN and ICMSHHARM+NNNN.sfx are atmospheric and surfex forecast output files\nextract Verification input data. This is also stored on the permanent disk $HPCPERM/HARMONIE/archive/$EXP/parchive/archive/extract\nECF.log Log of job submission","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#ECFS","page":"Running on Atos","title":"ECFS","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Since $SCRATCH is cleaned regularly we need to store data permanently on ECFS, the EC file system, as well. There are two options for ECFS, ectmp and ec. The latter is a permanent storage and first one is cleaned after 90 days. Which one you use is defined by the`ECFSLOC variable. To view your data type e.g.\nels ectmp:/$USER/harmonie/my_exp\nThe level of archiving depends on ARSTRATEGY in ecf/config_exp.h. 
The default setting will give you one YYYY/MM/DD/HH structure per cycle of data containing:\nSurface analysis, ICMSHANAL+0000[.sfx]\nAtmospheric analysis result MXMIN1999+0000\nBlending between surface/atmospheric analysis and cloud variable from the first guess LSMIXBCout\nICMSHHARM+NNNN and ICMSHHARM+NNNN.sfx are atmospheric and surfex forecast model state files\nPFHARM* files produced by the inline postprocessing\nICMSHSELE+NNNN.sfx are surfex files with selected output\nGRIB files for fullpos and surfex select files\nLogfiles in a tar file logfiles.tar\nObservation database and feedback information in odb_stuff.tar.\nExtracted files for obsmon in sqlite.tar\nClimate files are stored in the climate directory\nOne directory each for vfld and vobs data respectively for verification data","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#PERM","page":"Running on Atos","title":"$PERM","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Directory Content\nHARMONIE/$EXP ecflow log and job files\nhm_lib/$EXP/lib Scripts, config files, ecf and suite, source code (not compiled, set by $HM_LIB). Reference with experiment's changes on top","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#HPCPERM","page":"Running on Atos","title":"$HPCPERM","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"In $HPCPERM/hm_home/$EXP","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Directory Content\nparchive/archive/extract/ Verification input data.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#HOME-on-ecflow-gen-{user}-001","page":"Running on Atos","title":"$HOME on ecflow-gen-${user}-001","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Directory Content\necflow_server/ ecFlow checkpoint and log files","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Cleanup-of-old-experiments","page":"Running on Atos","title":"Cleanup of old experiments","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"danger: Danger\nThese commands may not work properly in all versions. Do not run the removal before you're sure it's OK","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Once you have completed your experiment you may wish to remove code, scripts and data from the disks. Harmonie provides some simple tools to do this. 
First check the content of the different disks by","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Harmonie CleanUp -ALL","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Once you have convinced yourself that this is OK you can proceed with the removal.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Harmonie CleanUp -ALL -go ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"If you would like to exclude the data stored on e.g ECFS ( at ECMWF ) or in more general terms stored under HM_EXP ( as defined in Env_system ) you run ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Harmonie CleanUp -d","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"to list the directories intended for cleaning. Again, convince yourself that this is OK and proceed with the cleaning by","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Harmonie CleanUp -d -go","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"You can always remove the data from ECFS directly by running e.g.","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"erm -R ec:/YOUR_USER/harmonie/EXPERIMENT_NAME ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"or","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"erm -R ectmp:/YOUR_USER/harmonie/EXPERIMENT_NAME ","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"For more information about cleaning with Harmonie read here\nFor more information about the ECFS commands read here","category":"page"},{"location":"System/ECMWF/RunningHarmonieOnAtos/#Debugging-Harmonie-with-ARM-DDT","page":"Running on Atos","title":"Debugging Harmonie with ARM DDT","text":"","category":"section"},{"location":"System/ECMWF/RunningHarmonieOnAtos/","page":"Running on Atos","title":"Running on Atos","text":"Follow instructions here. 
Use Run DDT client on your Personal Computer or End User Device ","category":"page"},{"location":"EPS/SLAF/Get_pertdia.pl.pm/#Get_pertdia","page":"Get_pertdia","title":"Get_pertdia","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/#Run-MUSC-with-EMS","page":"MUSC EMS","title":"Run MUSC with EMS","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"These instructions require the use of dev-CY46h1.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"If you find any issues with any of the instructions or scripts, feel free to notify Emily Gleeson (emily.gleesonATmet.ie) and Eoin Whelan (eoin.whelanATmet.ie)","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"In this section a description of how to install and run MUSC using EMS is provided. This is based on compilation and execution in a Ubuntu 20.04 container (tested using Apptainer on the ECMWF Atos HPC) and use of the EMS system to execute MUSC and convert the output to NetCDF. EMS is primarily developed by Romain Roehrig (Météo France) https://github.com/romainroehrig/EMS.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/#Start-your-container","page":"MUSC EMS","title":"Start your container","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"Log in to hpc-login on the Atos\nLoad the Apptainer module and start the Ubuntu 20.04 container:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"module load apptainer\n/home/dui/musc_ubuntu.sif","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"Further details concerning Apptainer on the Atos are available here","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/#Compile-the-code","page":"MUSC EMS","title":"Compile the code","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"I (Eoin) have not had time to sort out compilation using CMake but the following instructions provide a minimalist approach to compile the code using makeup. These instructions depend on you having a clone or copy of Harmonie (dev-46h1) in your $PERM directory on Atos where GHUSER is your Github username.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"cd $PERM\nGHUSER=your_github_username\ngit clone git@github.com:$GHUSER/Harmonie.git harmonie_git/$GHUSER/dev-CY46h1 -b dev-CY46h1\n\n(Ensure that your fork is up-to-date before doing this or else take the code from the main\nCY46 repo. Note that to run GABLS1 you need to use two updated surfex subroutines `tszo.F90`\nand `read_pgd_tsz0_parn.F90`. 
You can copy these files from util/musc/patches as follows:\n\ncp $PERM/harmonie_git/$GHUSER/dev-CY46h1/util/musc/patches/tsz0.F90 $PERM/harmonie_git/$GHUSER/dev-CY46h1/src/surfex/SURFEX/tsz0.F90\ncp $PERM/harmonie_git/$GHUSER/dev-CY46h1/util/musc/patches/read_pgd_tsz0_parn.F90 $PERM/harmonie_git/$GHUSER/dev-CY46h1/src/surfex/SURFEX/read_pgd_tsz0_parn.F90\nor by using git apply gabls.patch).\n","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"Compile the code in your code checkout/copy:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"cd $PERM/harmonie_git/$GHUSER/dev-CY46h1\nEXP=$(basename $(git symbolic-ref --short HEAD))\n. config-sh/config.ubuntu20_nompi\nutil/makeup/build -n 4 config.${HARMONIE_CONFIG}\nmkdir ${EXP}\nmv makeup.${HARMONIE_CONFIG} bin configlog.1 makelog.1 ${EXP}/","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/#Install-EMS","page":"MUSC EMS","title":"Install EMS","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"The following instructions provide details on how to download a HIRLAM version of EMS and install locally in your own account:","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"cd $HOME\ngit clone git@github.com:ewhelan/EMS.git -b hirlam EMS_git/EMS\ncd EMS_git/EMS/\nmkdir build\ncd build/\nexport EMS_DIR=$HOME/metapp/ems\ncmake .. -DCMAKE_INSTALL_PREFIX=$EMS_DIR && make && ctest && make install\nexport PATH=${EMS_DIR}/bin:$PATH\nexport PYTHONPATH=${EMS_DIR}","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"note: Note\nFor GABLS1 in prep_nam_sfx.py_orig the following lines need to be commented out:nam[nn]['XUNIF_CLAY'] = ['1.']\nnam[nn]['XUNIF_SAND'] = ['0.']\nnam[nn]['XUNIF_RUNOFFB'] = ['0.5']\n\nnam[nn]['XHUG_SURF'] = ['-10.']\nnam[nn]['XHUG_ROOT'] = ['-10.']\nnam[nn]['XHUG_DEEP'] = ['-10.']\n","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/#Run-EMS","page":"MUSC EMS","title":"Run EMS","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"Here are some instructions on how to use EMS to execute idealised cases","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"export EMS_DIR=$HOME/metapp/ems\nexport PATH=${EMS_DIR}/bin:$PATH\nexport PYTHONPATH=${EMS_DIR}\nmkdir $HOME/ems_exec\ncd $HOME/ems_exec\ncp ${EMS_DIR}/share/config/config_46h1_HARMAROME_DEV.py .\n### edit his file to point to your binaries\nems_list_cases.py\nexport PYTHONPATH=$(pwd):$PYTHONPATH\n# MUSC.py -config config_46h1_HARMAROME_DEV.py -case $CASE -subcase $SUBCASE\nMUSC.py -config config_46h1_HARMAROME_DEV.py -case ARMCU -subcase REF","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"Output and log file can be found in $HOME/ems_exec/simulations/46t1/46h1_HARMAROME_DEV/${CASE}/${SUBCASE}","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/#EMS-Cases","page":"MUSC EMS","title":"EMS Cases","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"The table below lists the 
cases available in EMS and results of early tests.","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"Case Status Reference(s)\nGABLS1 REF ❌[1] \nGABLS1 MESONH ❌[1] \nGABLS4 STAGE3 ❌[1] \nGABLS4 STAGE3-SHORT ❌[1] \nAYOTTE 00SC ✔️ \nAYOTTE 00WC ✔️ \nAYOTTE 03SC ✔️ \nAYOTTE 05SC ✔️ \nAYOTTE 05WC ✔️ \nAYOTTE 24SC ✔️ \nIHOP REF ✔️ \nSCMS REF ✔️ \nRICO SHORT ✔️ \nRICO MESONH ✔️ \nARMCU REF ✔️ \nARMCU MESONH ✔️ \nARMCU E3SM ✔️ \nBOMEX REF ❌[2] \nMPACE REF ✔️ \nFIRE REF ✔️ \nSANDU REF ✔️ \nSANDU FAST ✔️ \nSANDU SLOW ✔️ \nAMMA REF ✔️ \nDYNAMO NSA3A ✔️ Takes a long time!\nDYNAMO NSA3A_D1 ✔️ \nDYNAMO NSA3A_D30 ❌[3] \nDYNAMO NSA3A_MJO1 ✔️ ","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"[1]: Issue with SURFEX namelist","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"[2]: Python issue L241 $EMS_DIR/ems/prep_init_forc_atm_GMAP.py","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"[3]: Missing data_input.nc ","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/#Analysing-results-using-Atlas","page":"MUSC EMS","title":"Analysing results using Atlas","text":"","category":"section"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"Still in the Apptainer container","category":"page"},{"location":"ForecastModel/SingleColumnModel/MUSC_EMS/","page":"MUSC EMS","title":"MUSC EMS","text":"cd $HOME\ngit clone git@github.com:ewhelan/SCM-atlas.git -b hirlam SCM-atlas_git/ewhelan/hirlam\nexport PATH=$HOME/SCM-atlas_git/ewhelan/hirlam/apptools:$PATH\nexport PYTHONPATH=$HOME/SCM-atlas_git/ewhelan/hirlam:$PYTHONPATH\nexport ATLAS_CONFIG=\"\"\nmkdir -p $HOME/Atlas\ncd $HOME/Atlas\nmkdir config\ncp $HOME/SCM-atlas_git/ewhelan/hirlam/examples/config/config_HARM.py config/\n### edit config/config_HARM.py\nrun_atlas1d.py -config config/config_HARM.py","category":"page"},{"location":"Build/Build_with_makeup/#makeup","page":"Makeup","title":"Building with MAKEUP","text":"","category":"section"},{"location":"Build/Build_with_makeup/#Background","page":"Makeup","title":"Background","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Makeup is an alternative mechanism to build the HARMONIE system Instead of using GMKPACK to build the libraries and binaries, standard GNU make (gmake) procedures are used, making build of executables an easier task. Also parallel make comes for free, thus enhanced turn-around time for build process. Furthermore, rebuilds and change of compiler flags – either per project and/or per source files basis – are now trivial to do.","category":"page"},{"location":"Build/Build_with_makeup/#MAKEUP-very-quickly","page":"Makeup","title":"MAKEUP very quickly","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The process of using the MAKEUP system in stand-alone fashion is described next.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Lets define two helper variables for the presentation purposes:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The variable $HARMONIE_SRC refers to the directory, where the AROME source code is situated. 
Another variable $HARMONIE_MAKEUP refers to the directory, where build configuration files and MAKEUP's scripts are located. ","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"#!sh\n# In ksh/bash\nexport HARMONIE_SRC=/some/path/harmonie/src\nexport HARMONIE_MAKEUP=/some/path/harmonie/util/makeup\n# In csh/tcsh\nsetenv HARMONIE_SRC /some/path/harmonie/src\nsetenv HARMONIE_MAKEUP /some/path/harmonie/util/makeup","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Usually $HARMONIE_MAKEUP is $HARMONIE_SRC/../util/makeup , but it doesn't have to be (e.g. in FMI's production system the $HARMONIE_MAKEUP is situated on a separate disk than the source code $HARMONIE_SRC) – and MAKEUP can handle this now.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The process of building HARMONIE executable contains just a few steps:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Goto directory $HARMONIE_MAKEUP and create/edit your configuration file (config.*). Beware of preferred naming convention:\nconfig.....\nRun MAKEUP's configure script under $HARMONIE_SRC (for example):\ncd $HARMONIE_SRC\n$HARMONIE_MAKEUP/configure $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\nIf applicable, adjust environment settings before launching of make. e.g., on some platforms, one needs to remember loading adequate modules, such as for DMI Cray XT5,\nmodule swap PrgEnv-pgi PrgEnv-pathscale # if pathscale is to be used\nmodule swap xt-mpt xt-mpt/3.5.0\nmodule swap xt-asyncpe/3.8 xt-asyncpe/3.4\nGoto $HARMONIE_SRC directory and type make (or gmake, if make is non-GNU make). Redirect output to a file & terminal:\ncd $HARMONIE_SRC\ngmake 2>&1 | tee logfile # ksh/bash\ngmake |& tee logfile # csh/tcsh","category":"page"},{"location":"Build/Build_with_makeup/#Using-MAKEUP-to-build-auxlibs-(bufr,-gribex,-rgb)","page":"Makeup","title":"Using MAKEUP to build auxlibs (bufr, gribex, rgb)","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"You can now build EMOS- and related libraries by using the MAKEUP. All you need to know is what is your sources. that you would use to build this stuff anyway. Pass that generic name to the MAKEUP's configure through -E option and you're in business. 
An example for FMI's Cray:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n$HARMONIE_MAKEUP/configure -E sources.crayxt $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\ngmake","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"This will create the extra libraries (so-called MY_SYSLIBS) libbufr.a, libgribex.a and librgb.a, and they will end up being linked into your executables, like MASTERODB.","category":"page"},{"location":"Build/Build_with_makeup/#Using-MAKEUP-to-build-also-util/gl-tools","page":"Makeup","title":"Using MAKEUP to build also util/gl -tools","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The HARMONIE utility package GL, located in the util/gl directory, can also be built as part of the MAKEUP process if the option -G is also given to configure:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n$HARMONIE_MAKEUP/configure -G $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\ngmake","category":"page"},{"location":"Build/Build_with_makeup/#Using-MAKEUP-to-build-also-Oulan-and/or-Monitor-tools","page":"Makeup","title":"Using MAKEUP to build also Oulan and/or Monitor -tools","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The HARMONIE utility package MONITOR and the observation preprocessor OULAN can also be built with MAKEUP. If you add the option -B, you will get the Oulan and Monitor executables built, too. Or you can be more selective and opt only for Oulan with -b oulan, or just Monitor with -b monitor:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n# Request for building both oulan & monitor, too\n$HARMONIE_MAKEUP/configure -B $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\n# .. or add oulan only :\n$HARMONIE_MAKEUP/configure -b oulan $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\n# .. or add monitor only :\n$HARMONIE_MAKEUP/configure -b monitor $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\ngmake","category":"page"},{"location":"Build/Build_with_makeup/#Building-objects-away-from-HARMONIE_SRC-directory","page":"Makeup","title":"Building objects away from $HARMONIE_SRC-directory","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"If you do not want to pollute your source directories with objects, which makes it hard to recognize which files are under the version control system SVN and which are not (although the SVN command svn -q st would tell you), then use the -P option. This will redirect compilations away from the source code, under $HARMONIE_SRC/../makeup.ZZZ, where ZZZ is the suffix of your config-file, e.g. 
FMI.cray_xt5m.pathscale.mpi+openmp.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The operation sequence is as follows:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n$HARMONIE_MAKEUP/configure [options] -P $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\ncd $HARMONIE_SRC/../makeup.FMI.cray_xt5m.pathscale.mpi+openmp/src\ngmake","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The drawback with this approach is that whenever there is an update in the master source directories, you need to run lengthy configure in order to rsync the working directory up to date. We may need to introduce a separate command for this to avoid full rerun of configure.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"You can also use lowercase -p option with argument pointing to a directory-root, where to compile:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n$HARMONIE_MAKEUP/configure [options] -p /working/path $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\ncd /working/path/src\ngmake","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Now, it is important to understand that this /working/path has no connection to version handling i.e. if you change something in your master copy (say : issue a svn up-command), then your working directory remains unaltered. To synchronize it, do the following:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd /working/path/src\ngmake rsync","category":"page"},{"location":"Build/Build_with_makeup/#More-details","page":"Makeup","title":"More details","text":"","category":"section"},{"location":"Build/Build_with_makeup/#Re-running-configure","page":"Makeup","title":"Re-running configure","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Afterwards you can rerun configure as many times as you wish. Please note that the very first time is always slowed (maybe 10 minutes) as interface blocks for arp/ and ald/ projects are generated.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Usually running configure many times is not necessary – not even when you have changed your config-file (!) – except when interface blocks needs to be updated/re-created (-c or -g options). For example, when subroutine/function call argument list has changed. 
Then the whole config+build sequence can be run under $HARMONIE_SRC as follows:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n# -c option: Check if *some* interface blocks need regeneration and regenerate\n$HARMONIE_MAKEUP/configure -c $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\n# -g option: Force to regenerate interface blocks \n# $HARMONIE_MAKEUP/configure -g $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\ngmake","category":"page"},{"location":"Build/Build_with_makeup/#Changing-the-number-of-tasks-for-compilation","page":"Makeup","title":"Changing the number of tasks for compilation","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The number of tasks used for gmake-compilations is set by default to 8. See NPES parameter in $HARMONIE_MAKEUP/defaults.mk To change the default, you can have two choices:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Add NPES to your config-file, for example set it to 2:\nNPES=2\nInvoke gmake with NPES parameter, e.g. set it to 10:\ngmake NPES=10","category":"page"},{"location":"Build/Build_with_makeup/#Inserting-DRHOOK-for-Meso-projects","page":"Makeup","title":"Inserting DRHOOK for Meso-projects","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"To insert DrHook profiling automatically for mpa/ and mse/ projects, reconfigure with -H option:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n$HARMONIE_MAKEUP/configure -H $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"You can also pick and choose either mpa/ or mse/ projects with -h option (can be supplied several times):","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\n$HARMONIE_MAKEUP/configure -h mpa $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\n$HARMONIE_MAKEUP/configure -h mse $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\n# The following are the same as if the option -H was used\n$HARMONIE_MAKEUP/configure -h mpa -h mse -h surfex $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\n$HARMONIE_MAKEUP/configure -h mpa:mse:surfex $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"In the future it may not be necessary to insert DrHook automagically, if the insertion has been done in the svn (version handling) level.","category":"page"},{"location":"Build/Build_with_makeup/#Speeding-up-compilations-by-use-of-RAM-disk","page":"Makeup","title":"Speeding up compilations by use of RAM-disk","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"To further speedup compilation and if you have several GBytes of Linux RAM-disk (/dev/shm) available, do the following:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Create your personal RAM-disk subdirectory and check available disk space\nmkdir /dev/shm/$USER\ndf -kh /dev/shm/$USER\nReconfigure with RAM-disk either by defining LIBDISK in your config-file or running\ncd 
$HARMONIE_SRC\n$HARMONIE_MAKEUP/configure -L /dev/shm/$USER $HARMONIE_MAKEUP/config.FMI.cray_xt5m.pathscale.mpi+openmp\nAlso define TMPDIR to point to /dev/shm/$USER to allow compiler specific temporary files on RAM-disk\n# In ksh/bash-shells:\nexport TMPDIR=/dev/shm/$USER\ngmake 2>&1 | tee logfile\n# In csh/tcsh-shells:\nsetenv TMPDIR /dev/shm/$USER\ngmake |& tee logfile","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Please note that the step-2 creates all libraries AND executablus under the directory pointed by the -L argument. Object files and modules still, however, are placed under corresponding source directories.","category":"page"},{"location":"Build/Build_with_makeup/#What-if-you-run-out-of-RAM-disk-space-?","page":"Makeup","title":"What if you run out of RAM-disk space ?","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Sometimes you may find that the disk space becomes limited in /dev/shm/$USER. Then you have an option to supply LIBDISK parameter directly to gmake-command without need to reconfigure:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"gmake LIBDISK=`pwd`","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"This usually increases the throughput time as creation of the AROME executable to disk rather than RAM-disk may be 5-10 times slower. But at least you won't run out of disk space.","category":"page"},{"location":"Build/Build_with_makeup/#How-is-ODB-related-stuff-handled-?","page":"Makeup","title":"How is ODB related stuff handled ?","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The Observational DataBase (ODB) is a complicated beast for good reasons. Unlike any other project, which produce just one library per project, correct use of ODB in variational data assimilation requires several libraries.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The trick to manage this with MAKEUP is to create a bunch of symbolic links pointing to $HARMONIE_SRC/odb/ -project directory. There will be one (additional) library for each link. And then we choose carefully the correct subdirectories and source codes therein to be compiled for each library.","category":"page"},{"location":"Build/Build_with_makeup/#Specific-ODB-libraries,-their-meaning-and-the-source-files-included","page":"Makeup","title":"Specific ODB-libraries, their meaning & the source files included","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Library Description Source files\nlibodb ODB core library lib/ & aux/ : [a-z]*.F90 [a-z]*.c\n module/ & pandor/module : *.F90\nlibodbport Interface between IFS (ARPEGE/ALADIN/AROME) & ODB cma2odb/ & bufr2odb/ : *.F90\n – also contains BUFR2ODB routines pandor/extrtovs & pandor/fcq & pandor/mandalay : *.F90\nlibodbdummy ODB-related dummies lib/ : [A-Z]*.F90 [A-Z]*.c\nlibodbmain ODB tools, main programs (C & Fortran) tools/ : [A-Z]*.F90 *.c *.F\nlibPREODB ERA40 database (not needed, but good for debugging) ddl.PREODB/*.sql , ddl.PREODB/*.ddl\nlibCCMA Compressed Central Memory Array database (minimization) ddl.CCMA/*.sql , ddl.CCMA/*.ddl\nlibECMA Extended Central Memory Array database (obs. screening) ddl.ECMA/*.sql , ddl.ECMA/*.ddl\nlibECMASCR Carbon copy of ECMA for obs. 
load balancing between PEs ddl.ECMASCR/*.sql , ddl.ECMASCR/*.ddl","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"From the file $HARMONIE_MAKEUP/configure you can also find how different files are nearly hand-picked for particular libraries. Search for block","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":" if [[ \"$d\" = @(odb|odbport|odbdummy|odbmain)]] ; then\n case \"$d\" in\n odb) case \"$i\" in\n lib|aux) files=$(\\ls -C1 [a-z]*.F90 [a-z]*.c 2>/dev/null) ;;\n module|pandor/module) files=$(\\ls -C1 *.F90 2>/dev/null) ;;\n esac ;;\n odbport) case \"$i\" in\n cma2odb|bufr2odb) files=$(\\ls -C1 *.F90 2>/dev/null) ;;\n pandor/extrtovs|pandor/fcq|pandor/mandalay) files=$(\\ls -C1 *.F90 2>/dev/null) ;;\n esac ;;\n odbdummy) [[ \"$i\" != \"lib\"]] || files=$(\\ls -C1 [A-Z]*.F90 [A-Z]*.c 2>/dev/null) ;;\n odbmain) [[ \"$i\" != \"tools\"]] || files=$(\\ls -C1 [A-Z]*.F90 *.c *.F 2>/dev/null) ;;\n esac\n elif [[ \"$d\" = @($case_odbs)]] ; then\n [[ \"$i\" != \"ddl.$d\"]] || {\n files=$(\\ls -C1 *.ddl *.sql 2>/dev/null)\n mkdepend=$CMDROOT/sfmakedepend_ODB\n }\n else\n ... ","category":"page"},{"location":"Build/Build_with_makeup/#Handling-SQL-query-and-data-layout-files","page":"Makeup","title":"Handling SQL-query and data layout files","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"For SQL-query compilations (ODB/SQL queries are translated into C-code for greater performance), odb98.x SQL-compiler executable is also built as a first thing in the MAKEUP process.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Queries and data definition layouts (DDL-files) are always under /ddl./ directory.","category":"page"},{"location":"Build/Build_with_makeup/#Miscellaneous-stuff","page":"Makeup","title":"Miscellaneous stuff","text":"","category":"section"},{"location":"Build/Build_with_makeup/#Selective-compilation","page":"Makeup","title":"Selective compilation","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"It is very easy to deviate from the generic compilation options for certain source files or even projects. 
If you want to change compiler option (say) from -O3 to -O2 for routine src/arp/pp_obs/pppmer.F90, you can add the following lines at the end of your config-file:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"pppmer.o: FCFLAGS := $(subst -O3,-O2,$(FCFLAGS))","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"If you want to apply this to all pppmer*.F90-routines, then you need to enter the following \"wildcard\"-sequence:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"pppme%.o: FCFLAGS := $(subst -O3,-O2,$(FCFLAGS))","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Note by the way that for some reason we need to use pppme%.o as the more natural (from Unix) pppmer%.o would choose only routines pppmertl.F90 and pppmerad.F90, not the routine pppmer.F90 at all!","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Applying different compiler flags for project (say) arp only, then one can put the following at the end of config-file:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"ifeq ($(PROJ),arp)\n%.o: FCFLAGS := $(subst -O3,-O2,$(FCFLAGS))\nendif","category":"page"},{"location":"Build/Build_with_makeup/#(Re-)building-just-one-project","page":"Makeup","title":"(Re-)building just one project","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Sometime you could opt for rebuilding only (say) the xrd-project i.e. libxrd.a. This can be done as follows:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"gmake PROJ=xrd","category":"page"},{"location":"Build/Build_with_makeup/#Cleaning-up-files","page":"Makeup","title":"Cleaning up files","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"You can clean up by","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"gmake clean","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"... or selectively just the project arp:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"gmake PROJ=arp clean","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"This clean does not wipe out makefiles i.e. you don't have to rerun configure after this.","category":"page"},{"location":"Build/Build_with_makeup/#Restoring-and-cleaning-up-the-state-of-HARMONIE_SRC","page":"Makeup","title":"Restoring and cleaning up the state of $HARMONIE_SRC","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The following command you can run only once before issuing another configure command. It will remove all related object and executable files as well as generated makefiles, logfiles etc. stuff which was generated by MAKEUP's configure :","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"cd $HARMONIE_SRC\ngmake veryclean\n\n# .. 
or alternatively :\n$HARMONIE_MAKEUP/unconfigure","category":"page"},{"location":"Build/Build_with_makeup/#Ignoring-errors","page":"Makeup","title":"Ignoring errors","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"Sometimes it is useful to enforce compilations even if one or more routines fail to compile. In such cases recommended syntax is:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"gmake -i\n\n# or not to mess up the output, use just one process for compilations\n\ngmake NPES=1 -i","category":"page"},{"location":"Build/Build_with_makeup/#Creating-precompiled-installation","page":"Makeup","title":"Creating precompiled installation","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"If you want to provide precompiled libraries, objects, source code to other users so that they do not have to start compilation from scratch, then make a distribution or precompiled installation as follows:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"gmake PRECOMPILED=/a/precompiled/rootdir precompiled","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"After this the stuff you just compiled ends up in directory /a/precompiled/rootdir with two subdirectories : src/ and util/. All executables are currently removed.","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"You can repeat this call, and it will just rsync the modified bits.","category":"page"},{"location":"Build/Build_with_makeup/#Update/check-your-interface-blocks-outside-configure","page":"Makeup","title":"Update/check your interface blocks outside configure","text":"","category":"section"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"The configure has options -c or -g to check up or enforce for (re-)creation of interface blocks of projects arp and ald. To avoid full and lengthy configure-run, you can just do the following:","category":"page"},{"location":"Build/Build_with_makeup/","page":"Makeup","title":"Makeup","text":"gmake intfb","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/#Monitoring-Harmonie-suites-with-Teleport","page":"Teleport","title":"Monitoring Harmonie suites with Teleport","text":"","category":"section"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"In order to monitor the progress of your Harmonie suite(s) at ECMWF the ecFlow GUI ecflow_ui can be used directly from your local PC/server. This relies on teleport and ssh port forwarding which is described in more detail below. ","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/#Open-Teleport-connection","page":"Teleport","title":"Open Teleport connection","text":"","category":"section"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"This relies on a Teleport connection to ECMWF. 
Further details on Teleport are available here:","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"[itops@reaserve ~]$ tsh status\n> Profile URL: https://jump.ecmwf.int:443\n Logged in as: itops@met.ie\n Cluster: jump.ecmwf.int\n Roles: *\n Logins: duit\n Valid until: 2021-03-23 22:00:35 +0000 UTC [valid for 11h21m0s]\n Extensions: permit-X11-forwarding, permit-agent-forwarding, permit-port-forwarding, permit-pty\n\n\n* RBAC is only available in Teleport Enterprise\n https://gravitational.com/teleport/docs/enterprise\n[itops@reaserve ~]$ ","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"To open a new Teleport connection, execute the following and submit your credentials via a browser:","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"[ewhelan@reaserve ~]$ tsh login --proxy=jump.ecmwf.int:443","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/#Log-in","page":"Teleport","title":"Log in","text":"","category":"section"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"To log in to ECMWF's Atos:","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"[itops@reaserve ~]$ ssh -X hpc-login","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"The Teleport connection to ECMWF is configured as follows:","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"[ewhelan@reaserve ~]$ cat .ssh/config \nHost hpc-login\n User dui\n IdentityFile ~/.tsh/keys/jump.ecmwf.int/eoin.whelan@met.ie\n ProxyCommand bash -c \"tsh login; ssh -W %h:%p %r@jump.ecmwf.int\"\n[ewhelan@reaserve ~]$ ","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/#Open-ecFlow-ports","page":"Teleport","title":"Open ecFlow ports","text":"","category":"section"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"The following opens a port to the ECMWF ecFlow server (user dui, ECF_PORT=3141), based on the instructions provided at https://confluence.ecmwf.int/display/ECFLOW/Teleport+-+using+local+ecflow_ui. In a new terminal:","category":"page"},{"location":"System/ECMWF/ECMWF_teleport/","page":"Teleport","title":"Teleport","text":"ssh hpc-login -C -N -L 3141:ecflow-gen-dui-001:3141","category":"page"},{"location":"DataAssimilation/ObservationOperators/#Observation-operators","page":"HOP_DRIVER","title":"Observation operators","text":"","category":"section"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"This documentation summarises the observation operator in HARMONIE and the use of the HOP_DRIVER tool. The test harness, HOP_DRIVER, calls the observation operator and generates FG departures without calling any model code or initialising any model modules. Firstly, the IFS is used to dump a single-observation gom_plus to file from the 1st trajectory of an experiment. Dumping multiple observations would require a more complex and full-featured dump (good file format, multi-process parallel). 
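Once the port forwarding command above is running, the local ecFlow client and GUI can reach the remote server through localhost. A minimal sketch, assuming ecFlow is installed on your local machine; the ECF_HOST/ECF_PORT values simply mirror the forwarded port:

```bash
# In a terminal on your local PC, with the ssh tunnel from above still open
export ECF_HOST=localhost
export ECF_PORT=3141
ecflow_client --ping   # check that the remote ecFlow server answers through the tunnel
ecflow_ui &            # in the GUI, add a server with host localhost and port 3141
```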
For code refactoring HOP_DRIVER can be used to test changes to the observation operator of a particular observation type.","category":"page"},{"location":"DataAssimilation/ObservationOperators/#HARMONIE-and-HOP_DRIVER","page":"HOP_DRIVER","title":"HARMONIE and HOP_DRIVER","text":"","category":"section"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"The HOP_DRIVER program was first added to CY42R2 code. The tool was initially implemented to test refactoring of the IFS observation operator code src/arpifs/op_obs/hop.F90. Instructions on how to prepare the code and run HOP_DRIVER using HARMONIE are outlined below. Presentation made at [wiki:HirlamMeetings/ModelMeetings/ObOpWorkshop OOPS Observation Operator Workshop] may provide some useful background information.","category":"page"},{"location":"DataAssimilation/ObservationOperators/#Comments-on-the-branch","page":"HOP_DRIVER","title":"Comments on the branch","text":"","category":"section"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"Code changes were required in order to compile cy42r2bf.04 + mods (provided by MF/ECMWF) in the HARMONIE system: [14312], [14325], [14326], [14330], [14331], [14332], [14333], [14334].\nChanges were made to makeup in order to compile HOP_DRIVER correctly: [14310], [14327], [14328], [14329], [14335], [14362], [14382], [14392].\nIncluded in [14362] is a change to ODBSQLFLAGS which is set to ODBSQLFLAGS=-O3 -C -UCANARI -DECMWF $(ODBEXTRAFLAGS) in order to use ECMWF flavoured ODB used by HOP_DRIVER\nOn cca GNU compilers 4.9 are not fully supported, ie I had to build GRIB-API and NetCDF locally using gcc/gfortran 4.9 on cca\nAn environment variable, HOPDIR, is used to define the location of necessary input data for HOP_DRIVER\nAn environment variable, HOPCOMPILER, is used by the HOP_driver script to define the compiler used. This is used to compare results.","category":"page"},{"location":"DataAssimilation/ObservationOperators/#HOPOBS:-amsua","page":"HOP_DRIVER","title":"HOPOBS: amsua","text":"","category":"section"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"Currently there is only one observation type, AMSU-A (HOPOBS=amsua), available for testing with HOP_DRIVER. Alan Geer (ECMWF) has already carried out the refactoring of the HOP code related to AMSU-A observations. A single observation is provided in the ECMA and is used to test the refactoring of the HOP code. 
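For convenience, the environment variables mentioned in the comments above can be collected in one place before running the tool. A minimal sketch with placeholder values; the data path is hypothetical and should be adjusted to wherever your HOP_DRIVER input actually resides:

```bash
# Illustrative values only; the path is a placeholder, not a system default
export HOPDIR=$PERM/hop_input_data   # location of the input data needed by HOP_DRIVER
export HOPCOMPILER=gfortran          # compiler label used when comparing with reference results
```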
To carry out the testing of the amsua refactoring HOPOBS should be set to amsua in ecf/config_exp.h.","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"reportype@hdr obstype@hdr sensor@hdr statid@hdr stalt@hdr date@hdr time@hdr degrees(lat) degrees(lon) report_status@hdr datum_status@body obsvalue@body varno@body vertco_type@body\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 12 173.28 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 12 158.86 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 3 227.40 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 3 260.82 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 256.90 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 239.60 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 12 NULL 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 3 217.69 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 209.39 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 214.05 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 223.02 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 234.42 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 245.14 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 1 257.18 119 3\n1007 7 3 ' 4' 832800 !20140131 215914 -29.5906 0.3113 1 12 227.91 119 3","category":"page"},{"location":"DataAssimilation/ObservationOperators/#HOP_DRIVER","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"","category":"section"},{"location":"DataAssimilation/ObservationOperators/#Using-HOP_DRIVER","page":"HOP_DRIVER","title":"Using HOP_DRIVER","text":"","category":"section"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"With LHOP_RESULTS=.TRUE. HOP_DRIVER will write results to a file called hop_results${MYPROC} for comparison between online and offline results. (The results file is opened by src/arpifs/var/taskob.F90. HOP_DRIVER results are written to hop_results${MYPROC} in src/arpifs/op_obs/hop.F90:","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":" :\n :\nIF(LHOP_RESULTS) THEN\n!$OMP CRITICAL\n ! 
Output for comparison between online and offline results:\n WRITE(CFILENAME,'(\"hop_results\",I4.4)') MYPROC\n OPEN(NEWUNIT=IU,FILE=CFILENAME,POSITION='APPEND',ACTION='WRITE',FORM='FORMATTED')\n DO JOBS = 1,KDLEN\n DO JBODY=1,IMXBDY\n IF (JBODY>ICMBDY(JOBS)) CYCLE\n IBODY = ROBODY%MLNKH2B(JOBS)+(JBODY-1)\n WRITE(IU,'(6I8,2F30.14)') MYPROC, KSET, JOBS, NINT(ROBHDR%DATA(JOBS,ROBHDR%SEQNO_AT_HDR)),&\n & NINT(ROBODY%DATA(IBODY,ROBODY%VERTCO_REFERENCE_1_AT_BODY)), &\n & NINT(ROBODY%DATA(IBODY,ROBODY%VARNO_AT_BODY)), ZHOFX(JOBS,JBODY), ZXPPB(JOBS,JBODY)\n\n ENDDO\n ENDDO\n CLOSE(IU)\n!$OMP END CRITICAL\nENDIF\n :\n :","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"The HOPdriver script (based a script provided by MF) sorts the contents of the `hopresults0001` file for comparison with some results made available by ECMWF/MF:","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":" :\n :\n#\n# Check HOP_DRIVER results (available for gfotran and intel)\n#\nln -s $HOPDIR/${HOPOBS}/results.$HOPCOMPILER .\ncat hop_results* | sort -k1,1n -k2,2n -k3,3n -k5,5n -k6,6n > results.driver\necho\ncmp -s results.$HOPCOMPILER results.driver\nif [ $? -eq 0] ; then\n echo \"RESULTS ARE STRICTLY IDENTICAL TO THE REFERENCE FOR HOPCOMPILER=$HOPCOMPILER :-)\"\nelse\n echo Compare exactly against the results dumped from hop:\n echo \"xxdiff results.$HOPCOMPILER results.driver &\"\n diff results.$HOPCOMPILER results.driver\n exit 1\nfi\n :\n :","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"On cca you will find useful output from HOP_DRIVER in cca:$TEMP/hm_home/rfexp/archive/HOPDRIVEROUT:","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"fort.4\nNODE.001_01\nhop_results0001\nresults.gfortran\nresults.driver","category":"page"},{"location":"DataAssimilation/ObservationOperators/#The-code","page":"HOP_DRIVER","title":"The code","text":"","category":"section"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"HOP_DRIVER is a short program written by Deborah Salmond (ECMWF) to test code changes made to the observation operator. The program src/arpifs/programs/hop_driver.F90 is summarised here.","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"The program sets up the model geometry and observations:","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":" :\n :\nCALL GEOMETRY_SET(YRGEOMETRY)\nCALL MODEL_SET(YRMODEL)\n\nCALL IFS_INIT('gc7a')\n\nCALL SUINTDYN\n\nCALL SUGEOMETRY(YRGEOMETRY) !From GEOMETRY_SETUP\n\nCALL SURIP(YRGEOMETRY%YRDIM) !From MODEL_CREATE\n\n! Set up Observations, Sets\nCALL SUDIMO(YRGEOMETRY,NULOUT) !From SU0YOMB\nCALL SUOAF !From SU0YOMB\nCALL SUALOBS !From SU0YOMB\nCALL SURINC !From SU0YOMB\nCALL SETUP_TESTVAR !From SU0YOMB\nCALL SUOBS(YRGEOMETRY) !From CNT1\nCALL ECSET(-1,NOBTOT,0) !From OBSV\nCALL SUPHEC(YRGEOMETRY,NULOUT)\n\n! 
Setup varbc (from cnt1.F90) and read VARBC.cycle\nCALL YVARBC%SETUP_TRAJ\n :\n :","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":"HOP_DRIVER then loops over the number of observation sets (NSETOT) and reads a GOM PLUS for each observation set. HRETR and HOP are then called:","category":"page"},{"location":"DataAssimilation/ObservationOperators/","page":"HOP_DRIVER","title":"HOP_DRIVER","text":" :\n :\nDO ISET=1,NSETOT\n IDLEN = MLNSET(ISET)\n IMXBDY = MAX(MMXBDY(ISET),1)\n\n ALLOCATE(ZHOFX(IDLEN,IMXBDY))\n ZHOFX=RMDI\n\n ! READ GOM_PLUS FROM DUMP\n CALL GOM_PLUS_READ_DUMP(YGP5,ISET)\n\n IF(IDLEN /= YGP5%NDLEN) THEN\n CALL ABOR1('Sets are incompatible')\n ENDIF\n\n :\n :\n :\n\n CALL HRETR(YRGEOMETRY%YRDIMV,IDLEN,IMXBDY,ISET,1,YGP5,YVARBC)\n\n CALL HOP(YRGEOMETRY%YRDIMV,YGP5,YVARBC,IDLEN,IMXBDY,ISET,1,LDOOPS=.TRUE.,PHOFX=ZHOFX)\n\n !write(0,*)'ZHOFX',ZHOFX\n DEALLOCATE(ZHOFX)\n\n CALL GOM_PLUS_DESTROY(YGP5)\n\nENDDO\n\n :\n :","category":"page"},{"location":"EPS/Setup/#eps-setup","page":"Setup","title":"Setup","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/#boundary-file-preparation","page":"Preparation","title":"Preparation of initial and boundary files","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/#Introduction","page":"Preparation","title":"Introduction","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"HARMONIE can be coupled with external models as IFS, ARPEGE, HIRLAM. Internally it is possible to nest the different ALADIN/ALARO/AROME with some restrictions. In the following we describe the host initial and boundary files are generated depending on different configurations. Boundary file preparation basically includes two parts: forecast file fetching and boundary file generation.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"The ECFLOW tasks for initial and boundary preparation","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#Boundary-strategies","page":"Preparation","title":"Boundary strategies","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"There are a number of ways to chose which forecast lengths you use as boundaries. The strategy is determined by BDSTRATEGY in ecf/config_exp.h and there are a number of strategies implemented.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"available : Search for available files in BDDIR adn try to keep forecast consistency. This is ment to be used operationally since it will at least keep your run going, but with old boundaries, if no new boundaries are available.\nsimulate_operational : Mimic the behaviour of the operational runs using ECMWF 6h old boundaries.\nsame_forecast : Use all boundaries from the same forecast, start from analysis\nanalysis_only : Use only analyses as boundaries. Note that BDINT cannot be shorter than the frequency of the analyses.\nlatest : Use the latest possible boundary with the shortest forecast length\nRCR_operational : Mimic the behaviour of the RCR runs, ie\n12h old boundaries at 00 and 12 and\n06h old boundaries at 06 and 18\njb_ensemble : Same as same_forecast but used for JB-statistics generation. 
With this you should export JB_ENS_MEMBER=some_number\neps_ec : ECMWF EPS members (on reduced Gaussian grid). It is only meaningful with ENSMSEL non-empty, i.e., ENSSIZE > 0","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"All the strategies are defined in scr/Boundary_strategy.pl. The script generates a file bdstrategy in your working directory that could look like:","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":" Boundary strategy\n\n DTG: 2011090618\n LL: 36\n BDINT: 3\n BDCYCLE: 6\n STRATEGY: simulate_operational\n BDDIR: /scratch/snh/hm_home/alaro_37h1_trunk/ECMWF/archive/@YYYY@/@MM@/@DD@/@HH@\nHOST_MODEL: ifs\nINT_BDFILE: /scratch/snh/hm_home/alaro_37h1_trunk/20110906_18/ELSCFHARMALBC@NNN@\n\n# The output bdstrategy file has the format of \n# NNN|YYYYMMDDHH INT_BDFILE BDFILE BDFILE_REQUEST_METHOD \n# where \n# NNN is the input hour\n# YYYYMMDDHH is the valid hour for this boundary\n# INT_BDFILE is the final boundary file\n# BDFILE is the input boundary file\n# BDFILE_REQUEST_METHOD is the method to the request BDFILE from e.g. MARS, ECFS or via scp\n\nSURFEX_INI| /scratch/snh/hm_home/alaro_37h1_trunk/20110906_18/SURFXINI.lfi \n000|2011090618 /scratch/snh/hm_home/alaro_37h1_trunk/20110906_18/ELSCFHARMALBC000 /scratch/snh/hm_home/alaro_37h1_trunk/ECMWF/archive/2011/09/06/12/fc20110906_12+006 MARS_umbrella -d 20110906 -h 12 -l 6 -t\n003|2011090621 /scratch/snh/hm_home/alaro_37h1_trunk/20110906_18/ELSCFHARMALBC001 /scratch/snh/hm_home/alaro_37h1_trunk/ECMWF/archive/2011/09/06/12/fc20110906_12+009 MARS_umbrella -d 20110906 -h 12 -l 9 -t\n...","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"Meaning that the if the boundary file is not found under BDDIR the command MARS_umbrella -d YYYYMMDD -h HH -l LLL -t BDDIR will be executed. A local interpretation could be to search for external data if your file is not on BDDIR. Like the example from SMHI:","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":" Boundary strategy\n\n DTG: 2011090112\n LL: 24\n BDINT: 3\n BDCYCLE: 06\n STRATEGY: latest\n BDDIR: /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/g05a/archive/@YYYY@/@MM@/@DD@/@HH@\nHOST_MODEL: hir\nINT_BDFILE: /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/20110901_12/ELSCFHARMALBC@NNN@\n EXT_BDDIR: smhi_file:/data/arkiv/field/f_archive/hirlam/G05_60lev/@YYYY@@MM@/G05_@YYYY@@MM@@DD@@HH@00+@LLL@H00M\nEXT_ACCESS: scp\n\n# The output bdstrategy file has the format of \n# NNN|YYYYMMDDHH INT_BDFILE BDFILE BDFILE_REQUEST_METHOD \n# where \n# NNN is the input hour\n# YYYYMMDDHH is the valid hour for this boundary\n# INT_BDFILE is the final boundary file\n# BDFILE is the input boundary file\n# BDFILE_REQUEST_METHOD is the method to the request BDFILE from e.g. 
MARS, ECFS or via scp\n\n# hh_offset is 0 ; DTG is \nSURFEX_INI| /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/20110901_12/SURFXINI.lfi \n000|2011090112 /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/20110901_12/ELSCFHARMALBC000 /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/g05a/archive/2011/09/01/12/fc20110901_12+000 scp smhi:/data/arkiv/field/f_archive/hirlam/G05_60lev/201109/G05_201109011200+000H00M \n003|2011090115 /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/20110901_12/ELSCFHARMALBC001 /nobackup/smhid9/sm_esbol/hm_home/ice_36h1_4/g05a/archive/2011/09/01/12/fc20110901_12+003 scp smhi:/data/arkiv/field/f_archive/hirlam/G05_60lev/201109/G05_201109011200+003H00M ","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"In this example an scp from smhi will be executed if the expected file is not in BDDIR. There are a few environment variables that one can play with in sms/confi_exp.h that deals with the initial and boundary files","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"HOST_MODEL : Tells the origin of your boundary data * ifs : ecmwf data * hir : hirlam data * ald : Output from aladin physics, this also covers arpege data after fullpos processing. * ala : Output from alaro physics * aro : Output from arome physics\nBDINT : Interval of boundaries in hours\nBDLIB : Name of the forcing experiment. Set\nECMWF to use MARS data\nRCRa to use RCRa data from ECFS\nOther HARMONIE/HIRLAM experiment\nBDDIR : The path to the boundary file. In the default location BDDIR=$HM_DATA/${BDLIB}/archive/@YYYY@/@MM@/@DD@/@HH@ the file retrieved from e.g. MARS will be stored in a separate directory. On could also consider to configure this so that all the retrieved files are located in your working directory $WRK. Locally this points to the directory where you have all your common boundary HIRLAM or ECMWF files.\nINT_BDFILE : is the full path of the interpolated boundary files. The default setting is to let the boundary file be removed by directing it to $WRK.\nINT_SINI_FILE : The full path of the initial surfex file. ","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"There are a few optional environment variables that could be used that are not visible in config_exp.h ","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"EXT_BDDIR : External location of boundary data. If not set rules are depending on HOST_MODEL\nEXT_ACCESS : Method for accessing external data. If not set rules are depending on HOST_MODEL\nBDCYCLE : Assimilation cycle interval of forcing data, default is 6h.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"More about this can be bounds in the Boundary_strategy.pl script.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"The bdstrategy file is parsed by the script ExtractBD. ","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"scr/ExtractBD Checks if data are on BDDIR otherwise copy from EXT_BDDIR. The operation performed can be different depending on HOST and HOST_MODEL. 
IFS data at ECMWF are extracted from MARS, RCR data are copied from ECFS.\nInput parameters: Forecast hour\nExecutables: none.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"In case data should be retrieved from MARS there is also a staging step. When calling MARS with the stage command we ask MARS to make sure the data are on disk. In HARMONIE we ask for all data for one day of forecasts (normally four cycles) at a time.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#Near-real-time-aerosols","page":"Preparation","title":"Near real time aerosols","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"The use of near real time aerosols requires the presence of aerosol fields in the boundary files.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"BDAERO : Origin of the aerosol fields\nnone : no aerosols (default configuration)\ncams : aerosols from CAMS.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"A bdstrategy_cams file is generated. After the data is retrieved, the files are merged with the files from the HOST_MODEL to produce the final boundary condition files.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#Initial-and-Boundary-file-generation","page":"Preparation","title":"Initial and Boundary file generation","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"To be able to start the model we need the variables defining the model state.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"T,U,V,PS in spectral space\nQ in gridpoint or spectral space","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"Optional:","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"Q_l, Q_i, Q_r, Q_g, Q_s, Q_h\nTKE","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"For the surface we need the different state variables for the different tiles. The scheme selected determines the variables.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"Boundary files (coupling files) for HARMONIE are prepared in two different ways depending on the nesting procedure defined by HOST_MODEL.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#Using-gl","page":"Preparation","title":"Using gl","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"If you use data from HIRLAM or ECMWF, gl will be called to generate boundaries. The generation can be summarized in the following steps:","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"Set up the geometry and what kind of fields to read depending on HOST_MODEL\nRead the necessary climate data from a climate file\nTranslate and interpolate the surface variables horizontally if the file is to be used as an initial file. All interpolation respects land sea mask properties. 
The soil water is not interpolated directly but interpolated using the Soil Wetness Index to preserve the properties of the soil between different models. The treatment of the surface fields is only done for the initial file.\nHorizontal interpolation of upper air fields as well as restaggering of winds.\nVertical interpolation using the same method (etaeta) as in HIRLAM\nConserve boundary layer structure\nConserve integrated quantities\nOutput to an FA file ( partly in spectral space )","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"gl is called by the script scr/gl_bd where we make different choices depending on PHYSICS and HOST_MODEL","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"When starting a forecast there are options to whether e.g. cloud properties and TKE should be read from the initial/boundary file through NREQIN and NCOUPLING. At the moment these fields are read from the initial file but not coupled to. gl reads them if they are available in the input files and sets them to zero otherwise. For a Non-Hydrostatic run the non-hydrostatic pressure departure and the vertical divergence are demanded as an initial field. The pressure departure is by definition zero if you start from a non-hydrostatic mode and since the error done when disregarding the vertical divergence is small it is also set to zero in gl. There are also a choice in the forecast model to run with Q in gridpoint or in spectral space.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"It's possible to use an input file without e.g. the uppermost levels. By setting LDEMAND_ALL_LEVELS=.FALSE. the missing levels will be ignored. This is used at some institutes to reduce the amount of data transferred for the operational runs. ","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#Using-fullpos","page":"Preparation","title":"Using fullpos","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"If you use data generated by HARMONIE you will use fullpos to generate boundaries and initial conditions. Here we will describe how it's implemented in HARMONIE but there are also good documentation on the gmapdoc site.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"fullpos","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"In HARMONIE it is done by the script scr/E927. It contains the following steps:","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"Fetcht climate files. Fullpos needs a climate file and the geometry definition for both the input and output domains. 
\nSet different moist variables in the namelists depending on whether you run AROME or ALADIN/ALARO.\nCheck if input data has Q in gridpoint or spectral space.\nDemand NH variables if we run NH.\nDetermine the number of levels in the input file and extract the correct levels from the definition in scr/Vertical_level.pl\nRun fullpos","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"E927 is also called from 4DVAR when the resolution is changed between the inner and outer loops.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#Generation-of-initial-data-for-SURFEX","page":"Preparation","title":"Generation of initial data for SURFEX","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"For SURFEX we have to fill the different tiles with correct information from the input data. This is called the PREP step in the SURFEX context. scr/Prep_ini_surfex creates an initial SURFEX file from an FA file if you run with SURFACE=surfex. ","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"Read more about SURFEX","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#Reading-SST/SIC-information","page":"Preparation","title":"Reading SST/SIC information","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"It is possible to update sea-surface temperature (SST) and sea-ice concentration (SIC) from the LBC/coupling files. Since June 2018 and Cycle 45r1, ECMWF's IFS has used interactive ocean and sea ice components. It has been shown that use of these components \"... can significantly improve SST predictions in Europe, and as a result, predictions of near-surface air temperature\". The use of SST and SIC as surface boundary conditions has the potential to improve the quality of LAM NWP forecasts. See the ECMWF Newsletter article https://www.ecmwf.int/en/newsletter/156/news/effects-ocean-coupling-weather-forecasts describing examples of how the coupling improved IFS forecasts in the seas near Europe.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"The reading of these data is controlled by the SSTSIC_UPD switch in ecf/config_exp.h. With SSTSIC_UPD=no (default) SST/SIC are read at analysis time and not updated during the forecast. With SSTSIC_UPD=yes SST and SIC are read by the model from files created by the Interpol_sst_mll task in the Boundaries ecFlow family.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#Data-preparation","page":"Preparation","title":"Data preparation","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"The ecf/Interpol_sst_mll.ecf task reads the bdstrategy file described above and calls the scr/Interpol_sst_mll script to \"Interpolate SST/SIC from various sources to the model geometry for given MLL & INFILE\". The script uses gl (with the -sst3 option set) to carry out the interpolation.","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"Interpol_sst_mll input Description\n-h Command-line option. Model forecast hour\n-i Command-line option. Input file name\nSST_SOURCES Environment variable. 
External SST source used to set gl namelist\nEXT_SST_SIC_$LLL Hard-coded. Output filename expected by the code (LLL is the forecast length).","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/#The-code","page":"Preparation","title":"The code","text":"","category":"section"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"The reading of the SST/SIC input files (EXT_SST_SIC_$LLL) is controlled in the scripts by the SSTSIC_UPD environment variable. With it set to yes, the following NAMMCC namelist entries are set to .TRUE.:","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"&NAMMCC\n LMCC01_MSE=.TRUE.,\n LMCCECSST=.TRUE.,\n/","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"From src/arpifs/module/yommcc.F90:","category":"page"},{"location":"Boundaries/BoundaryFilePreparation/","page":"Preparation","title":"Preparation","text":"! LMCC01_MSE = .T. ===> THE CLIM.FIELD(S) ARE READ IN LBC FILE AND USED IN SURFEX\n :\n! LMCCECSST =.T. ===> SST FROM ECMWF (SST-ANA COMB with surf temp over seaice)\n! =.F. ===> SST FROM SURFTEMPERATURE","category":"page"},{"location":"ClimateGeneration/DownloadInputData/#download-input-data","page":"Input Data","title":"Download input data","text":"","category":"section"},{"location":"ClimateGeneration/DownloadInputData/","page":"Input Data","title":"Input Data","text":"Before you can start running HARMONIE experiments some input data (external from the code repository) needs to be available on your platform. The input data contains physiography data, topography information and climatological values determined from a one year ARPEGE assimilation experiment with a resolution of T79.","category":"page"},{"location":"ClimateGeneration/DownloadInputData/","page":"Input Data","title":"Input Data","text":" E923_DATA-harmonie-43h2.1.tar.gz: Climate and physiography data for atmospheric climate generation (E923)\n PGD-harmonie-43h2.1.tar.gz: Physiography data for SURFEX (PGD)\nGMTED2010-harmonie-43h2.1.tar.gz : Digital elevation model from UGS\n SOILGRID-harmonie-43h2.1.tar.gz: Soil type data from SOILGRIDS\n sat-harmonie-43h2.1.tar.gz: Constants for satellite information\nrttov7L54-harmonie-43h2.1.tar.gz : RTTOV constants\nECOCLIMAP second generation is available from here. It's also available on hpc-login:/ec/res4/hpcperm/hlam/data/climate/ECOCLIMAP2G\ntestbed-harmonie-43h2.1.tar.gz: Test data set with boundaries and observations for a small 50x50 domain]","category":"page"},{"location":"EPS/SPP/#spp","page":"SPP","title":"SPP in HarmonEPS","text":"","category":"section"},{"location":"EPS/SPP/#SPP-options-in-HARMONIE","page":"SPP","title":"SPP options in HARMONIE","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The Stochastically Perturbed Parameterizations scheme (SPP) introduces stochastic perturbations to values of chosen closure parameters representing efficiencies or rates of change in parameterized atmospheric (sub)processes. See here for more information. SPP is available since cy40h1.1.1.","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SPP is activated by setting SPP=yes in ecf/config_exp.h","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SPP uses the Stochastic Pattern Generator (SPG). 
The pattern characteristics are set by the following settings in config_exp.h:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":" SDEV_SPP = 1.0 # Standard deviation of the pattern\n TAU_SPP = 43200 # Time scale (seconds)\n XLCOR_SPP = 200000 # Length scale (m)\n SPGQ_SPP = 0.5 # Controls small vs. large scales \n SPGADTMIN_SPP=0.15 # initialization to ensure stationary statistics from the start of the integration\n SPGADTMAX_SPP=3.0 # initialization to ensure stationary statistics from the start of the integration\n NPATFR_SPP=-1 # Frequency to evolve pattern: >0 in timesteps, <0 in hours","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The parameters that can be perturbed are: ","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Perturbation Description Perturbs\nP1: LPERT_PSIGQSAT Perturb saturation limit sensitivity VSIGQSAT\nP3: LPERT_CLDDPTHDP Perturb threshold cloud thickness used in shallow/deep convection decision RFRMIN(20)\nP4: LPERT_ICE_CLD_WGT Perturb cloud ice content impact on cloud thickness RFRMIN(21)\nP5: LPERT_ICENU Perturb ice nuclei RFRMIN(9)\nP6: LPERT_KGN_ACON Perturb Kogan autoconversion speed RFRMIN(10)\nP7: LPERT_KGN_SBGR Perturb Kogan subgrid scale (cloud fraction) sensitivity RFRMIN(11)\nP8: LPERT_RADGR Perturb graupel impact on radiation RADGR\nP9: LPERT_RADSN Perturb snow impact on radiation RADSN\nP10:LPERT_RFAC_TWOC Perturb top entrainment RFAC_TWO_COEF\nP11:LPERT_RZC_H Perturb stable conditions length scale RZC_H\nP12:LPERT_RZL_INF Asymptotic free atmospheric length scale RZL_INF\nP13:LPERT_RSWINHF Short wave inhomogeneity factor RSWINHF\nP14:LPERT_RLWINHF Long wave inhomogeneity factor RLWINHF\nP15:LPERT_ALPHA Cloud droplet gamma distribution parameters alpha (over sea) ALPHA\nP16:LPERT_RZNUC Cloud droplet gamma distribution parameters nu (over land) RZNUC\nP17:LPERT_RZMFDRY Parameter for dry mass flux RZMFDRY\nP18:LPERT_RZMBCLOSURE Closure parameter for moist mass flux RZMBCLOSURE\nP19:LPERT_SLWIND Perturbing V(M) in the semi-lagrangian advection scheme SLWIND","category":"page"},{"location":"EPS/SPP/#Main-settings","page":"SPP","title":"Main settings","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The following describes the namelist flags for SPP. 
Namelist flags for SPP are found in the namelist NAMSPP in nam/harmonie_namelists.pm","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Activate perturbation of a parameter by setting LPERT_[PARAMETER] to TRUE in harmonie_namelists.pm, e.g.:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":" NAMSPP=>{\n 'LPERT_PSIGQSAT' => '.TRUE.,',\n ...\n },","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The size of the perturbation (the standard deviation of the parameter distribution) is set by CMPERT_[PARAMETER], e.g.:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":" NAMSPP=>{\n 'CMPERT_PSIGQSAT' => '0.3,',\n ...\n },","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The min/max range of each perturbed parameter can be controlled by the CLIP_[PARAMETER] namelist variable where the limits are specified as e.g.:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"NAMSPP=>{\n'CLIP_PSIGQSAT' => '0.0,0.1',\n...\n },","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Two distributions for the parameter values are possible: lognormal and pseudo uniform. Note: when a pseudo uniform distribution is used, the distribution may extend to negative values, which should be avoided. This can be assured by setting a clipping range (see above). Set LUNIFORM_[PARAMETER] to FALSE to use lognormal and to TRUE to use pseudo uniform, e.g.:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"NAMSPP =>{\n'LUNIFORM_PSIGQSAT' => '.FALSE.,',\n...\n },","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"When pseudo uniform is chosen, it is possible to shift the distribution by setting UNIFORM_OFFSET_[PARAMETER], where offset 0.5 is default, <0.5 moves the distribution to the right and >0.5 moves the distribution to the left e.g.:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"NAMSPP =>{\n'UNIFORM_OFFSET_PSIGQSAT' => '0.45,',\n...\n },","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"When lognormal distribution is chosen, the flag LLNN_MEAN1_[PARAMETER] decides if the mean or the median of the distribution corresponds to the unperturbed, deterministic value of the parameter. Set to FALSE to use the median and to TRUE to use the mean, e.g.:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"NAMSPP =>{\n'LLNN_MEAN1_PSIGQSAT' => '.TRUE.,',\n...\n },","category":"page"},{"location":"EPS/SPP/#Correlation-of-patterns","page":"SPP","title":"Correlation of patterns","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Any arbitrary number of parameters can be correlated by setting MP_X, where X is the name of the parameter, to the same number in in NAMSPP. Anticorrelation can be achieved by setting IC_X=-1. I.e. if we set","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"NAMSPP =>{\n...\n 'MP_RZC_H' => '77,',\n 'MP_RZL_INF' => '77,',\n 'MP_KGN_ACON' => '99',\n 'MP_KGN_SBGR' => '99,',\n 'IC_KGN_SBGR' => '-1,',\n...\n },","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"we will correlate RZC_H and RZL_INF and anticorrelate KGN_ACON with KGN_SBGR. The latter is achieved by setting IC_KGN_SBGR=-1. Note that the number for MP_X is used as an ID and should not be considered as a sequence number. 
It does not control the order of the patterns in the output or similar.","category":"page"},{"location":"EPS/SPP/#Define-the-time-and-length-scales-for-an-individual-pattern","page":"SPP","title":"Define the time and length scales for an individual pattern","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The pattern timescale TAU and lengthscale XLCOR are defined in NAMSPP and are then valid for all patterns. To specify the patterns individually we can set something like:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"NAMSPP =>{\n...\n 'YSPP_CONFIG_PAR(1)%TAG' => '\\'PSIGQSAT\\'',\n 'YSPP_CONFIG_PAR(1)%TAU' => '21600',\n 'YSPP_CONFIG_PAR(1)%XLCOR' => '150000',\n 'YSPP_CONFIG_PAR(2)%TAG' => '\\'KGN_ACON\\'',\n 'YSPP_CONFIG_PAR(2)%TAU' => '10800',\n 'YSPP_CONFIG_PAR(2)%XLCOR' => '350000',\n...\n },","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"where TAG is the name of the parameter to be perturbed. ","category":"page"},{"location":"EPS/SPP/#Recommended-SPP-settings-(cy43):","page":"SPP","title":"Recommended SPP settings (cy43):","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"CMPERT needs to be tuned for each parameter. CMPERT1 in the table below is the value that gives the range of values for the parameters recommended by the physics experts (when a lognormal distribution is used, if not stated otherwise). CMPERT is the value recommended for use. Tuning is ongoing, hence not all recommendations are in place yet. The well tested settings are in bold, preliminary suggestions are in italic.","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Perturbation Det. value Recom. range CMPERT (CMPERT1) Distribution Offset Mean Median Clipping Corr.\nPSIGQSAT 0.02 0-0.06 0.6 (0.3) Log-normal N.A. Mean No N.A.\nCLDDPTHDP 4000 0-10000 0.6 (0.3) Log-normal N.A. Mean No N.A.\nICE_CLD_WGT 1 0.2-2 1.2 (0.3) Uniform 0.5 N.A. 0.01 - 10 N.A.\nICENU 1 0.01-100 TBD (1.05) TBD TBD TBD TBD N.A.\nKGN_ACON 10 1-100 TBD (0.75) TBD TBD TBD TBD N.A.\nKGN_SBGR 1[1] 0.01-1 TBD (0.3) TBD TBD TBD 0., 1. N.A.\nRADGR 0.5 0-1 TBD (0.4) TBD TBD TBD 0., 2. N.A.\nRADSN 1 0-2 TBD (0.35) TBD TBD TBD 0., 2. N.A.\nRFAC_TWOC 2 0.5-3 TBD (0.3) TBD TBD TBD TBD N.A.\nRZC_H 0.11 0.1-0.2 1.05 (0.3) Uniform 0.475 N.A. 0.001, 100 Yes\nRZL_INF 40 20-200 0.45 (0.45) Log-normal N.A. Mean No Yes\nLPERT_RSWINHF 1? 0.95-1 Not tested \nLPERT_RLWINHF 1? 0.95-1 Not tested \nLPERT_ALPHA 3 0.2-5 1.4 (0.3) Uniform 0.5 N.A. TBD N.A.\nLPERT_RZNUC 3 0.2-10 0.6 (0.3) Log-normal N.A. Mean No N.A.\nLPERT_RZMFDRY 1 0.2-2 0.8 (0.3) Log-normal N.A. Mean No N.A.\nLPERT_RZMBCLOSURE 0.35 0.05-0.7 0.8 (0.3) Log-normal N.A. Mean No N.A.","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"[1]: Default/deterministic value of 1 and recommended range of 0.01-1 means the deterministic value is at the high end of the distribution. ","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"For more SPP details see src/arpifs/module/spp_mod.F90","category":"page"},{"location":"EPS/SPP/#Tendency-and-pattern-diagnostics","page":"SPP","title":"Tendency and pattern diagnostics","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Diagnostic output of tendencies and SPP patterns can be activated by setting TEND_DIAG=yes in ecf/config_exp.h. 
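As a minimal illustration (a sketch only; TEND_DIAG and ecf/config_exp.h are the switch and file named in the preceding sentence, and the comment is purely explanatory):

TEND_DIAG=yes                # Output tendency and SPP pattern diagnostics
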
Activation gives six new 3D-fields","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"FANAME Description\nSNNNPTENDU U-component tendencies\nSNNNPTENDV V-component tendencies\nSNNNPTENDT Temperature tendencies\nSNNNPTENDR Moisture tendencies\nSNNNMULNOISE SPPT pattern, same for all levels\nSNNNSPP_PATTERN SPP pattern, distribution as explained below","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The raw and scaled patterns are stored in the vertical column of SNNNSPP_PATTERN using the index given for Diagnostic number in the SPP initialization. Thus, with the standard settings and the correlated pattern exampel above we get in the standard log file NODE.001_01 available in the Forecast task output :","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"...\nPSIGQSAT pattern/diagnostic numbers are: 1 1\nCLDDPTHDP pattern/diagnostic numbers are: 2 2\nICE_CLD_WGT pattern/diagnostic numbers are: 3 3\nICENU pattern/diagnostic numbers are: 4 4\nKGN_ACON pattern/diagnostic numbers are: 5 5\nKGN_SBGR pattern/diagnostic numbers are: 5 6\nRADGR pattern/diagnostic numbers are: 6 7\nRADSN pattern/diagnostic numbers are: 7 8\nRFAC pattern/diagnostic numbers are: 8 9\nRZC_H pattern/diagnostic numbers are: 9 10\nRZCL_INF pattern/diagnostic numbers are: 9 11\n...","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The raw pattern is stored as 2N-1 and the scaled one as 2N where N is the Diagnostic number given in the log file. This gives us the following table:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Perturbation raw pattern scaled pattern\nPSIGQSAT S001SPP_PATTERN S002SPP_PATTERN\nCLDDPTHDP S003SPP_PATTERN S004SPP_PATTERN\nICE_CLD_WGT S005SPP_PATTERN S006SPP_PATTERN\nICENU S007SPP_PATTERN S008SPP_PATTERN\nKGN_ACON S009SPP_PATTERN S010SPP_PATTERN\nKGN_SBGR S011SPP_PATTERN S012SPP_PATTERN\nRADGR S013SPP_PATTERN S014SPP_PATTERN\nRADSN S015SPP_PATTERN S016SPP_PATTERN\nRFAC S017SPP_PATTERN S018SPP_PATTERN\nRZC_H S019SPP_PATTERN S020SPP_PATTERN\nRZL_INF S021SPP_PATTERN S022SPP_PATTERN","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"where the numbering may depend on the number of actively perturbed parameters.","category":"page"},{"location":"EPS/SPP/#Cy40h111-settings-(NB-only-log-normal-distribution-possible)","page":"SPP","title":"Cy40h111 settings (NB only log-normal distribution possible)","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"In config_exp.h:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SDEV_SPP = 3.0 # Standard deviation of the pattern","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Perturbation Det. 
value Recommended range CMPERT (CMPERT1) Clipping range Mean or median\nLPERT_PSIGQSAT 0.02, but set to 0.03 0-0.06 0.4 (0.1) No Mean\nLPERT_CLDDPTHDP 4000 1000-8000 0.4 (0.1) No Mean\nLPERT_ICE_CLD_WGT 1 0-2 0.4 (0.1) No Mean\nLPERT_ICENU 1 0.1-10 0.7 (0.35) No Median\nLPERT_KGN_ACON 10 2-50 0.5 (0.25) No Mean\nLPERT_KGN_SBGR 1, but set to 0.5 0.01-1 0.2 (0.1) 0.0 - 1.0 Mean\nLPERT_RADGR 0, but set to 0.5 0-1 0.3 (0.15) 0.0 - 2.0 Mean\nLPERT_RADSN 0, but set to 0.5 0-1 0.3 (0.15) 0.0 - 2.0 Mean\nLPERT_RFAC_TWOC 2 0.5-3 0.4 (0.1) No Mean\nLPERT_RZC_H 0.15 0.1-0.25 0.4 (0.1) No Mean\nLPERT_RZL_INF 100 30-300 0.6 (0.15) No Mean","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"In cy 40 the output of patterns and tendencies was as follows:","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"The raw and scaled patterns are stored in the vertical column of SNNNEZDIAG01 using the index given in the SPP initialization. Thus","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"...\nKGET_SEED_SPP: PSIGQSAT 10000 1841082593\n pattern 1 for PSIGQSAT using seed 1841082593\nKGET_SEED_SPP: CLDDPTH 10002 570790063\n pattern 2 for CLDDPTH using seed 570790063\nKGET_SEED_SPP: CLDDPTHDP 10004 980493159\n pattern 3 for CLDDPTHDP using seed 980493159\nKGET_SEED_SPP: ICE_CLD_WGT 10008 1362729695\n pattern 4 for ICE_CLD_WGT using seed 1362729695\n...","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"would give us","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Perturbation raw pattern scaled pattern\nPSIGQSAT S001EZDIAG01 S002EZDIAG01\nCLDDPTH S003EZDIAG01 S004EZDIAG01\nCLDDPTHDP S005EZDIAG01 S006EZDIAG01\nICE_CLD_WGT S007EZDIAG01 S008EZDIAG01","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"and so on","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SPPT pattern EZDIAG02 (same in all levels)","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SPP tendencies PtendU EZDIAG03","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SPP tendencies PtendV EZDIAG04","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SPP tendencies PtendT EZDIAG05","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"SPP tendencies PtendQ EZDIAG06","category":"page"},{"location":"EPS/SPP/#Suggestions-for-parameters-to-include-in-SPP:","page":"SPP","title":"Suggestions for parameters to include in SPP:","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"Parameter Description Deterministic value cy43 Suggested range of values suggestion for parameter to correlate with Person responsible for implementing\n Terminal fall velocities of rain, snow and graupel Sibbo\nRFRMIN(39) Depo_rate_graupel RFRMIN 39 and 40 should approximately respect log10C = -3.55 x + 3.89, see eq. 6.2 on p. 108 in the meso-NH documentation: [https://hirlam.org/trac/attachment/wiki/HarmonieSystemDocumentation/EPS/SPP/sciICE3doc_p3.pdf Doc] Pirkka\nRFRMIN(40) Depo_rate_snow) RFRMIN 39 and 40 should approximately respect log10C = -3.55 x + 3.89, see eq. 6.2 on p. 
108 in the meso-NH documentation: [https://hirlam.org/trac/attachment/wiki/HarmonieSystemDocumentation/EPS/SPP/sciICE3doc_p3.pdf Doc] Pirkka\nRFRMIN(16) Distr_snow_c to be correlated with RFRMIN(17) \nRFRMIN(17) Distr_snow_x to be correlated with RFRMIN(16) ","category":"page"},{"location":"EPS/SPP/#Experiments","page":"SPP","title":"Experiments","text":"","category":"section"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"List with cy43h22 experiments is here: [wiki:HarmonieSystemDocumentation/EPS/ExplistSPPcy43 List of experiments]","category":"page"},{"location":"EPS/SPP/","page":"SPP","title":"SPP","text":"A guide for running the tuning experiments is here: [wiki:HarmonieSystemDocumentation/EPS/HowtoSPPcy43 Guide]","category":"page"},{"location":"EPS/Howto/#eps-howto","page":"Howto","title":"How to run an ensemble experiment","text":"","category":"section"},{"location":"EPS/Howto/#Simple-configuration","page":"Howto","title":"Simple configuration","text":"","category":"section"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"Running an ensemble experiment is not very different from running a deterministic one. The basic instructions about setup are the same and will not be repeated here. ","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"What is different is that in ecf/config_exp.h one needs to pay attention to this particular section:","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"# *** Ensemble mode general settings. ***\n# *** For member specific settings use msms/harmonie.pm ***\nENSMSEL= # Ensemble member selection, comma separated list, and/or range(s):\n # m1,m2,m3-m4,m5-m6:step mb-me == mb-me:1 == mb,mb+1,mb+2,...,me\n # 0=control. ENSMFIRST, ENSMLAST, ENSSIZE derived automatically from ENSMSEL.\nENSINIPERT= # Ensemble perturbation method (bnd). Not yet implemented: etkf, hmsv, slaf.\nENSCTL= # Which member is my control member? Needed for ENSINIPERT=bnd. See harmonie.pm.\nENSBDMBR= # Which host member is used for my boundaries? Use harmonie.pm to set.\nENSMFAIL= # Failure tolerance for all members. Not yet implemented.\nENSMDAFAIL= # Failure tolerance for members doing own DA. Not yet implemented.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"In addition one should also look at BDSTRATEGY, choose eps_ec if you want to use EC EPS at the boundaries (this option gets the EC EPS data from the GLAMEPS ECFS archive). If you want to use SLAF see here.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"What really triggers EPS mode is having a non-empty ENSMSEL (ensemble member selection). The reason the specification looks a bit complicated is that our ensemble members do not necessarily have to be numbered consecutively from 0 or 1 and up, but can also be specified with steps. The rationale behind this is that we may want to e.g. downscale a subset of the 51 ECMWF EPS members, but not necessarily starting from their lowest number or taking them consecutively. ENSMSEL is a heritage from the Hirlam EPS system and has been retained in Harmonie.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"In the simplest case of consecutive numbering, say we want a control run (member 0) and 20 perturbed members. 
We can then put","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"ENSMSEL=0-20","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"Now assume that we still have a control and 20 members, but that we want to take only every second pair of the host EPS members, i.e., take 0,1,2, skip 3,4, take 5,6, skip 7,8 and so on. The following specifications are then equivalent:","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"ENSMSEL=0,1,2,5,6,9,10,13,14,17,18,21,22,25,26,29,30,33,34,37,38\nENSMSEL=0,1-37:4,2-38:4","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"In the second version we use the step option, so our list is 0, 1 to 37 in steps of 4 and 2 to 38 in steps of 4. The system will take care of transforming this into an ascending list for easier handling within the script system, but we don't have to worry about that.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"The ENSMSEL selection is still not totally flexible. It would not be possible to have more than one of our members having boundaries from the same member of the host model. This might be relevant in the case of multiple physics, and multiple control members. For this reason the variable ENSBDMBR has also been added (in [10953]). The usage of this variable is explained in the next section (advanced configuration).","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"For the rest of the ENS... variables, not everything planned is implemented by the time of this writing. The only valid choice (except empty) for ENSINIPERT (initial state perturbation method) is \"bnd\". This option means to take the perturbations of the first (interpolated) boundary file, and add these perturbations to a reference (control) analysis. This will involve the script scr/PertAna, a section of its header is quoted below:","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"#| Different perturbation methods are distinguished by\n#| ENSINIPERT. This script implements ENSINIPERT=bnd\n#|\n#| bnd: boundary data mode\n#| an($ENSMBR) = an(cntrl) + bnd1($ENSMBR) - bnd1(cntrl)\n#| where bnd1 denotes the first boundary file","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"Which member is the control member is specified by the variable ENSCTL.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"But how to specify that one (or more) member(s) run assimilation and others do not, or in other words, how to specify member specific values to the variables in config_exp.h? This is the topic of the next section.","category":"page"},{"location":"EPS/Howto/#Advanced-configuration,-member-specific-settings","page":"Howto","title":"Advanced configuration, member specific settings","text":"","category":"section"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"It would perhaps have been possible to also have member specific configuration in config_exp.h, but since perl is more flexible with lists than the shell, and since perl is already used extensively in the Harmonie system, it was decided to extend the handling of the template definition files in mini-SMS in such a way that every tdf can now also have an associated perl module to help in its interpretation. 
And, since after the changesets [10930] and [10932] there is no separate tdf for HarmonEPS anymore (harmonie.tdf is used also for EPS runs), the file that is used for member specific settings is thus msms/harmonie.pm.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"The idea of harmonie.pm is to be able to override some of the environment variables of config_exp.h with new values for selected members of the ensemble. This is achieved by populating the perl hash %env with key => value pairs. The keys are names of environment variables, like ANAATMO, ANASURF, PHYSICS etc. Only names that are present and exported in config_exp.h should be used as keys. Values can take four different forms:","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"A hash, i.e., a new set of key => value pairs. The syntax in this case is { m1 => val1, m2 => val2, ... }. The numbers m1, m2, etc. must be member numbers given in ENSMSEL. Order is irrelevant, and only members with values different from the default need be listed of course.\nAn array, where indices implicitly run from 0 and up. The syntax in this case is [ val0, val1, val2, ...]. Here the array should have as many values as members given in ENSMSEL, but if not, missing values will be recycled from the start of the array (as many times as necessary). Thus, using arrays will give values to all members, and order is important.\nA scalar (string). This string is subject to variable substitution, i.e., any occurrence of the substring @EEE@ will be replaced by the relevant 3-digit ensemble member number.\nA subroutine (reference), syntax is typically sub { my $mbr = shift; return \"something dependent on $mbr\"; }. The arguments given to the subroutine are the \"args\" of the invoking &Env('SOMEVAR',args) call (see below).","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"In addition to the hash %env, harmonie.pm also contains a subroutine Env. In msms/harmonie.tdf many earlier occurences of $ENV{SOMEVAR} have now been replaced by subroutine calls &Env('SOMEVAR','@EEE@'). The @EEE@ argument will be replaced by the relevant member number before invocation, and Env will check the hash %env for a member specific setting to possibly return instead of the default value $ENV{SOMEVAR}. There should normally be no need to make changes to the subroutine Env, putting entries into the hash %env ought to be enough.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"Note also that not every occurrence of $ENV{...} has been replaced by a corresponding &Env(...) in harmonie.tdf, only those variables that are most likely to have variations among members are changed. If you need variations in e.g. $HOST_MODEL, then harmonie.tdf needs to be updated so that those variations are respected within the ensemble (EEE) loops.","category":"page"},{"location":"EPS/Howto/#An-example","page":"Howto","title":"An example","text":"","category":"section"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"We will now look at one particular example, in order to (hopefully) make the descriptions above a bit more clear. Our intent is to have an ensemble with a mix of members with AROME and ALARO physics, with one control member and 10 perturbed members for each. The control members will both do their own 3DVAR assimilation, while perturbed members will have ANAATMO=blending. 
But with ENSINIPERT=bnd, the control analysis will be used also by the perturbed members. All members will do surface assimilation, but the forecast interval differs. The control members have a forecast interval of 6 hours (because of the 3D-Var), while the perturbed members have FCINT=12. To achieve this, we have the following settings in config_exp.h:","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"ANAATMO=blending\nANASURF=CANARI_OI_MAIN\nFCINT=12\nBDSTRATEGY=eps_ec\nENSMSEL=0-21\nENSINIPERT=bnd\nENSCTL=\nENSBDMBR=","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"In harmonie.pm our %env looks as follows:","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"%env = (\n 'ANAATMO' => { 0 => '3DVAR', 1 => '3DVAR' },\n 'FCINT' => { 0 => 6, 1 => 6 },\n 'PHYSICS' => [ 'arome','alaro','alaro','arome'],\n 'ENSCTL' => [ '000', '001', '001', '000'],\n 'ENSBDMBR' => [ 0, 0, 1..20],\n\n### Normally NO NEED to change the variables below\n 'ARCHIVE' => '${ARCHIVE}mbr@EEE@/',\n 'CLIMDIR' => '$CLIMDIR/mbr@EEE@',\n 'OBDIR' => '$OBDIR/mbr@EEE@',\n 'VFLDEXP' => '${EXP}mbr@EEE@',\n 'BDDIR' => sub { my $mbr = shift;\n if ($ENV{COMPCENTRE} eq 'ECMWF') {\n return '$BDDIR/mbr'.sprintf('%03d',$mbr);\n } else {\n return '$BDDIR/mbr'.sprintf('%03d',&Env('ENSBDMBR',$mbr));\n }\n },\n 'FirstHour' => sub { my $mbr = shift;\n return $ENV{StartHour} % &Env('FCINT',$mbr);\n }\n );","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"ANAATMO is straightforward, only the control members need an exception from blending, so using a hash is most appropriate. Similarly for FCINT. For PHYSICS we have used an array and the fact that the array will be recycled. Thus member 0 will be the AROME control, while member 1 will be the ALARO control. The reason why we did not simply put a 2-element array [ 'arome','alaro'] to be repeated is that since the ECMWF perturbations come in +/- pairs, we don't want all the '+' perturbations to be always with the same physics (and the '-' perturbations with the other type). Therefore, we added a second pair with the order reversed, to alternate +/- perturbations between AROME and ALARO members. ENSCTL follows the same pattern as PHYSICS. Note the need for 3-digit numbers in ENSCTL, at present this is necessary to avoid parsing errors in the preparation step of mini-SMS.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"Note also how we have used ENSBDMBR. For both the AROME control (member 0) and ALARO control (member 1), we have used the EC EPS control member 0 to provide boundaries. The syntax 1..20 is a perl shorthand for the list 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20.","category":"page"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"Note added after changeset [12537]: The setting of ENSBDMBR created a race condition in the boundary extraction for runs at ECMWF. This is hopefully solved by the new definition for BDDIR, which makes use of the possibility of having a subroutine to compute the member specific settings. 
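To make the value forms described above concrete, here is a small standalone Perl sketch of how a hash, array (with recycling), scalar (with @EEE@ substitution) or subroutine entry in %env could be resolved for a given member. The resolve helper and the FCINT-as-subroutine entry are hypothetical illustrations under those assumptions, not the actual Env code in harmonie.pm:

use strict; use warnings;

my %env = (
    'ANAATMO' => { 0 => '3DVAR', 1 => '3DVAR' },          # hash: exceptions for members 0 and 1 only
    'PHYSICS' => [ 'arome', 'alaro', 'alaro', 'arome' ],  # array: recycled over the member list
    'ARCHIVE' => '${ARCHIVE}mbr@EEE@/',                   # scalar: @EEE@ replaced by the 3-digit member number
    'FCINT'   => sub { my $mbr = shift; return $mbr <= 1 ? 6 : 12; },  # subroutine form (illustration)
);

# Hypothetical helper: return the member specific value, or the default from config_exp.h
sub resolve {
    my ($var, $mbr, $default) = @_;
    my $val = $env{$var};
    return $default                 unless defined $val;
    return $val->{$mbr} // $default if ref $val eq 'HASH';
    return $val->[ $mbr % @$val ]   if ref $val eq 'ARRAY';   # recycle a short array
    return $val->($mbr)             if ref $val eq 'CODE';
    ( my $s = $val ) =~ s/\@EEE\@/sprintf('%03d', $mbr)/ge;   # substitute the member number
    return $s;
}

print resolve('PHYSICS', 5, 'arome'), "\n";   # member 5 gets 'alaro' after recycling
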
Another example where a subroutine came in handy was for the setting of FirstHour.","category":"page"},{"location":"EPS/Howto/#Further-reading","page":"Howto","title":"Further reading","text":"","category":"section"},{"location":"EPS/Howto/","page":"Howto","title":"Howto","text":"More specific instructions and information about known problems can be found here.","category":"page"},{"location":"Build/Build_with_cmake/#Build-with-CMake","page":"CMake","title":"Build with CMake","text":"","category":"section"},{"location":"Build/Build_with_cmake/#Background","page":"CMake","title":"Background","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"CMake is a build system generator supporting multiple build systems and programming languages; Fortran in particular is a first-class citizen, allowing, for example, out-of-the-box handling of the inter-module dependencies. A build system generator here means that a description of the build procedure, written in the CMake-script language, is used by the cmake tool to generate the actual build system, for example using the Unix Makefiles or Ninja generator. Thus, all modifications should be performed on the CMake-script level and not within the generated build system, as these changes will be overwritten when re-running cmake at some point.","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"Why provide yet another alternative for building HARMONIE-AROME? Well, makeup does a very good job building the system; however, it's an in-house solution which has a number of limitations:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"makeup is an in-house build system, so there are components that require more maintenance compared to a standardized build tool\nmakeup uses a considerable number of sequential steps, which increase the total build time\nthe configure step takes quite some time; although in some cases it can be skipped, users have to remember when they must re-run configure and this dependency is not enforced by makeup\nnot all the dependencies are tracked by makeup, for example updating configure files does not trigger a re-build","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"In an attempt to fix these limitations of makeup, CMake was chosen as an alternative. CMake has mature Fortran support and improves upon some shortcomings of makeup with little effort (well, it obviously has its own fair share of quirks, but that's a different story...). Additionally, using CMake allows us to enforce usage requirements and dependencies between different components of HARMONIE-AROME; for example, it's a good idea to ensure that SURFEX routines do not directly call cloud microphysics functions. Currently makeup does not enforce these boundaries and this task is left to the developers who implement the new code. 
Of course, something like this can also be implemented with makeup, but it would require considerable development efforts.","category":"page"},{"location":"Build/Build_with_cmake/#Getting-started-with-CMake","page":"CMake","title":"Getting started with CMake","text":"","category":"section"},{"location":"Build/Build_with_cmake/#Selecting-the-CMake-based-build-system-when-installing-HARMONIE-AROME","page":"CMake","title":"Selecting the CMake-based build system when installing HARMONIE-AROME","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"If all the config files are available, building HARMONIE-AROME with CMake should be as simple as setting the BUILD_WITH variable when invoking Harmonie:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"config-sh/Harmonie install BUILD_WITH=cmake","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"or alternatively, setting the desired option in ecf/config_exp.h.","category":"page"},{"location":"Build/Build_with_cmake/#Building-HARMONIE-AROME-with-CMake-from-the-command-line","page":"CMake","title":"Building HARMONIE-AROME with CMake from the command line","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"Sometimes calling Harmonie install is not the best choice and one might want to compile the code from the command line. In this case compilation of HARMONIE-AROME with CMake consists of three individual steps:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"compiling the auxiliary libraries (gribex and such)\ncompiling the main code of HARMONIE-AROME\noptionally, compile some additional tools (for example, gl)","category":"page"},{"location":"Build/Build_with_cmake/#1.-Compiling-the-auxiliary-libraries","page":"CMake","title":"1. Compiling the auxiliary libraries","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"This step is rather straightforward, assuming that HARMONIE-AROME code is located under the path stored in the HM_LIB environment variable one can adapt the following snippet to compile all the required libraries:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"CMAKE_FLAGS=\"-DCONFIG_FILE=\"\nINSTALL_DIR=\"\"\n\nAUX_LIBS='bufr_405 gribex_370 rgb_001 dummies_006/mpidummy'\nfor project in $AUX_LIBS; do\n echo \"Compiling $project\"\n current_project_dir=$HM_LIB/util/auxlibs/$project\n current_build_dir=\"build-`echo $project | sed 's|/|-|g'`\"\n\n mkdir -p $current_build_dir && cd $current_build_dir\n\n # CMake build type can be changed to Debug, if needed\n cmake $current_project_dir -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR $CMAKE_FLAGS\n # Here -j tells CMake how many parallel compilation processes to use\n cmake --build . --target install -j16\n\n cd ..\ndone","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"If a specific config file is not there, you can try your luck with using generic config files provided for different compiler types. 
To do so, just drop the -DCONFIG_FILE from the list of CMake command line arguments and CMake will try to load a suitable configuration file, if available.","category":"page"},{"location":"Build/Build_with_cmake/#2.-Compiling-the-main-code-of-HARMONIE-AROME","page":"CMake","title":"2. Compiling the main code of HARMONIE-AROME","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"Following the procedure described in the previous step, one can use a similar approach to compile the main code (here, one of the generic configuration files is used, of course it can be replaced with a different one or dropped but it should be the same config file which was used to compile auxiliary libraries):","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"mkdir build && cd build\n# Configure and generate the build system\ncmake $HM_LIB/src \\\n -G Ninja # Use Ninja to build HARMONIE-AROME, drop to build with Makefiles\n -DCMAKE_BUILD_TYPE=Release \\\n -DCONFIG_FILE=$HM_LIB/util/util/cmake/config.GNU.cmake \\\n -Dbufr_DIR=$INSTALL_DIR/lib/cmake/bufr \\\n -Dgribex_DIR=$INSTALL_DIR/lib/cmake/gribex \\\n -Drgb_DIR=$INSTALL_DIR/lib/cmake/rgb \\\n -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR\n\n# Build and install HARMONIE-AROME\ncmake --build . --target install -j16","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"note: Note\nObviously, when compiling from command line, additional command line arguments might be provided to CMake at the configure step as needed. However, a preferred solution is to use a configuration file to handle as much of the machine-specific details as possible.","category":"page"},{"location":"Build/Build_with_cmake/#3.-Compiling-the-tools","page":"CMake","title":"3. Compiling the tools","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"The approach is the same as with the main code, however, you might want to add -Dharmonie_DIR=$INSTALL_DIR/lib/cmake/harmonie if the tool in question needs HARMONIE-AROME libraries for compilation.","category":"page"},{"location":"Build/Build_with_cmake/#Configuration-files","page":"CMake","title":"Configuration files","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"Configuration files, similarly to makeup, are used to provide compilation flags, define external libraries to use when compiling the code et cetera. Thus, having a correct configuration file is one of the key elements of successful building HARMONIE-AROME. The CMake-based build system of HARMONIE-AROME uses configuration files written in JSON format. JSON was chosen to make these files more declarative and, hopefully, easier to maintain and modify than plain CMake-script-based files would be.","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"The main config file, which is used to build auxiliary libraries and the main HARMONIE-AROME code should be placed under util/cmake/config directory. 
This file has the following top-level structure:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"{\n \"build_tools\":[],\n \"dependencies\":[],\n \"programs\":[],\n \"configure\":{},\n \"compile\":[],\n \"compile_single\":[],\n \"compile_double\":[],\n \"custom_compile\":{},\n \"link\":[]\n}","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"There, all the sections except configure, custom_compile and link are mandatory. In the following, a detailed description of each config file section is provided.","category":"page"},{"location":"Build/Build_with_cmake/#The-build_tools-section","page":"CMake","title":"The build_tools section","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"This section lists the external tools required for compiling HARMONIE-AROME, excluding compilers. Currently, this section should always contain the following two entries: FLEX and BISON, but in the future this list might be extended. So, currently this section is always defined as:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"\"build_tools\":[\"BISON\", \"FLEX\"]","category":"page"},{"location":"Build/Build_with_cmake/#The-dependencies-section","page":"CMake","title":"The dependencies section","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"This section provides a list of external (external here means \"not found within the src directory of HARMONIE-AROME\", so, for example, gribex is also an external library for the CMake build) libraries required to compile and link the HARMONIE-AROME code. Since finding a correct library can be a tricky task, this section allows a number of options for specifying external dependencies:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"You can completely rely on CMake and delegate all the work of finding a dependency to it. In this case, a dependency is added as a simple string to the dependencies section, for example:\n\"dependencies\":[\"OpenMP\", \"LAPACK\"]\nThis option is for packages like OpenMP which do not involve finding libraries located in unusual places, as often happens when using environment modules.\nYou can still rely on CMake to find the package, but provide a bit of detail on how to find it. In this case a dependency is added as a JSON object of the following form (using the NetCDF library as an example):\n{\n \"pkg\":\"NetCDF\",\n \"use_cmake_config\":false,\n \"components\":[\"C\",\"Fortran\"],\n \"hints\":[\"$ENV{NETCDF_DIR}\",\"$ENV{NETCDF_F_DIR}\"],\n \"cmake\":{\"NETCDF_USE_DEFAULT_PATHS\":true}\n}\nThere the use_cmake_config field tells CMake which mechanism it should use to find the library in question. When use_cmake_config is set to true, CMake will look for CMake configuration files installed with the library, which is the recommended option in modern CMake. Even though it is the option recommended by the CMake authors, not all libraries provide CMake configuration files, so just setting use_cmake_config to true does not work all the time (at least it works for the auxiliary libraries compiled with CMake). 
You might want to provide a -D<PackageName>_DIR=<path> argument to the cmake command when configuring the build if CMake fails to find a package.\nAnother alternative is setting use_cmake_config to false; then CMake will try to find the required dependency using the hand-written scripts provided by the authors of CMake (or found under the util/cmake directory of HARMONIE-AROME). These scripts usually do quite some work trying to find a dependency and sometimes fail even if the library is there, for example when it's located in a very unusual place or has an unexpected pkg-config name.\nWhen using \"use_cmake_config\":false one may add a components list if only a language-specific version of the dependency is wanted. For example, having:\n{\"pkg\":\"NetCDF\", \"use_cmake_config\":false, \"components\":[\"C\"]}\nCMake would not try to find the Fortran version of the NetCDF library, which can be useful sometimes. Use this way of defining external dependencies for libraries such as MPI, which can have multiple vendors and subtle differences between the libraries provided (for example CMake should be able to figure out the correct MPI libraries for both MPICH and Open-MPI).\nThe hints list tells CMake which directories it should check when looking for a library.\nnote: Note\nElements of the hints list are simply added to the <PackageName>_ROOT CMake variable. If CMake's find_package() does not use this variable, providing hints will have no effect.\nFinally, the cmake section provides a key-value set of elements, which will be converted to corresponding CMake variables set before calling find_package(). Thus, it can be used to control the behaviour of find_package.\nnote: Note\nVariables set in the cmake section are local to the current package and do not modify the global scope.\nWhen nothing of the above works, you can provide all the flags manually. To do so, use the following form for a dependency entry:\n{\n \"pkg\":\"HDF5\",\n \"raw_lib\":{\n \"include\":\"$ENV{HDF5_DIR}/include\",\n \"lib_directory\":\"$ENV{HDF5_DIR}/lib\",\n \"lib\":[\"-lhdf5hl_fortran\", \"-lhdf5_fortran\", \"-lhdf5_hl\", \"-lhdf5\"]\n }\n}\nwhere the raw_lib component provides all the needed include and link directories as well as the link libraries. If some fields of the raw_lib object are unneeded, they can be set to null:\n{\"pkg\":\"rt\", \"raw_lib\":{\"include\":null, \"lib_directory\":null, \"lib\":\"-lrt\"}}\nNote that all the members of the raw_lib object can be defined as lists:\n{\n \"pkg\":\"HDF5\",\n \"raw_lib\":{\n \"include\":[\"$ENV{HDF5_DIR}/include\",\"$ENV{HDF5_DIR}/include_fortran\"],\n \"lib_directory\":[\"$ENV{HDF5_DIR}/lib\",\"$ENV{HDF5_DIR}/lib64\"],\n \"lib\":[\"-lhdf5hl_fortran\", \"-lhdf5_fortran\", \"-lhdf5_hl\", \"-lhdf5\"]\n }\n}\nWhen providing the required libraries in the lib section one can skip the -l prefix, thus \"lib\":\"-lrt\" and \"lib\":\"rt\" have the same effect.\nSometimes it can be useful to define a dummy library in CMake without actually looking for the library files, for example when compiling a tool which uses only a subset of HARMONIE-AROME libraries. When loading HARMONIE-AROME as a CMake package all the targets associated with external dependencies should be present, but some of these dependencies might not be needed for successful linking (or they are added implicitly by the programming environment and adding them for the second time in CMake won't make any difference). 
In this case you can use the following:\n{\"pkg\":\"gribex\", \"dummy\":true}","category":"page"},{"location":"Build/Build_with_cmake/#The-programs-section","page":"CMake","title":"The programs section","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"This section provides a list of HARMONIE-AROME programs to build (excluding MASTERODB which is always built by the CMake build system), for example:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"\"programs\":[\"BATOR\", \"oulan\", \"ioassign\", \"LSMIX\"]","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"CMake will try to find the corresponding Fortran source files and will complain if unable to do so. Currently it is not possible to explicitly tell CMake via JSON config which program should be compiled from which source file. If CMake is unable to figure out how to compile a program the CMake-code should be altered to tell it how to do so.","category":"page"},{"location":"Build/Build_with_cmake/#The-configure-section","page":"CMake","title":"The configure section","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"This section provides various configure-time flags controlling the build system or selecting features. Currently in the main HARMONIE-AROME config file this section is defined as follows:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"\"configure\":{\n \"use_flexfix\":true\n , \"precision\":\"double\"\n},","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"There the use_flexfix option controls the usage of the flexfix wrapper, set it to true to use the flexfix wrapper when generating lexers for the Blacklist and ODB compilers. Having use_flexfix as false results in using the flex tool directly. The precision option controls the floating point precision of the build, with possible values of double and single. This is a mandatory option, removing it would result in a CMake fatal error at the configure time.","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"The configure file for gl has the following options in the configure section:","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"\"configure\":{\n \"use_aladin\":true\n , \"use_netcdf\":true\n , \"check_preferlocalconcepts_bug\":true\n}","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"Set use_aladin to true to compile with FA support (requires HARMONIE-AROME libraries). Set use_netcdf to true to enable NetCDF support in gl. Set check_preferlocalconcepts_bug to true to perform a configure-time auto-detection test checking whether the supplied eccodes version is affected by the preferLocalConcepts bug. 
This test can be skipped, although in such a case corresponding CPP definitions should be manually added to the config file if a 'bad' eccodes version is used.","category":"page"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"note: Note\nIf an option is removed from the configure section it will be treated by CMake as set to false in case of boolean flags or empty string for string options.","category":"page"},{"location":"Build/Build_with_cmake/#Adding-a-new-configure-option","page":"CMake","title":"Adding a new configure option","text":"","category":"section"},{"location":"Build/Build_with_cmake/","page":"CMake","title":"CMake","text":"There's no predefined list of configure section members of CMake JSON config, any element found in this section will be available as CONFIG_