diff --git a/.github/workflows/joss-pdf.yml b/.github/workflows/joss-pdf.yml index 76310246..cf8bbbdb 100644 --- a/.github/workflows/joss-pdf.yml +++ b/.github/workflows/joss-pdf.yml @@ -1,5 +1,7 @@ -on: [push] - +on: + workflow_dispatch: +#on: [push] + jobs: paper: runs-on: ubuntu-latest diff --git a/docs/guide_developer.rst b/docs/guide_developer.rst index b434932d..a67f1ad0 100644 --- a/docs/guide_developer.rst +++ b/docs/guide_developer.rst @@ -4,11 +4,8 @@ Developer guide Contributions, bug reports and fixes, documentation improvements, enhancements and ideas are welcome. A good starting place to look at is: -1. PROMICE-AWS-data-issues_, where we report suspicious or incorrect data -2. pypromice's GitHub Issues_, for an overview of known bugs, developments and ideas - -.. _PROMICE-AWS-data-issues: https://github.com/GEUS-Glaciology-and-Climate/PROMICE-AWS-data-issues -.. _Issues: https://github.com/GEUS-Glaciology-and-Climate/pypromice/issues +1. `PROMICE-AWS-data-issues `_, where we report suspicious or incorrect data +2. pypromice's `GitHub Issues `_, for an overview of known bugs, developments and ideas Data reports @@ -16,17 +13,17 @@ Data reports Automatic weather station (AWS) data from the Greenland Ice Sheet are often imperfect due to the complexity and conditions involved in installing and maintaining the AWS. -If you are using our AWS data and something seems suspicious or erroneous, you can check the PROMICE-AWS-data-issues_ space to see if has previously been flagged and/or fixed. If not, then please follow the conventions stated in the repository and open an issue. +If you are using our AWS data and something seems suspicious or erroneous, you can check the `PROMICE-AWS-data-issues `_ space to see if it has previously been flagged and/or fixed. If not, then please follow the conventions stated in the repository and open an issue. .. note:: - Data visualisations best demonstrate data problems and are greatly appreciated in solving data issues. 
If you are unsure, see examples of our closed issues in PROMICE-AWS-data-issues_ + Data visualisations best demonstrate data problems and are greatly appreciated in solving data issues. If you are unsure, see examples of our closed issues in `PROMICE-AWS-data-issues `_ Bug reports and enhancement requests ==================================== -Bug reports are essential to improving the stability and usability of pypromice. These should be raised on pypromice's GitHub Issues_. A complete and reproducible report is essential for bugs to be resolved easily, therefore bug reports must: +Bug reports are essential to improving the stability and usability of pypromice. These should be raised on pypromice's `GitHub Issues `_. A complete and reproducible report is essential for bugs to be resolved easily, therefore bug reports must: 1. Include a concise and self-contained Python snippet reproducing the problem. For example: @@ -47,9 +44,7 @@ Bug reports are essential to improving the stability and usability of pypromice. .. note:: - Before submitting an issue, please make sure that your installation is correct and working from either the pip installation or the main_ branch of the pypromice repository. - -.. _main: https://github.com/GEUS-Glaciology-and-Climate/pypromice/tree/main + Before submitting an issue, please make sure that your installation is correct and working from either the pip installation or the `main `_ branch of the pypromice repository. Contributing to pypromice @@ -61,9 +56,7 @@ You can work directly with pypromice's development if you have a contribution, s Forking ------- -In order to contribute, you will need your own fork of the pypromice GitHub repository to work on the code. Go to the repo_ and choose the ``Fork`` option. This now creates a copy in your own GitHub space, which is connected to the upstream pypromice repository. - -.. 
_repo: https://github.com/GEUS-Glaciology-and-Climate/pypromice +In order to contribute, you will need your own fork of the pypromice GitHub repository to work on the code. Go to the `repo `_ and choose the ``Fork`` option. This now creates a copy in your own GitHub space, which is connected to the upstream pypromice repository. Creating a development branch @@ -85,15 +78,13 @@ To contribute your changes to pypromice, you need to make a pull request from yo .. code:: console - $ git fetch upstream + $ git fetch $ git merge upstream/main -And then open a pull request as documented here_. Make sure to include the following in your pull request description: +And then open a pull request as documented `here `_. Make sure to include the following in your pull request description: 1. The aim of your changes 2. Details of what these changes are 3. Any limitations or further development needed Your pull request will be reviewed and, if valid and suitable, will be accepted. Following this, you will be listed as a contributor to pypromice! - -.. _here: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork diff --git a/docs/guide_user.rst b/docs/guide_user.rst index e16bc6d6..c6918611 100644 --- a/docs/guide_user.rst +++ b/docs/guide_user.rst @@ -9,9 +9,7 @@ Two components are needed to perform Level 0 to Level 3 processing: - A Level 0 dataset file (.txt), or a collection of Level 0 dataset files - A station config file (.toml) -Two test station datasets and config files are available with pypromice as an example of the Level 0 to Level 3 processing. These can be found on the Github repo here_, in the ``src/pypromice/test/`` directory in the cloned repo. - -.. 
_here: https://github.com/GEUS-Glaciology-and-Climate/pypromice/tree/joss-doc-edits/src/pypromice/test +Two test station datasets and config files are available with pypromice as an example of the Level 0 to Level 3 processing. These can be found on the Github repo `here `_, in the ``src/pypromice/test/`` directory in the cloned repo. These can be processed from Level 0 to a Level 3 data product as an ``AWS`` object in pypromice. @@ -66,7 +64,7 @@ The Level 0 to Level 3 processing can also be executed from a CLI using the ``ge .. code:: console - $ getL3 -c src/pypromice/test/test_config1.toml -i src/pypromice/test -o src/pypromice/test + $ get_l3 -c src/pypromice/test/test_config1.toml -i src/pypromice/test -o src/pypromice/test Loading our data @@ -74,7 +72,7 @@ Loading our data Import from Dataverse (no downloads!) ------------------------------------- -The automated weather station (AWS) datasets from the PROMICE and GC-Net monitoring programmes are openly available on our Dataverse_. These can be imported directly with pypromice, with no downloading required. +The automated weather station (AWS) datasets from the PROMICE and GC-Net monitoring programmes are openly available on the `GEUS Dataverse `_. These can be imported directly with pypromice, with no downloading required. .. code:: python @@ -89,9 +87,7 @@ All available AWS datasets are retrieved by station name. Use ``aws_names()`` to n = pget.aws_names() print(n) - -.. _Dataverse: https://dataverse.geus.dk/dataverse/AWS - + Download with pypromice ----------------------- @@ -99,17 +95,17 @@ AWS data can be downloaded to file with pypromice. Open up a CLI and use the ``g .. code:: console - $ getData -n KPC_U_hour.csv + $ get_promice_data -n KPC_U_hour.csv Files are downloaded to the current directory as a CSV formatted file. Use the ``-h`` help flag to explore further input variables. .. code:: console - $ getData -h + $ get_promice_data -h .. 
note:: - Currently, this functionality within pypromice is only for our hourly AWS data. For daily and monthly AWS data, please download these from the Dataverse_. + Currently, this functionality within pypromice is only for our hourly AWS data. For daily and monthly AWS data, please download these from the `GEUS Dataverse `_. Load from NetCDF file @@ -162,17 +158,9 @@ Once loaded, variables from an AWS dataset can be simply plotted with using pand .. note:: - Variable names are provided in the dataset metadata, or can be found on in our variables look-up table here_. For more complex plotting, please see either the xarray_ or pandas_ plotting documentation. - -.. _here: https://github.com/GEUS-Glaciology-and-Climate/pypromice/blob/main/src/pypromice/process/variables.csv -.. _xarray: https://docs.xarray.dev/en/stable/user-guide/plotting.html -.. _pandas: https://pandas.pydata.org/docs/user_guide/10min.html#plotting + Variable names are provided in the dataset metadata, or can be found in our `variables look-up table `_. For more complex plotting, please see either the `xarray `_ or `pandas `_ plotting documentation. .. warning:: - Plotting with either xarray or pandas requires the matplotlib_ package. This is not supplied as a dependency with pypromice, so please install matplotlib separately if you wish to do so. - -.. _matplotlib: https://matplotlib.org/ - -.. _matplotlib: https://matplotlib.org/ + Plotting with either xarray or pandas requires `matplotlib `_. This is not supplied as a dependency with pypromice, so please install matplotlib separately if you wish to do so. diff --git a/docs/index.rst b/docs/index.rst index fd7d26b0..72911f31 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,22 +1,18 @@ pypromice ========= -pypromice_ is designed for processing and handling PROMICE_ and GC-Net_ automated weather station (AWS) data. 
The PROMICE (Programme for Monitoring of the Greenland Ice Sheet) weather station network monitors glacier mass balance in the melt zone of the Greenland Ice Sheet, providing ground truth data to calibrate mass budget models. GC-Net (Greenland Climate Network) weather stations measure snowfall and surface properties in the accumulation zone, providing valuable knowledge on the Greenland Ice Sheet's mass gain and climatology. +`pypromice `_ is designed for processing and handling `PROMICE `_ and `GC-Net `_ automated weather station (AWS) data. The PROMICE (Programme for Monitoring of the Greenland Ice Sheet) weather station network monitors glacier mass balance in the melt zone of the Greenland Ice Sheet, providing ground truth data to calibrate mass budget models. GC-Net (Greenland Climate Network) weather stations measure snowfall and surface properties in the accumulation zone, providing valuable knowledge on the Greenland Ice Sheet's mass gain and climatology. -The PROMICE and GC-Net monitoring networks have unique AWS configurations and provide specialized data, therefore a toolbox is needed to handle and process their data. pypromice is the go-to toolbox for handling and processing climate and glacier datasets from the PROMICE and GC-Net monitoring networks. New releases of pypromice are uploaded alongside PROMICE AWS data releases to our Dataverse_ for transparency purposes and to encourage collaboration on improving our data. +The PROMICE and GC-Net monitoring networks have unique AWS configurations and provide specialized data, therefore a toolbox is needed to handle and process their data. pypromice is the go-to toolbox for handling and processing climate and glacier datasets from the PROMICE and GC-Net monitoring networks. New releases of pypromice are uploaded alongside PROMICE AWS data releases to the `GEUS Dataverse `_ for transparency purposes and to encourage collaboration on improving our data. 
If you intend to use PROMICE and GC-Net AWS data and/or pypromice in your work, please cite these publications below, along with any other applicable PROMICE publications where possible: -Fausto, R.S., van As, D., Mankoff, K.D., Vandecrux, B., Citterio, M., Ahlstrøm, A.P., Andersen, S.B., Colgan, W., Karlsson, N.B., Kjeldsen, K.K., Korsgaard, N.J., Larsen, S.H., Nielsen, S., Pedersen, A.Ø., Shields, C.L., Solgaard, A.M., and Box, J.E. (2021) Programme for Monitoring of the Greenland Ice Sheet (PROMICE) automatic weather station data, Earth Syst. Sci. Data, 13, 3819–3845, https://doi.org/10.5194/essd-13-3819-2021 +Fausto, R.S., van As, D., Mankoff, K.D., Vandecrux, B., Citterio, M., Ahlstrøm, A.P., Andersen, S.B., Colgan, W., Karlsson, N.B., Kjeldsen, K.K., Korsgaard, N.J., Larsen, S.H., Nielsen, S., Pedersen, A.Ø., Shields, C.L., Solgaard, A.M., and Box, J.E. (2021) Programme for Monitoring of the Greenland Ice Sheet (PROMICE) automatic weather station data, Earth Syst. Sci. Data, 13, 3819–3845, `https://doi.org/10.5194/essd-13-3819-2021 `_ -How, P., Wright, P.J., Mankoff, K., Vandecrux, B., Fausto, R.S. and Ahlstrøm, A.P. (2023) pypromice: A Python package for processing automated weather station data, Journal of Open Source Software, 8(86), 5298, https://doi.org/10.21105/joss.05298 +How, P., Wright, P.J., Mankoff, K., Vandecrux, B., Fausto, R.S. and Ahlstrøm, A.P. (2023) pypromice: A Python package for processing automated weather station data, Journal of Open Source Software, 8(86), 5298, `https://doi.org/10.21105/joss.05298 `_ -How, P., Lund, M.C., Nielsen, R.B., Ahlstrøm, A.P., Fausto, R.S., Larsen, S.H., Mankoff, K.D., Vandecrux, B., Wright, P.J. (2023) pypromice, GEUS Dataverse, https://doi.org/10.22008/FK2/3TSBF0 +How, P., Lund, M.C., Nielsen, R.B., Ahlstrøm, A.P., Fausto, R.S., Larsen, S.H., Mankoff, K.D., Vandecrux, B., Wright, P.J. (2023) pypromice, GEUS Dataverse, `https://doi.org/10.22008/FK2/3TSBF0 `_ -.. 
_pypromice: https://github.com/GEUS-Glaciology-and-Climate/pypromice -.. _PROMICE: https://promice.dk -.. _GC-Net: http://cires1.colorado.edu/steffen/gcnet/ -.. _Dataverse: https://dataverse.geus.dk/dataverse/PROMICE .. toctree:: :maxdepth: 2 diff --git a/docs/install.rst b/docs/install.rst index be8ae5fb..c5adb0aa 100644 --- a/docs/install.rst +++ b/docs/install.rst @@ -17,7 +17,7 @@ For the most up-to-date version, pypromice can be installed directly from the re Developer install ***************** -pypromice can be ran in an environment with the pypromice package cloned from GitHub_. +pypromice can be ran in an environment with the pypromice package cloned from the `GitHub repo `_. .. code:: console @@ -27,14 +27,11 @@ pypromice can be ran in an environment with the pypromice package cloned from Gi $ cd pypromice/ $ pip install . -pypromice is also provided with a conda environment configuration environment.yml_ for a more straightforward set-up, if needed: +pypromice is also provided with a `conda environment configuration file `_ for a more straightforward set-up, if needed: .. code:: console $ conda env create --file environment.yml -n pypromice - -.. _GitHub: https://github.com/GEUS-Glaciology-and-Climate/pypromice -.. _environment.yml: https://github.com/GEUS-Glaciology-and-Climate/pypromice/blob/main/environment.yml The package has inbuilt unit tests, which can be run to test the package installation: @@ -52,7 +49,7 @@ Additional dependencies Additional packages are required if you wish to use pypromice's post-processing functionality. -eccodes_ is the official package for BUFR encoding and decoding, which pypromice uses for post-process formatting. Try firstly to install with conda-forge like so: +`eccodes `_ is the official package for BUFR encoding and decoding, which pypromice uses for post-process formatting. Try firstly to install with conda-forge like so: .. 
code:: console @@ -60,7 +57,4 @@ eccodes_ is the official package for BUFR encoding and decoding, which pypromice .. note:: - If the environment cannot resolve the eccodes installation then follow the steps documented here_ to download eccodes and then install eccodes' python bindings using pip: ``pip3 install eccodes-python`` - -.. _eccodes: https://confluence.ecmwf.int/display/ECC/ecCodes+installation -.. _here: https://gist.github.com/MHBalsmeier/a01ad4e07ecf467c90fad2ac7719844a + If the environment cannot resolve the eccodes installation then follow the steps documented `here `_ to download eccodes and then install eccodes' python bindings using pip: ``pip3 install eccodes-python`` diff --git a/docs/modules.rst b/docs/modules.rst index 1470e5c6..61ec7c3c 100644 --- a/docs/modules.rst +++ b/docs/modules.rst @@ -1,6 +1,9 @@ process ======= +process.aws +----------- + .. automodule:: process.aws :members: :undoc-members: @@ -33,15 +36,32 @@ process.L2toL3 postprocess =========== +postprocess.csv2bufr +-------------------- + .. automodule:: postprocess.csv2bufr :members: :undoc-members: :show-inheritance: +qc +== + +qc.persistence +-------------- + +.. automodule:: qc.persistence + :members: + :undoc-members: + :show-inheritance: + get === -.. automodule:: get +get.get +------- + +.. automodule:: get.get :members: :undoc-members: :show-inheritance: @@ -49,6 +69,9 @@ get tx == +tx.tx +----- + .. automodule:: tx.tx :members: :undoc-members: diff --git a/docs/technical_process.rst b/docs/technical_process.rst index 24e56bfa..d431af84 100644 --- a/docs/technical_process.rst +++ b/docs/technical_process.rst @@ -20,10 +20,7 @@ Payload handling Payload decoder =============== -``PayloadFormat`` handles the message types and decoding templates. These can be imported from file, with two default CSV files provided with pypromice - payload_formatter.csv_ and payload_type.csv_. - -.. 
_payload_formatter.csv: https://github.com/GEUS-Glaciology-and-Climate/pypromice/blob/main/src/pypromice/tx/payload_formats.csv -.. _payload_type.csv: https://github.com/GEUS-Glaciology-and-Climate/pypromice/blob/main/src/pypromice/tx/payload_types.csv +``PayloadFormat`` handles the message types and decoding templates. These can be imported from file, with two default CSV files provided with pypromice - `payload_formatter.csv `_ and `payload_type.csv `_. Payload processing @@ -37,7 +34,7 @@ The following function can be executed from a CLI to fetch ``L0`` transmission m .. code:: console - $ getL0tx -a accounts.ini -p credentials.ini -c tx/config + $ get_l0tx -a accounts.ini -p credentials.ini -c tx/config -u last_aws_uid.ini -o tx .. note:: @@ -54,13 +51,13 @@ To process from L0>>L3, the following function can be executed in a CLI. .. code:: console - $ getL3 -c config/KPC_L.toml -i . -o ../../aws-l3/tx" + $ get_l3 -c config/KPC_L.toml -i . -o ../../aws-l3/tx" And in parallel through all configuration .toml files ``$imei_list`` .. code:: console - $ parallel --bar "getL3 -c ./{} -i . -o ../../aws-l3/tx" ::: $(ls $imei_list) + $ parallel --bar "get_l3 -c ./{} -i . -o ../../aws-l3/tx" ::: $(ls $imei_list) Station configuration @@ -109,6 +106,4 @@ The TOML config file has the following expectations and behaviors: .. note:: - Be aware the column names should follow those defined in the variables look-up table found here_. Any column names provided that are not in this look-up table will be passed through the processing untouched. - -.. _here: https://github.com/GEUS-Glaciology-and-Climate/pypromice/blob/main/src/pypromice/process/variables.csv + Be aware the column names should follow those defined in pypromice's `variables look-up table `_. Any column names provided that are not in this look-up table will be passed through the processing untouched. 
diff --git a/src/pypromice/tx/get_watsontx.py b/src/pypromice/tx/get_watsontx.py index c0693f7a..e5b74a27 100644 --- a/src/pypromice/tx/get_watsontx.py +++ b/src/pypromice/tx/get_watsontx.py @@ -11,7 +11,7 @@ from argparse import ArgumentParser from configparser import ConfigParser -import os, imaplib, email, unittest +import os, imaplib, email, re from glob import glob from datetime import datetime @@ -23,8 +23,8 @@ def parse_arguments_watson(): parser.add_argument('-a', '--account', default=None, type=str, required=True, help='Email account .ini file') parser.add_argument('-p', '--password', default=None, type=str, required=True, help='Email credentials .ini file') parser.add_argument('-o', '--outpath', default=None, type=str, required=False, help='Path where to write output (if given)') - parser.add_argument('-f', '--formats', default=None, type=str, required=True, help='Path to Payload format .csv file') - parser.add_argument('-t', '--types', default=None, type=str, required=True, help='Path to Payload type .csv file') + parser.add_argument('-f', '--formats', default=None, type=str, required=False, help='Path to Payload format .csv file') + parser.add_argument('-t', '--types', default=None, type=str, required=False, help='Path to Payload type .csv file') parser.add_argument('-u', '--uid', default=None, type=str, required=True, help='Last AWS uid .ini file') args = parser.parse_args() return args @@ -98,13 +98,16 @@ def get_watsontx(): name=None d=None - if name and 'Watson station' in name: - print(f'Watson station message, {d.strftime("%Y-%m-%d %H:%M:%S")}') + if name and ('Watson' in name or 'GIOS' in name): + print(f'Watson/GIOS station message, {d.strftime("%Y-%m-%d %H:%M:%S")}') + l0 = L0tx(message, formatter_file, type_file, sender_name=['emailrelay@konectgds.com','sbdservice']) if l0.msg: - out_fn = 'watson_station_tx.txt' + content, attachment = l0.getEmailBody() + attachment_name = str(attachment.get_filename()) + out_fn = re.sub(r'\d*\.dat$', '', 
attachment_name) + '.txt' out_path = os.sep.join((out_dir, out_fn)) print(f'Writing to {out_fn}') diff --git a/src/pypromice/tx/tx.py b/src/pypromice/tx/tx.py index a83b3099..983f2239 100644 --- a/src/pypromice/tx/tx.py +++ b/src/pypromice/tx/tx.py @@ -365,7 +365,7 @@ class L0tx(EmailMessage, PayloadFormat): '''L0 tranmission data object''' def __init__(self, email_msg, format_file=None, type_file=None, - sender_name=['sbdservice', 'ice@geus.dk'], #TODO don't hardcode sender names? + sender_name=['sbdservice', 'ice@geus.dk','emailrelay@konectgds.com'], #TODO don't hardcode sender names? UnixEpochOffset=calendar.timegm((1970,1,1,0,0,0,0,1,0)), CRbasicEpochOffset = calendar.timegm((1990,1,1,0,0,0,0,1,0))): '''L0tx object initialisation. @@ -454,9 +454,12 @@ def getFormat(self): bool Valid format flag ''' - if self.getFirstByte().isdigit() or 'Watson' in self.email_data['subject'] or (self.payload[:2] == '\n' and self.imei == 300234064121930): #TODO needed? + if self.getFirstByte().isdigit() or (self.payload[:2] == '\n' and self.imei == 300234064121930): #TODO needed? return None, None, None, None, -9999, False + elif 'watson' in self.email_data['subject'].lower() or 'gios' in self.email_data['subject'].lower(): + return None, None, None, None, -9999, False + else: bidx = ord(self.getFirstByte()) try: @@ -478,8 +481,8 @@ def checkByte(self, b): '''Check byte format against payload formatter object''' if ord(b) not in self.payload_format: print('Unrecognized first byte %s' %hex(ord(b))) - return False - else: + return False + else: return True def checkLength(self): @@ -521,7 +524,7 @@ def isSummer(self, DataLine): def isWatsonObservation(self, DataLine): '''Flag if message is Watson River measurement''' - return ('watson' in DataLine.lower()) + return ('watson' in DataLine.lower() or 'gios' in DataLine.lower()) def isWithInstance(self, DataLine): '''Flag if message is with instance'''