Skip to content

Commit

Permalink
Merge pull request #252 from GEUS-Glaciology-and-Climate/add-historical-data-to-l3
Browse files Browse the repository at this point in the history

Add historical data to l3
  • Loading branch information
BaptisteVandecrux authored Jun 26, 2024
2 parents c7ddb4e + 1263d7f commit b82b586
Show file tree
Hide file tree
Showing 24 changed files with 886 additions and 606 deletions.
47 changes: 0 additions & 47 deletions .github/workflows/process_l3_test.yml

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -31,20 +31,34 @@ jobs:
run: |
cd $GITHUB_WORKSPACE
git clone --depth 1 https://oauth2:${{ env.GITLAB_TOKEN }}@geusgitlab.geus.dk/glaciology-and-climate/promice/aws-l0.git
- name: Run L0 to L3 processing
- name: Run L0 to L2 processing
env:
TEST_STATION: KAN_U HUM
shell: bash
run: |
mkdir $GITHUB_WORKSPACE/out/
mkdir $GITHUB_WORKSPACE/out/L2/
mkdir $GITHUB_WORKSPACE/out/L0toL2/
for i in $(echo ${{ env.TEST_STATION }} | tr ' ' '\n'); do
python3 $GITHUB_WORKSPACE/main/src/pypromice/process/get_l2.py -c $GITHUB_WORKSPACE/aws-l0/tx/config/$i.toml -i $GITHUB_WORKSPACE/aws-l0/tx -o $GITHUB_WORKSPACE/out/L2/
python3 $GITHUB_WORKSPACE/main/src/pypromice/process/get_l2.py -c $GITHUB_WORKSPACE/aws-l0/tx/config/$i.toml -i $GITHUB_WORKSPACE/aws-l0/tx -o $GITHUB_WORKSPACE/out/L0toL2/
done
# mkdir $GITHUB_WORKSPACE/out/L3/
# - name: Run L0 to L2 processing
# env:
# TEST_STATION: KAN_U HUM
# shell: bash
# run: |
# mkdir $GITHUB_WORKSPACE/out/L2toL3/
# for i in $(echo ${{ env.TEST_STATION }} | tr ' ' '\n'); do
# python3 $GITHUB_WORKSPACE/main/src/pypromice/process/get_l2tol3.py -i $GITHUB_WORKSPACE/out/L2/$i/$i_hour.nc -o $GITHUB_WORKSPACE/out/ -t 60min
# python3 $GITHUB_WORKSPACE/main/src/pypromice/process/get_l2tol3.py -i $GITHUB_WORKSPACE/out/L0toL2/$i/$i_hour.nc -o $GITHUB_WORKSPACE/out/L2toL3 -t 60min
# done
- name: Run L0 to L3 processing
env:
TEST_STATION: KAN_U HUM
shell: bash
run: |
mkdir $GITHUB_WORKSPACE/out/L0toL3/
for i in $(echo ${{ env.TEST_STATION }} | tr ' ' '\n'); do
python3 $GITHUB_WORKSPACE/main/src/pypromice/process/get_l2.py -c $GITHUB_WORKSPACE/aws-l0/tx/config/$i.toml -i $GITHUB_WORKSPACE/aws-l0/tx -o $GITHUB_WORKSPACE/out/L2/
done
- name: Upload test output
uses: actions/upload-artifact@v3
with:
Expand Down
1 change: 1 addition & 0 deletions MANIFEST.in
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
include src/pypromice/test/*
include src/pypromice/resources/*
5 changes: 2 additions & 3 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,20 +31,19 @@
packages=setuptools.find_packages(where="src"),
python_requires=">=3.8",
package_data={
"pypromice.process": ["metadata.csv", "variables.csv"],
"pypromice.tx": ["payload_formats.csv", "payload_types.csv"],
"pypromice.qc.percentiles": ["thresholds.csv"],
"pypromice.postprocess": ["station_configurations.toml", "positions_seed.csv"],
},
install_requires=['numpy>=1.23.0', 'pandas>=1.5.0', 'xarray>=2022.6.0', 'toml', 'scipy>=1.9.0', 'Bottleneck', 'netcdf4', 'pyDataverse==0.3.1', 'eccodes', 'scikit-learn>=1.1.0'],
install_requires=['numpy~=1.23', 'pandas>=1.5.0', 'xarray>=2022.6.0', 'toml', 'scipy>=1.9.0', 'Bottleneck', 'netcdf4', 'pyDataverse==0.3.1', 'eccodes', 'scikit-learn>=1.1.0'],
# extras_require={'postprocess': ['eccodes','scikit-learn>=1.1.0']},
entry_points={
'console_scripts': [
'get_promice_data = pypromice.get.get_promice_data:get_promice_data',
'get_l0tx = pypromice.tx.get_l0tx:get_l0tx',
'join_l2 = pypromice.process.join_l2:join_l2',
'join_l3 = pypromice.process.join_l3:join_l3',
'get_l2 = pypromice.process.get_l2:get_l2',
'get_l3 = pypromice.process.get_l3:get_l3',
'get_l2tol3 = pypromice.process.get_l2tol3:get_l2tol3',
'get_watsontx = pypromice.tx.get_watsontx:get_watsontx',
'get_bufr = pypromice.postprocess.get_bufr:main',
Expand Down
9 changes: 5 additions & 4 deletions src/pypromice/process/L0toL1.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@
import numpy as np
import pandas as pd
import xarray as xr
import re

import re, logging
from pypromice.process.value_clipping import clip_values
logger = logging.getLogger(__name__)


def toL1(L0, vars_df, T_0=273.15, tilt_threshold=-100):
Expand All @@ -28,9 +28,10 @@ def toL1(L0, vars_df, T_0=273.15, tilt_threshold=-100):
-------
ds : xarray.Dataset
Level 1 dataset
'''
'''
assert(type(L0) == xr.Dataset)
ds = L0
ds.attrs['level'] = 'L1'

for l in list(ds.keys()):
if l not in ['time', 'msg_i', 'gps_lat', 'gps_lon', 'gps_alt', 'gps_time']:
Expand Down Expand Up @@ -247,7 +248,7 @@ def getPressDepth(z_pt, p, pt_antifreeze, pt_z_factor, pt_z_coef, pt_z_p_coef):
rho_af = 1145
else:
rho_af = np.nan
print('ERROR: Incorrect metadata: "pt_antifreeze" = ' +
logger.info('ERROR: Incorrect metadata: "pt_antifreeze" = ' +
f'{pt_antifreeze}. Antifreeze mix only supported at 50% or 100%')
# assert(False)

Expand Down
7 changes: 4 additions & 3 deletions src/pypromice/process/L1toL2.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ def toL2(
Level 2 dataset
'''
ds = L1.copy(deep=True) # Reassign dataset
ds.attrs['level'] = 'L2'
try:
ds = adjustTime(ds) # Adjust time after a user-defined csv files
ds = flagNAN(ds) # Flag NaNs after a user-defined csv files
Expand All @@ -85,7 +86,7 @@ def toL2(

# filtering gps_lat, gps_lon and gps_alt based on the difference to a baseline elevation
# right now baseline elevation is gapfilled monthly median elevation
baseline_elevation = (ds.gps_alt.to_series().resample('M').median()
baseline_elevation = (ds.gps_alt.to_series().resample('MS').median()
.reindex(ds.time.to_series().index, method='nearest')
.ffill().bfill())
mask = (np.abs(ds.gps_alt - baseline_elevation) < 100) & ds.gps_alt.notnull()
Expand Down Expand Up @@ -326,7 +327,7 @@ def smoothTilt(da: xr.DataArray, threshold=0.2):
# we calculate the moving standard deviation over a 3-day sliding window
# hourly resampling is necessary to make sure the same threshold can be used
# for 10 min and hourly data
moving_std_gap_filled = da.to_series().resample('H').median().rolling(
moving_std_gap_filled = da.to_series().resample('h').median().rolling(
3*24, center=True, min_periods=2
).std().reindex(da.time, method='bfill').values
# we select the good timestamps and gapfill assuming that
Expand All @@ -353,7 +354,7 @@ def smoothRot(da: xr.DataArray, threshold=4):
xarray.DataArray
smoothed rotation measurements from inclinometer
'''
moving_std_gap_filled = da.to_series().resample('H').median().rolling(
moving_std_gap_filled = da.to_series().resample('h').median().rolling(
3*24, center=True, min_periods=2
).std().reindex(da.time, method='bfill').values
# same as for tilt with, in addition:
Expand Down
1 change: 1 addition & 0 deletions src/pypromice/process/L2toL3.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ def toL3(L2, T_0=273.15, z_0=0.001, R_d=287.05, eps=0.622, es_0=6.1071,
1013.246.
'''
ds = L2
ds.attrs['level'] = 'L3'

T_100 = _getTempK(T_0) # Get steam point temperature as K

Expand Down
33 changes: 1 addition & 32 deletions src/pypromice/process/aws.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,9 +52,7 @@ def __init__(self, config_file, inpath, var_file=None, meta_file=None):
L0 = self.loadL0()
self.L0=[]
for l in L0:
n = write.getColNames(self.vars,
l.attrs['number_of_booms'],
l.attrs['format'])
n = write.getColNames(self.vars, l)
self.L0.append(utilities.popCols(l, n))

self.L1 = None
Expand Down Expand Up @@ -106,18 +104,6 @@ def getL3(self):
logger.info('Level 3 processing...')
self.L3 = toL3(self.L2)

# def resample(self, dataset):
# '''Resample dataset to specific temporal resolution (based on input
# data type)'''
# f = [l.attrs['format'] for l in self.L0]
# if 'raw' in f or 'STM' in f:
# logger.info('Resampling to 10 minute')
# resampled = resample_dataset(dataset, '10min')
# else:
# resampled = resample_dataset(dataset, '60min')
# logger.info('Resampling to hour')
# return resampled

def writeArr(self, dataset, outpath, t=None):
'''Write L3 data to .nc and .csv hourly and daily files
Expand All @@ -141,23 +127,6 @@ def writeArr(self, dataset, outpath, t=None):
else:
write.prepare_and_write(dataset, outpath, self.vars,
self.meta, '60min')

# def addAttributes(self, dataset):
# '''Add variable and attribute metadata

# Parameters
# ----------
# dataset : xr.Dataset
# Dataset (i.e. L2 or L3) object

# Returns
# -------
# d2 : xr.Dataset
# Data object with attributes
# '''
# d2 = utilities.addVars(dataset, self.vars)
# d2 = utilities.addMeta(d2, self.meta)
# return d2

def loadConfig(self, config_file, inpath):
'''Load configuration from .toml file
Expand Down
25 changes: 7 additions & 18 deletions src/pypromice/process/get_l2.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
from argparse import ArgumentParser
import pypromice
from pypromice.process.aws import AWS
from pypromice.process.write import prepare_and_write
from pypromice.process.load import getVars, getMeta
from pypromice.process.write import prepare_and_write

def parse_arguments_l2():
parser = ArgumentParser(description="AWS L2 processor")
Expand All @@ -30,38 +30,27 @@ def get_l2():
level=logging.INFO,
stream=sys.stdout,
)

# Define variables (either from file or pypromice package defaults)
if args.variables is None:
v = os.path.join(os.path.dirname(pypromice.__file__),'process/variables.csv')
else:
v = args.variables

# Define metadata (either from file or pypromice package defaults)
if args.variables is None:
m = os.path.join(os.path.dirname(pypromice.__file__),'process/metadata.csv')
else:
m = args.metadata

# Define input path
station_name = args.config_file.split('/')[-1].split('.')[0]
station_path = os.path.join(args.inpath, station_name)
if os.path.exists(station_path):
aws = AWS(args.config_file, station_path, v, m)
aws = AWS(args.config_file, station_path, args.variables, args.metadata)
else:
aws = AWS(args.config_file, args.inpath, v, m)
aws = AWS(args.config_file, args.inpath, args.variables, args.metadata)

# Perform level 1 and 2 processing
aws.getL1()
aws.getL2()

v = getVars(args.variables)
m = getMeta(args.metadata)
# Write out level 2
if args.outpath is not None:
if not os.path.isdir(args.outpath):
os.mkdir(args.outpath)
if aws.L2.attrs['format'] == 'raw':
prepare_and_write(aws.L2, args.outpath, getVars(), getMeta(), '10min')
prepare_and_write(aws.L2, args.outpath, getVars(), getMeta(), '60min')
prepare_and_write(aws.L2, args.outpath, v, m, '10min')
prepare_and_write(aws.L2, args.outpath, v, m, '60min')


if __name__ == "__main__":
Expand Down
19 changes: 12 additions & 7 deletions src/pypromice/process/get_l2tol3.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,10 @@
import xarray as xr
from argparse import ArgumentParser
import pypromice
from pypromice.process.load import getVars, getMeta
from pypromice.process.L2toL3 import toL3
from pypromice.process.load import getVars, getMeta
from pypromice.process.write import prepare_and_write
logger = logging.getLogger(__name__)

def parse_arguments_l2tol3(debug_args=None):
parser = ArgumentParser(description="AWS L3 script for the processing L3 "+
Expand Down Expand Up @@ -34,23 +35,27 @@ def get_l2tol3():
level=logging.INFO,
stream=sys.stdout,
)

# Define variables and metadata (either from file or pypromice package defaults)
v = getVars(args.variables)
m = getMeta(args.metadata)

# Define Level 2 dataset from file
with xr.open_dataset(args.inpath) as l2:
l2.load()
l2.load()

# Remove encoding attributes from NetCDF
for varname in l2.variables:
if 'encoding' in l2[varname].attrs:
del l2[varname].attrs['encoding']

if 'bedrock' in l2.attrs.keys():
l2.attrs['bedrock'] = l2.attrs['bedrock'] == 'True'
if 'number_of_booms' in l2.attrs.keys():
l2.attrs['number_of_booms'] = int(l2.attrs['number_of_booms'])

# Perform Level 3 processing
l3 = toL3(l2)

# Write Level 3 dataset to file if output directory given
v = getVars(args.variables)
m = getMeta(args.metadata)
if args.outpath is not None:
prepare_and_write(l3, args.outpath, v, m, '60min')
prepare_and_write(l3, args.outpath, v, m, '1D')
Expand Down
Loading

0 comments on commit b82b586

Please sign in to comment.