Skip to content

Commit cc6bfb2

Browse files
committed
l2_to_l3 functionality from pypromice
1 parent 17245f7 commit cc6bfb2

File tree

6 files changed

+93
-103
lines changed

6 files changed

+93
-103
lines changed

setup.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -43,8 +43,9 @@
4343
'get_promice_data = pypromice.get.get_promice_data:get_promice_data',
4444
'get_l0tx = pypromice.tx.get_l0tx:get_l0tx',
4545
'join_l2 = pypromice.process.join_l2:join_l2',
46-
'join_l3 = pypromice.process.join_l2:join_l3',
46+
'join_l3 = pypromice.process.join_l3:join_l3',
4747
'get_l3 = pypromice.process.get_l3:get_l3',
48+
'l2_to_l3 = pypromice.process.l2_to_l3:l2_to_l3',
4849
'get_watsontx = pypromice.tx.get_watsontx:get_watsontx',
4950
'get_bufr = pypromice.postprocess.get_bufr:main',
5051
'get_msg = pypromice.tx.get_msg:get_msg'

src/pypromice/process/aws.py

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -509,6 +509,32 @@ def writeNC(outfile, Lx, col_names=None):
509509
names = list(Lx.keys())
510510
Lx[names].to_netcdf(outfile, mode='w', format='NETCDF4', compute=True)
511511

512+
def writeAll(outpath, station_id, l3_h, l3_d, l3_m, csv_order=None):
513+
'''Write L3 hourly, daily and monthly datasets to .nc and .csv
514+
files
515+
516+
outpath : str
517+
Output file path
518+
station_id : str
519+
Station name
520+
l3_h : xr.Dataset
521+
L3 hourly data
522+
l3_d : xr.Dataset
523+
L3 daily data
524+
l3_m : xr.Dataset
525+
L3 monthly data
526+
csv_order : list, optional
527+
List order of variables
528+
'''
529+
if not os.path.isdir(outpath):
530+
os.mkdir(outpath)
531+
outfile_h = os.path.join(outpath, station_id + '_hour')
532+
outfile_d = os.path.join(outpath, station_id + '_day')
533+
outfile_m = os.path.join(outpath, station_id + '_month')
534+
    for o, l in zip([outfile_h, outfile_d, outfile_m], [l3_h, l3_d, l3_m]):
535+
writeCSV(o+'.csv',l, csv_order)
536+
writeNC(o+'.nc',l)
537+
512538
def popCols(ds, names):
513539
'''Populate dataset with all given variable names
514540

src/pypromice/process/get_l3.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,11 +5,11 @@
55
from pypromice.process.aws import AWS
66

77
def parse_arguments_level():
8-
parser = ArgumentParser(description="AWS L2 processor")
8+
parser = ArgumentParser(description="AWS L2/L3 processor")
99

1010
parser.add_argument('-c', '--config_file', type=str, required=True,
1111
help='Path to config (TOML) file')
12-
parser.add_argument('-i', '--inpath', default='data', type=str, required=True,
12+
parser.add_argument('-i', '--inpath', type=str, required=True,
1313
help='Path to input data')
1414
parser.add_argument('-o', '--outpath', default=None, type=str, required=False,
1515
help='Path where to write output')
@@ -22,7 +22,7 @@ def parse_arguments_level():
2222
args = parser.parse_args()
2323
return args
2424

25-
def get_level():
25+
def get_l3():
2626
args = parse_arguments_level()
2727

2828
logging.basicConfig(
@@ -43,7 +43,7 @@ def get_level():
4343
else:
4444
m = args.metadata
4545

46-
# Define output path
46+
# Define input path
4747
station_name = args.config_file.split('/')[-1].split('.')[0]
4848
station_path = os.path.join(args.inpath, station_name)
4949
if os.path.exists(station_path):

src/pypromice/process/join_l2.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
import xarray as xr
55
from argparse import ArgumentParser
66
from pypromice.process import getVars, getMeta, addMeta, getColNames, \
7-
roundValues, resampleL2, writeAll
7+
roundValues, resample_dataset, writeAll
88
from pypromice.process.L1toL2 import correctPrecip
99

1010
def parse_arguments_join():
@@ -88,9 +88,9 @@ def join_l2():
8888

8989
# Get hourly, daily and monthly datasets
9090
print('Resampling L2 data to hourly, daily and monthly resolutions...')
91-
l2_h = resampleL2(all_ds, '60min')
92-
l2_d = resampleL2(all_ds, '1D')
93-
l2_m = resampleL2(all_ds, 'M')
91+
l2_h = resample_dataset(all_ds, '60min')
92+
l2_d = resample_dataset(all_ds, '1D')
93+
l2_m = resample_dataset(all_ds, 'M')
9494

9595
print(f'Adding variable information from {args.variables}...')
9696

src/pypromice/process/join_l3.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
import xarray as xr
55
from argparse import ArgumentParser
66
from pypromice.process import getVars, getMeta, addMeta, getColNames, \
7-
roundValues, resampleL3, writeAll
7+
roundValues, resample_dataset, writeAll
88
from pypromice.process.L1toL2 import correctPrecip
99

1010
def parse_arguments_join():
@@ -88,9 +88,9 @@ def join_l3():
8888

8989
# Get hourly, daily and monthly datasets
9090
print('Resampling L3 data to hourly, daily and monthly resolutions...')
91-
l3_h = resampleL3(all_ds, '60min')
92-
l3_d = resampleL3(all_ds, '1D')
93-
l3_m = resampleL3(all_ds, 'M')
91+
l3_h = resample_dataset(all_ds, '60min')
92+
l3_d = resample_dataset(all_ds, '1D')
93+
l3_m = resample_dataset(all_ds, 'M')
9494

9595
print(f'Adding variable information from {args.variables}...')
9696

src/pypromice/process/l2_to_l3.py

Lines changed: 53 additions & 90 deletions
Original file line numberDiff line numberDiff line change
@@ -1,109 +1,72 @@
11
#!/usr/bin/env python
2-
import os, unittest, pkg_resources
3-
import pandas as pd
4-
import numpy as np
2+
import os, logging, sys
53
import xarray as xr
64
from argparse import ArgumentParser
7-
from pypromice.process import getVars, getMeta, addMeta, getColNames, \
8-
roundValues, resampleL2, writeAll
9-
from pypromice.process.L1toL2 import correctPrecip
10-
from pypromice.process.L2toL3 import toL3
11-
from sys import exit
5+
import pypromice
6+
from pypromice.process.aws import AWS
127

138
def parse_arguments_l2_to_l3(debug_args=None):
14-
parser = ArgumentParser(description="AWS L3 script for the processing L3 data from L2 and merging the L3 data with its historical site. An hourly, daily and monthly L3 data product is outputted to the defined output path")
15-
parser.add_argument('-s', '--file1', type=str, required=True, nargs='+',
16-
help='Path to source L2 file')
9+
    parser = ArgumentParser(description="AWS L3 script for processing L3 "+
10+
"data from L2 and merging the L3 data with its "+
11+
"historical site. An hourly, daily and monthly L3 "+
12+
"data product is outputted to the defined output path")
13+
parser.add_argument('-c', '--config_file', type=str, required=True,
14+
help='Path to config (TOML) file')
15+
parser.add_argument('-i', '--inpath', type=str, required=True,
16+
help='Path to input data')
17+
parser.add_argument('-l', '--level_2', type=str, required=True,
18+
help='Path to Level 2 .nc data file')
19+
parser.add_argument('-o', '--outpath', default=None, type=str, required=False,
20+
help='Path where to write output')
21+
parser.add_argument('-v', '--variables', default=None, type=str,
22+
required=False, help='File path to variables look-up table')
23+
parser.add_argument('-m', '--metadata', default=None, type=str,
24+
required=False, help='File path to metadata')
25+
parser.add_argument('-g', '--gcnet_historical', default=None, type=str,
26+
required=False, help='File path to historical GC-Net data file')
27+
1728
# here will come additional arguments for the merging with historical stations
18-
parser.add_argument('-v', '--variables', default=None, type=str, required=False,
19-
help='Path to variables look-up table .csv file for variable name retained'''),
20-
parser.add_argument('-m', '--metadata', default=None, type=str, required=False,
21-
help='Path to metadata table .csv file for metadata information'''),
22-
parser.add_argument('-d', '--datatype', default='raw', type=str, required=False,
23-
help='Data type to output, raw or tx')
2429
args = parser.parse_args(args=debug_args)
25-
args.file1 = ' '.join(args.file1)
26-
args.folder_gcnet = ' '.join(args.folder_gcnet)
27-
args.folder_promice = ' '.join(args.folder_promice)
2830
return args
2931

30-
31-
def loadArr(infile):
32-
if infile.split('.')[-1].lower() in 'csv':
33-
df = pd.read_csv(infile)
34-
df['time'] = pd.to_datetime(df['time']).dt.tz_localize(None)
35-
df = df.set_index('time')
36-
ds = xr.Dataset.from_dataframe(df)
37-
38-
elif infile.split('.')[-1].lower() in 'nc':
39-
ds = xr.open_dataset(infile)
40-
41-
try:
42-
name = ds.attrs['station_name']
43-
except:
44-
name = infile.split('/')[-1].split('.')[0].split('_hour')[0].split('_10min')[0]
45-
46-
print(f'{name} array loaded from {infile}')
47-
return ds, name
48-
49-
def get_l3():
32+
def l2_to_l3():
5033
args = parse_arguments_l2_to_l3()
51-
52-
# Check files
53-
if os.path.isfile(args.file1):
54-
# Load L2 data arrays
55-
ds1, n1 = loadArr(args.file1)
56-
57-
# converts to L3:
58-
# - derives sensible heat fluxes
59-
# - more to come
60-
ds1 = toL3(ds1)
34+
logging.basicConfig(
35+
format="%(asctime)s; %(levelname)s; %(name)s; %(message)s",
36+
level=logging.INFO,
37+
stream=sys.stdout,
38+
)
39+
40+
# Define variables (either from file or pypromice package defaults)
41+
if args.variables is None:
42+
v = os.path.join(os.path.dirname(pypromice.__file__),'process/variables.csv')
43+
else:
44+
v = args.variables
6145

62-
# here will come the merging with historical data
46+
# Define metadata (either from file or pypromice package defaults)
47+
    if args.metadata is None:
48+
m = os.path.join(os.path.dirname(pypromice.__file__),'process/metadata.csv')
6349
else:
64-
print(f'Invalid file {args.file1}')
65-
exit()
50+
m = args.metadata
6651

67-
# Get hourly, daily and monthly datasets
68-
print('Resampling L3 data to hourly, daily and monthly resolutions...')
69-
l3_h = resampleL2(ds1, '60min')
70-
l3_d = resampleL2(ds1, '1D')
71-
l3_m = resampleL2(ds1, 'M')
72-
73-
print(f'Adding variable information from {args.variables}...')
74-
75-
# Load variables look-up table
76-
var = getVars(args.variables)
77-
78-
# Round all values to specified decimals places
79-
l3_h = roundValues(l3_h, var)
80-
l3_d = roundValues(l3_d, var)
81-
l3_m = roundValues(l3_m, var)
82-
83-
# Get columns to keep
84-
if hasattr(ds1, 'p_l'):
85-
col_names = getColNames(var, 2, args.datatype.lower())
52+
# Define input path
53+
station_name = args.config_file.split('/')[-1].split('.')[0]
54+
station_path = os.path.join(args.inpath, station_name)
55+
if os.path.exists(station_path):
56+
aws = AWS(args.config_file, station_path, v, m)
8657
else:
87-
col_names = getColNames(var, 1, args.datatype.lower())
58+
aws = AWS(args.config_file, args.inpath, v, m)
8859

89-
# Assign station id
90-
for l in [l3_h, l3_d, l3_m]:
91-
l.attrs['station_id'] = n1
9260

93-
# Assign metadata
94-
print(f'Adding metadata from {args.metadata}...')
95-
m = getMeta(args.metadata)
96-
l3_h = addMeta(l3_h, m)
97-
l3_d = addMeta(l3_d, m)
98-
l3_m = addMeta(l3_m, m)
99-
100-
# Set up output path
101-
out = os.path.join(args.outpath, site_id)
61+
# Define Level 2 dataset from file
62+
aws.L2 = xr.open_dataset(args.level_2)
10263

103-
# Write to files
104-
writeAll(out, site_id, l3_h, l3_d, l3_m, col_names)
105-
print(f'Files saved to {os.path.join(out, site_id)}...')
106-
# %%
64+
# Perform Level 3 processing
65+
aws.getL3()
66+
67+
# Write Level 3 dataset to file if output directory given
68+
if args.outpath is not None:
69+
aws.writeL3(args.outpath)
70+
10771
if __name__ == "__main__":
10872
l2_to_l3()
109-

0 commit comments

Comments
 (0)