|
################ This is the import statement required to reference scripts within the package
import os,sys,glob

# Candidate locations for the HolschuhLab code directory; whichever exists on
# this machine gets added to sys.path so NDH_Tools can be imported.
ndh_tools_path_opts = [
    '/mnt/data01/Code/',
    '/home/common/HolschuhLab/Code/'
]
for candidate in ndh_tools_path_opts:
    # Bugfix: these entries are directories, so test with isdir (the original
    # isfile check is always False for a directory, and the path was never added).
    if os.path.isdir(candidate):
        sys.path.append(candidate)
################################################################################################
| 10 | + |
| 11 | +from collections.abc import Iterable |
| 12 | +import numpy as np |
| 13 | +import NDH_Tools as ndh |
| 14 | + |
| 15 | + |
def cresis_dataaggregator(filelist,remove_totaldata=0,savename='',depthcap=0,at_samples=[],at_samples_type=0):
    """
    (C) Nick Holschuh - Amherst College -- 2022 ([email protected])

    Aggregate the relevant pieces of a list of CReSIS radar .mat files into a
    single data structure (and optionally write it back out as a .mat file).

    Parameters
    ----------
    filelist : list of str
        Filenames of the CReSIS radar files to read.
    remove_totaldata : int, optional
        Flag [0 or 1] dictating whether to drop (1) or keep (0) the radargrams.
    savename : str, optional
        If non-empty, the name of the .mat file to write the result to.
    depthcap : int, optional
        If > 0, the maximum depth-sample index to keep from each radargram.
    at_samples : list, optional
        Either a list of [start, end] along-track sample pairs (one per file),
        or an outline acting as bounds for the aggregated data.
    at_samples_type : int, optional
        0 if at_samples holds defined index pairs, 1 if it is an outline.

    Returns
    -------
    save_dict : dict
        Data_Vals       -- array with coordinate and profile information
        DV_Info         -- metadata describing the columns in Data_Vals
        start_indecies  -- row in Data_Vals where each new file starts
        filenames       -- the original filenames included in the aggregation
        Aggregated_Data -- per-file [Data, distance, Time] (empty when removed)
    """
    print_flag = 0
    start_flag = 1            # stays 1 until the first file seeds Data_Vals
    filenames = []
    Aggregated_Data = []
    start_indecies = [0]
    Data_Vals = []

    ###################### We loop through the list of input files
    for f_ind,fn in enumerate(filelist):

        radar_data = ndh.loadmat(fn)
        final_varlist = []

        ################## We create Surface/Bottom objects if needed
        # np.nan (not the removed np.NaN alias) keeps this NumPy-2.0 compatible
        if 'Surface' not in radar_data.keys():
            radar_data['Surface'] = radar_data['Latitude'].copy()*np.nan
        if 'Bottom' not in radar_data.keys():
            radar_data['Bottom'] = radar_data['Surface'].copy()*np.nan

        ################## Calculate Polarstereo coordinates
        xy = ndh.polarstereo_fwd(radar_data['Latitude'], radar_data['Longitude'])
        radar_data['x'] = xy['x']
        radar_data['y'] = xy['y']
        # NOTE(review): this re-projects already-projected x/y through
        # polarstereo_fwd; an along-track distance helper (e.g. a
        # distance_vector-style function) was presumably intended -- confirm
        # against the NDH_Tools API before relying on 'distance'.
        radar_data['distance'] = ndh.polarstereo_fwd(radar_data['x'],radar_data['y'])
        final_varlist.append('x')
        final_varlist.append('y')

        ################## We apply the depthcap, if prescribed
        if depthcap > 0:
            radar_data['Data'] = radar_data['Data'][:depthcap,:]
            radar_data['Time'] = radar_data['Time'][:depthcap]

        ################## This object is used to subset along track, if requested
        if len(at_samples) == 0:
            trace_index = np.arange(0,len(radar_data['Latitude']))
        else:
            if at_samples_type == 0:
                # +1 so the user-supplied end sample is inclusive
                trace_index = np.arange(at_samples[f_ind][0],at_samples[f_ind][1]+1)
            else:
                trace_index = np.where(ndh.within(np.stack([radar_data['x'],radar_data['y']]).T,at_samples))[0]
            radar_data['Data'] = radar_data['Data'][:,trace_index]

        ################## All objects that are the same shape as latitude get subset
        orig_len = len(radar_data['Latitude'])
        for key_opt in radar_data.keys():
            if isinstance(radar_data[key_opt],type(radar_data['Latitude'])):
                shape_array = np.array(radar_data[key_opt].shape)
                if len(shape_array) > 0:
                    # any dimension matching the trace count marks a per-trace variable
                    if np.max(shape_array == orig_len) == 1:
                        final_varlist.append(key_opt)

        for kk in final_varlist:
            # the length guard keeps duplicates in final_varlist (x/y appear
            # twice) from being subset a second time
            if len(radar_data[kk]) == orig_len:
                radar_data[kk] = radar_data[kk][trace_index]

        ################## Here we extract date info from the filename
        # expected pattern: <prefix>_YYYYMMDD_<segment>_<frame>.mat
        fn_parts = fn.split('/')[-1].split('.')[0].split('_')

        year = int(fn_parts[1][0:4])
        month = int(fn_parts[1][4:6])
        day = int(fn_parts[1][6:8])
        seg = int(fn_parts[2])
        frm = int(fn_parts[3])
        file_ind = f_ind


        ################## Then, we try and construct the object and concatenate everything. Some files had trouble with this
        temp_Data_Vals = np.vstack((radar_data['x'],radar_data['y'],radar_data['Latitude'],radar_data['Longitude'],radar_data['Elevation'],
                       radar_data['Surface'],radar_data['Bottom'],radar_data['GPS_time'],trace_index,
                       np.ones(radar_data['Latitude'].shape)*year, np.ones(radar_data['Latitude'].shape)*month, np.ones(radar_data['Latitude'].shape)*day,
                       np.ones(radar_data['Latitude'].shape)*seg, np.ones(radar_data['Latitude'].shape)*frm, np.ones(radar_data['Latitude'].shape)*file_ind)).T

        ################## The initial file starts the objects, subsequent files add to them
        if start_flag > 1:
            Data_Vals = np.concatenate((Data_Vals, temp_Data_Vals), axis=0)
            if print_flag == 1:
                print('Completed File '+str(f_ind)+' - '+fn)
        else:
            start_indecies = [0]
            Data_Vals = temp_Data_Vals
            start_flag = start_flag+1
            if print_flag == 1:
                print('Started with file '+str(f_ind)+' - '+fn)

        ################## The start index for each new file is logged, along with the file name
        start_indecies.append(start_indecies[-1] + len(trace_index))
        filenames.append(fn)

        ################## Finally, the radargrams are appended if desired
        if remove_totaldata == 0:
            Aggregated_Data.append([radar_data['Data'],radar_data['distance'],radar_data['Time']])


    # the final appended value points past the last file, so drop it
    start_indecies = start_indecies[:-1]
    DV_Info = ['X Coordinate (ps)','Y Coordinate (ps)','Latitude','Longitude','Flight Elevation','Surface Pick','Bottom Pick','GPS Time','Trace Index','Year','Month','Day','Segment','Frame','File Index']

    save_dict = {'Data_Vals':Data_Vals,'DV_Info':DV_Info,'start_indecies':start_indecies,'filenames':filenames,'Aggregated_Data':Aggregated_Data}
    if len(savename) > 0:
        ndh.savemat(save_dict,savename)


    return save_dict
| 144 | + |
| 145 | + |
0 commit comments