Sbet.py
"""
cBLUE (comprehensive Bathymetric Lidar Uncertainty Estimator)
Copyright (C) 2019
Oregon State University (OSU)
Center for Coastal and Ocean Mapping/Joint Hydrographic Center, University of New Hampshire (CCOM/JHC, UNH)
NOAA Remote Sensing Division (NOAA RSD)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Contact:
Christopher Parrish, PhD
School of Construction and Civil Engineering
101 Kearney Hall
Oregon State University
Corvallis, OR 97331
(541) 737-5688
Last Edited By:
Keana Kief (OSU)
July 25th, 2023
"""
import os
import time
import pandas as pd
from datetime import datetime
import progressbar
import logging
import numexpr as ne
logger = logging.getLogger(__name__)
"""
This class provides the functionality to load trajectory data into
cBLUE. Currently, the sbet files are expected to be ASCII files
that are exported from Applanix's PosPac software.
"""
class Sbet:
    def __init__(self, sbet_dir, sensor_name):
        """
        The data from all of the loaded sbet files are represented by
        a single Sbet object.  When the Sbet class is instantiated,
        the sbet object does not contain any sbet data.  The data
        are "loaded" (assigned to a field of the sbet object) when
        the user clicks the 'Load Sbet Data' button.

        :param str sbet_dir: directory containing the trajectory files
        :param str sensor_name: name of the sensor (e.g., "PILLS or RAMMS")
        """
        self.sbet_dir = sbet_dir
        self.sensor_name = sensor_name
        self.sbet_files = sorted(
            [
                os.path.join(sbet_dir, f)
                for f in os.listdir(sbet_dir)
                if f.endswith(".txt")
            ]
        )
        self.data = None
        self.SECS_PER_GPS_WK = 7 * 24 * 60 * 60  # 604800 sec
        self.SECS_PER_DAY = 24 * 60 * 60  # 86400 sec
        self.GPS_EPOCH = datetime(1980, 1, 6, 0, 0, 0)
        self.GPS_ADJUSTED_OFFSET = 1e9

    @staticmethod
    def get_sbet_date(sbet):
        """parses year, month, and day from an ASCII sbet filename

        :param str sbet: ASCII sbet filename
        :return: List[int]
        """
        sbet_path = os.path.normpath(sbet)
        sbet_parts = os.path.split(sbet_path)
        sbet_name = sbet_parts[-1]
        logger.sbet(f"SBET Name : {sbet_name}")
        year = int(sbet_name[0:4])
        month = int(sbet_name[4:6])
        day = int(sbet_name[6:8])
        sbet_date = [year, month, day]
        return sbet_date
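
    # Note: get_sbet_date() assumes the sbet filename begins with the date in
    # YYYYMMDD form; e.g., a (hypothetical) file named "20230725_sbet.txt"
    # would yield [2023, 7, 25].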

    def gps_sow_to_gps_adj(self, gps_date, gps_wk_sec):
        """converts GPS seconds-of-week timestamps to GPS adjusted standard time

        In the case that the timestamps in the sbet files are GPS seconds-of-week,
        this method is called to convert the timestamps to GPS adjusted standard
        time, which is the format of the las file timestamps.  The timestamps in
        the sbet and las files need to be in the same format, because the merging
        process matches the sbet and las data based on timestamps.

        :param List[int] gps_date: [year, month, day]
        :param gps_wk_sec: GPS seconds-of-week timestamps (pandas Series)
        :return: float
        """
        logger.sbet("converting GPS week seconds to GPS adjusted standard time...")
        year = gps_date[0]
        month = gps_date[1]
        day = gps_date[2]
        sbet_date = datetime(year, month, day)
        dt = sbet_date - self.GPS_EPOCH
        gps_wk = int((dt.days * self.SECS_PER_DAY + dt.seconds) / self.SECS_PER_GPS_WK)
        gps_time = gps_wk * self.SECS_PER_GPS_WK + gps_wk_sec
        gps_time_adj = gps_time - self.GPS_ADJUSTED_OFFSET
        return gps_time_adj
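
    # Worked example (illustrative values, not taken from an actual sbet file):
    # for gps_date = [2023, 7, 25], the difference from the GPS epoch (1980-01-06)
    # is 15906 days, so gps_wk = int(15906 * 86400 / 604800) = 2272.  A
    # seconds-of-week value of 200000.0 then maps to
    # 2272 * 604800 + 200000.0 - 1e9 = 374305600.0 (GPS adjusted standard time).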

    def check_if_sow(self, time):
        """checks whether a timestamp is plausibly GPS seconds-of-week

        :param float time: timestamp of the first sbet record
        :return: bool
        """
        logger.sbet("checking if timestamps are GPS week seconds...")
        return time <= self.SECS_PER_GPS_WK
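
    # Note: this check relies on GPS adjusted standard time (GPS seconds since
    # 1980-01-06 minus 1e9) being larger than one week of seconds (604800),
    # which holds for data collected after roughly September 2011.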
def build_sbets_data(self):
"""builds 1 pandas dataframe from all ASCII sbet files
:return: pandas dataframe
The following table lists the contents of the returned pandas sbet dataframe:
===== =============================================================
Index description
===== =============================================================
0 timestamp (GPS seconds-of-week or GPS standard adjusted time)
1 longitude
2 latitude
3 X (easting)
4 Y (northing)
5 Z (ellipsoid height)
6 roll
7 pitch
8 heading
9 standard deviation X
10 standard deviation Y
11 standard deviation Z
12 standard deviation roll
13 standard deviation pitch
14 standard deviation heading
===== =============================================================
"""
sbets_df = pd.DataFrame()
header_sbet = [
"time",
"lon",
"lat",
"X",
"Y",
"Z",
"roll",
"pitch",
"heading",
"stdX",
"stdY",
"stdZ",
"stdroll",
"stdpitch",
"stdheading",
]
# Used for holding processed SBET data if this is the PILLS sensor
modified_sbet_file = "modified_pills_sbet.txt"
print(r"Loading trajectory files...")
logger.sbet("loading {} trajectory files...".format(len(self.sbet_files)))
for sbet in progressbar.progressbar(
sorted(self.sbet_files), redirect_stdout=True
):
logger.sbet("-" * 50)
logger.sbet("{}...".format(os.path.split(sbet)[-1]))
sbet_date = self.get_sbet_date(sbet)
# If this is the PILLS sensor, pre-process the sbet data
if(self.sensor_name == "PILLS or RAMMS"):
logger.sbet("PILLS or RAMMS Sensor, pre-processing sbet file")
self.preprocess_pills_sbet(sbet, modified_sbet_file)
# Reassign the SBET file name to the modified file
sbet = modified_sbet_file
sbet_df = pd.read_csv(
sbet,
skip_blank_lines=True,
engine="c",
delim_whitespace=True,
header=None,
names=header_sbet,
index_col=False,
)
logger.sbet("({} trajectory points)".format(sbet_df.shape[0]))
is_sow = self.check_if_sow(sbet_df["time"][0])
if is_sow:
gps_time_adj = self.gps_sow_to_gps_adj(sbet_date, sbet_df["time"])
sbet_df["time"] = gps_time_adj
sbets_df = sbets_df.append(sbet_df, ignore_index=True)
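            # Note: pandas.DataFrame.append() was removed in pandas 2.0; if cBLUE is
            # run against a newer pandas, an equivalent call would be:
            #   sbets_df = pd.concat([sbets_df, sbet_df], ignore_index=True)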

        sbets_data = sbets_df.sort_values(["time"], ascending=[1])

        # If this was the PILLS or RAMMS sensor, a modified SBET file was created;
        # clean up by deleting modified_pills_sbet.txt
        if os.path.isfile(modified_sbet_file):
            logger.sbet("Cleaning up modified SBET file")
            os.remove(modified_sbet_file)

        return sbets_data

    def set_data(self):
        """populates the Sbet object's data field with a pandas dataframe (when the
        user presses the "Load Trajectory File(s)" button)

        :return: n/a
        """
        sbet_tic = time.process_time()
        self.data = self.build_sbets_data()  # df
        sbet_toc = time.process_time()

        logger.sbet(
            "It took {:.1f} mins to load the trajectory data.".format(
                (sbet_toc - sbet_tic) / 60
            )
        )

    def get_tile_data(self, north, south, east, west):
        """queries the sbet data points that lie within the given las tile bounding coordinates

        One pandas dataframe is created from all of the loaded ASCII sbet files,
        but as each las tile is processed, only the sbet data located within the
        las tile limits are sent to the calc_tpu() method.

        To account for las tiles that contain data points from a las flight line
        whose corresponding trajectory data fall outside of the las tile extents,
        a buffer is added to the bounds of the tile when retrieving the
        trajectory data.

        :param float north: northern limit of las tile
        :param float south: southern limit of las tile
        :param float east: eastern limit of las tile
        :param float west: western limit of las tile
        :return: pandas dataframe
        """
        buff = 500  # meters

        x = self.data.X.values
        y = self.data.Y.values

        north += buff
        south -= buff
        east += buff
        west -= buff

        # using numexpr to accelerate computations on large arrays
        data = self.data[
            ne.evaluate("(y >= south) & (y <= north) & (x >= west) & (x <= east)")
        ]

        return data
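
    # For reference, the numexpr filter above is equivalent to the plain
    # pandas/numpy boolean mask below; numexpr evaluates the compound expression
    # in a single pass, which is faster on large trajectories:
    #   mask = (y >= south) & (y <= north) & (x >= west) & (x <= east)
    #   data = self.data[mask]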

    def preprocess_pills_sbet(self, sbet_file, modified_sbet_file):
        """Pre-processes the given PILLS SBET data into the expected cBLUE format so that
        build_sbets_data() will run as expected.

        The PILLS SBET file has a varying number of lines of text describing the data
        in the file preceding the SBET data.

        The PILLS SBET data has 20 columns in the order:
            TIME, DISTANCE, EASTING, NORTHING, ELLIPSOID HEIGHT, LATITUDE, LONGITUDE,
            ELLIPSOID HEIGHT, ROLL, PITCH, HEADING, EAST VELOCITY, NORTH VELOCITY,
            UP VELOCITY, EAST SD, NORTH SD, HEIGHT SD, ROLL SD, PITCH SD, HEADING SD

        build_sbets_data() expects the data to have 15 columns in the order:
            timestamp (GPS seconds-of-week or GPS adjusted standard time), longitude, latitude,
            X (easting), Y (northing), Z (ellipsoid height), roll, pitch, heading, standard deviation X,
            standard deviation Y, standard deviation Z, standard deviation roll, standard deviation pitch,
            standard deviation heading

        The header text is removed from the file, the 5 unneeded columns are dropped
        from the table, and the column order is rearranged to match the order expected
        by build_sbets_data().  The processed data are then written to modified_sbet_file.

        :param string sbet_file: SBET file name selected by the user
        :param string modified_sbet_file: file name that the processed SBET data will be written to
        """
        # The first 26-28 rows of a PILLS sbet file are descriptions of the data.
        # Drop the first 100 lines (header lines plus time when the plane is on the
        # ground) to ensure that all extra header lines are dropped.
        n = 100

        # List of column indices to drop from the PILLS sbet file
        drop_columns = [1, 7, 11, 12, 13]

        # Column index order used to rearrange the sbet columns after the unneeded
        # columns have been dropped:
        # Time, Longitude, Latitude, X(Easting), Y(Northing), Z(Ellipsoid Height), Roll, Pitch, Heading,
        # stdx(East SD), stdy(North SD), stdz(Height SD), stdroll(Roll SD), stdpitch(Pitch SD), stdheading(Heading SD)
        new_order = [0, 5, 4, 1, 2, 3, 6, 7, 8, 9, 10, 11, 12, 13, 14]

        # Strip the first n lines of the file
        with open(sbet_file, "r") as fp:
            # read in the lines of the sbet file
            lines = fp.readlines()

        with open(modified_sbet_file, "w") as modified_fp:
            # write the lines to the modified file without the first n lines
            modified_fp.writelines(lines[n:])

        # Read in the stripped file and save it to a DataFrame
        sbet_df = pd.read_csv(modified_sbet_file, delim_whitespace=True, header=None, index_col=False)
        # logger.sbet(f"sbet df: {sbet_df}")

        # Drop the unneeded columns
        new_sbet_df = sbet_df.drop(sbet_df.columns[drop_columns], axis=1)

        # Rename column labels to make it easier to reorder them
        new_sbet_df.columns = range(0, 15)
        # logger.sbet(f"sbet dropped columns: {new_sbet_df}")

        # Rearrange the column order to match the expected column order
        new_sbet_df = new_sbet_df.reindex(new_order, axis=1)
        # logger.sbet(f"sbet rearranged: {new_sbet_df}")

        # Write the processed data to the modified sbet file
        new_sbet_df.to_csv(modified_sbet_file, index=False, index_label=False, sep=" ", header=False)
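

# Minimal usage sketch (illustrative only; the directory path and tile bounds
# below are hypothetical, and in cBLUE these calls are normally driven by the GUI):
#
#   sbet = Sbet("path/to/trajectory_dir", "PILLS or RAMMS")
#   sbet.set_data()  # loads all *.txt sbet files in the directory
#   tile_trajectory = sbet.get_tile_data(
#       north=4436000.0, south=4434000.0, east=332000.0, west=330000.0
#   )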