
Commit 53b1e90

Martin committed:
Updated main.py, inputs.py and utils.py to incorporate support for .MFC, .MFE, .MFM files (data compression and/or data encryption)

1 parent 195fc62 commit 53b1e90

File tree

6 files changed: +102 -11 lines changed

README.md
build_layers.py
dashboard-writer/inputs.py
dashboard-writer/main.py
dashboard-writer/utils.py
requirements.txt


README.md

Lines changed: 8 additions & 0 deletions

@@ -63,6 +63,14 @@ Another approach is to use event-based triggers, e.g. via AWS Lambda functions.
 ---
 ## Other practical information
 
+### Regarding encrypted log files
+If you need to handle encrypted log files, you can provide a passwords dictionary object with a structure similar to the `passwords.json` file used in the CANedge MF4 converters. The object can be provided e.g. as below (or via environment variables):
+
+```
+pw = {"default": "password"}  # hardcoded
+pw = json.load(open("passwords.json"))  # from local JSON file
+```
+
 ### Change timestamps
 If you wish to test the script using old data, you can change the timestamps so that the data is 'rebaselined' to today, minus an offset number of days. This is useful e.g. if you want to use the InfluxDB Cloud Starter, which will delete data that is older than 30 days. To rebaseline your data to start today minus 2 days, simply add `days_offset=2` in the `ProcessData` initialization.
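The README mentions environment variables as an alternative to the two options above; a minimal sketch of that route, assuming a hypothetical `CANEDGE_PASSWORDS` variable holding the dictionary as a JSON string:

```python
import json
import os

# assumption: passwords stored as a JSON string in an environment variable,
# e.g. CANEDGE_PASSWORDS='{"default": "password"}' (hypothetical variable name)
pw = json.loads(os.environ.get("CANEDGE_PASSWORDS", '{"default": ""}'))
```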

build_layers.py

Lines changed: 80 additions & 0 deletions

@@ -0,0 +1,80 @@ (new file)

```python
# requires Docker to be running

import json
import os
import subprocess

# specify base details and region list
layer_name = "css-dashboard-writer"
layer_description = "CSS Electronics dashboard-writer script dependencies for use in AWS Lambda functions"
csv_path = "dashboard-writer/aws_lambda_example/lambda_layer_arns.csv"
run_req_build = False

regions = [
    "ap-northeast-1",
    "ap-northeast-2",
    "ap-south-1",
    "ap-southeast-1",
    "ap-southeast-2",
    "ca-central-1",
    "eu-central-1",
    "eu-north-1",
    "eu-west-1",
    "eu-west-2",
    "eu-west-3",
    "sa-east-1",
    "us-east-1",
    "us-east-2",
    "us-west-1",
    "us-west-2",
]

# create zip with requirements.txt dependencies (Windows shell commands)
if run_req_build:
    os.system("rmdir /S/Q python")
    os.system("mkdir python\\lib\\python3.7\\site-packages")
    os.system(
        'docker run -v "%cd%":/var/task "lambci/lambda:build-python3.7" /bin/sh -c "pip install -r requirements.txt -t python/lib/python3.7/site-packages/; exit"'
    )
    # botocore is dropped from the zip as the AWS Lambda runtime already provides it
    os.system("rmdir /S/Q python\\lib\\python3.7\\site-packages\\botocore")
    os.system("zip -r dashboard-writer.zip python")

# for each region, publish an AWS Lambda layer from the built zip
region_arn_list = []

for region in regions:
    print(f"executing region {region}")

    # create the layer
    arn_output = subprocess.check_output(
        f'aws lambda publish-layer-version --region {region} --layer-name {layer_name} --description "{layer_description}" --cli-connect-timeout 6000 --license-info "MIT" --zip-file "fileb://dashboard-writer.zip" --compatible-runtimes python3.7',
        shell=True,
    ).decode("utf-8")

    version = int(json.loads(arn_output)["Version"])

    # make the layer version publicly accessible
    make_public = subprocess.check_output(
        f'aws lambda add-layer-version-permission --layer-name {layer_name} --version-number {version} --statement-id allAccountsExample --principal "*" --action lambda:GetLayerVersion --region {region}',
        shell=True,
    ).decode("utf-8")

    print("Build layer:", arn_output)
    print("Make public:", make_public)

    arn = str(json.loads(arn_output)["LayerVersionArn"])
    region_arn_list.append(f"{region},{arn}\n")

# write region/ARN pairs to CSV
with open(csv_path, "w") as output_file:
    for region_arn in region_arn_list:
        output_file.write(region_arn)

print(f"Completed writing {len(region_arn_list)} of {len(regions)} ARNs to CSV {csv_path}")
```

dashboard-writer/inputs.py

Lines changed: 3 additions & 0 deletions

@@ -11,6 +11,9 @@
 # Syntax: Local: ["folder/device_id"] | S3: ["bucket/device_id"]
 devices = ["LOG/958D2219"]
 
+# if you're using data encryption, you can pass the password below
+pw = {"default": "password"}
+
 
 # -----------------------------------------------
 # specify DBC paths and a list of signals to process ([]: include all signals)
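If you'd rather not hardcode the password in `inputs.py`, the same dict can be loaded from a `passwords.json` file, mirroring the README example; a minimal sketch (the file location next to `inputs.py` and the empty-string fallback are assumptions):

```python
import json
from pathlib import Path

# assumption: passwords.json sits next to inputs.py, with content like
# {"default": "password"}; fall back to an empty default if the file is absent
pw_path = Path(__file__).parent / "passwords.json"
pw = json.loads(pw_path.read_text()) if pw_path.exists() else {"default": ""}
```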

dashboard-writer/main.py

Lines changed: 3 additions & 3 deletions

@@ -7,15 +7,15 @@
 start_times = influx.get_start_times(inp.devices, inp.default_start, inp.dynamic)
 
 # setup filesystem (local/S3), load DBC files and list log files for processing
-fs = setup_fs(inp.s3, inp.key, inp.secret, inp.endpoint)
+fs = setup_fs(inp.s3, inp.key, inp.secret, inp.endpoint, passwords=inp.pw)
 db_list = load_dbc_files(inp.dbc_paths)
-log_files = list_log_files(fs, inp.devices, start_times)
+log_files = list_log_files(fs, inp.devices, start_times, passwords=inp.pw)
 
 # process log files and write extracted signals to InfluxDB
 proc = ProcessData(fs, db_list, inp.signals)
 
 for log_file in log_files:
-    df_raw, device_id = proc.get_raw_data(log_file)
+    df_raw, device_id = proc.get_raw_data(log_file, passwords=inp.pw)
     df_phys = proc.extract_phys(df_raw)
 
     proc.print_log_summary(device_id, log_file, df_phys)

dashboard-writer/utils.py

Lines changed: 6 additions & 6 deletions

@@ -1,4 +1,4 @@
-def setup_fs(s3, key="", secret="", endpoint="", cert=""):
+def setup_fs(s3, key="", secret="", endpoint="", cert="", passwords={}):
     """Given a boolean specifying whether to use local disk or S3, setup filesystem
     Syntax examples: AWS (http://s3.us-east-2.amazonaws.com), MinIO (http://192.168.0.1:9000)
     The cert input is relevant if you're using MinIO with TLS enabled, for specifying the path to the certificate.
@@ -27,7 +27,7 @@ def setup_fs(s3, key="", secret="", endpoint="", cert=""):
     import canedge_browser
 
     base_path = Path(__file__).parent
-    fs = canedge_browser.LocalFileSystem(base_path=base_path)
+    fs = canedge_browser.LocalFileSystem(base_path=base_path, passwords=passwords)
 
     return fs
 
@@ -48,7 +48,7 @@ def load_dbc_files(dbc_paths):
 
 
 # -----------------------------------------------
-def list_log_files(fs, devices, start_times, verbose=True):
+def list_log_files(fs, devices, start_times, verbose=True, passwords={}):
     """Given a list of device paths, list log files from specified filesystem.
     Data is loaded based on the list of start datetimes
     """
@@ -59,7 +59,7 @@ def list_log_files(fs, devices, start_times, verbose=True):
     if len(start_times):
         for idx, device in enumerate(devices):
             start = start_times[idx]
-            log_files_device = canedge_browser.get_log_files(fs, [device], start_date=start)
+            log_files_device = canedge_browser.get_log_files(fs, [device], start_date=start, passwords=passwords)
             log_files.extend(log_files_device)
 
     if verbose:
@@ -180,14 +180,14 @@ def filter_signals(self, df_phys):
 
         return df_phys
 
-    def get_raw_data(self, log_file, lin=False):
+    def get_raw_data(self, log_file, lin=False, passwords={}):
         """Extract a df of raw data and device ID from log file.
         Optionally include LIN bus data by setting lin=True
         """
         import mdf_iter
 
         with self.fs.open(log_file, "rb") as handle:
-            mdf_file = mdf_iter.MdfFile(handle)
+            mdf_file = mdf_iter.MdfFile(handle, passwords=passwords)
             device_id = self.get_device_id(mdf_file)
 
         if lin:
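Taken together, the updated signatures mean the passwords dict should be passed by keyword, since `cert`, `verbose` and `lin` precede it positionally; a minimal usage sketch, assuming the repo's `utils.py` is importable:

```python
from utils import setup_fs, list_log_files

pw = {"default": "password"}

# passwords must be a keyword argument: passed positionally it would
# bind to cert (setup_fs) or verbose (list_log_files) instead
fs = setup_fs(s3=False, passwords=pw)
log_files = list_log_files(fs, ["LOG/958D2219"], start_times=[], passwords=pw)
```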

requirements.txt

Lines changed: 2 additions & 2 deletions

@@ -6,7 +6,7 @@ attrs==21.2.0
 bitstruct==8.11.1
 botocore==1.20.49
 can-decoder>=0.1.3
-canedge-browser>=0.0.7
+canedge-browser>=0.0.8
 canmatrix==0.9.1
 certifi==2021.5.30
 chardet==4.0.0
@@ -19,7 +19,7 @@ importlib-metadata==4.5.0
 influxdb-client==1.10.0
 J1939-PGN==0.4
 jmespath==0.10.0
-mdf-iter>=0.0.4
+mdf-iter>=0.0.6
 multidict==5.1.0
 numpy==1.20.3
 pandas==1.2.4
