Skip to content

Commit 169e1f2

Browse files
authored
Updating -e option by adding new dump feature (#47)
* update -e option function * adding dump build configuration files by adding new bbclass * update bom.bbclass * fix error bom.json * fix error in bom_for_e.bbclass * Adapting review comments and fix build error * restore bom.bbclass * fix reviewed item and build error * fix build error in log statement * adding 'get' to set default value in pf
1 parent b262c36 commit 169e1f2

File tree

5 files changed

+290
-9
lines changed

5 files changed

+290
-9
lines changed
Lines changed: 149 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,149 @@
1+
# Copyright (c) 2020 LG Electronics, Inc.
2+
# SPDX-License-Identifier: Apache-2.0
3+
#
4+
# This class adds write_bom_info and do_dumptasks,
# Each of them can be run by the bitbake --runall option.
# They are useful to verify build output specification.
7+
8+
do_write_bom_info[nostamp] = "1"
9+
addtask write_bom_info
10+
python do_write_bom_info() {
11+
import json
12+
import time
13+
# We want one recipe per line, starting with arch and recipe keys,
14+
# so that it's easy to sort and compare them
15+
class BomJSONEncoder(json.JSONEncoder):
16+
def iterencode(self, obj, _one_shot=True):
17+
if isinstance(obj, dict):
18+
output = []
19+
if "arch" in obj.keys() and "recipe" in obj.keys():
20+
output.append(json.dumps("arch") + ": " + self.encode(obj["arch"]))
21+
output.append(json.dumps("recipe") + ": " + self.encode(obj["recipe"]))
22+
for key, value in sorted(obj.items()):
23+
if key == "arch" or key == "recipe":
24+
continue
25+
output.append(json.dumps(key) + ": " + self.encode(value))
26+
return "{" + ",".join(output) + "}"
27+
else:
28+
return json.JSONEncoder().iterencode(obj, _one_shot)
29+
30+
31+
jsondata = {}
32+
jsondata["src_path"] = d.getVar("S", True)
33+
jsondata["src_uri"] = d.getVar("SRC_URI", True)
34+
jsondata["srcrev"] = "".join(d.getVar("SRCREV", True).split())
35+
jsondata["recipe"] = d.getVar("PN", True)
36+
jsondata["file"] = d.getVar("FILE", True)[len(d.getVar("TOPDIR", True)):]
37+
jsondata["arch"] = d.getVar("PACKAGE_ARCH", True)
38+
jsondata["author"] = d.getVar("AUTHOR", True)
39+
license = d.getVar("LICENSE", True)
40+
license_flags = d.getVar("LICENSE_FLAGS", True)
41+
packages = d.getVar("PACKAGES", True)
42+
jsondata["license"] = license
43+
jsondata["license_flags"] = license_flags
44+
jsondata["complete"] = int(time.time())
45+
jsondata["packages"] = packages
46+
pkg_lic = {}
47+
if packages:
48+
for pkg in packages.split():
49+
lic = d.getVar("LICENSE_%s" % pkg, True)
50+
if lic and lic != license:
51+
pkg_lic[pkg] = lic
52+
jsondata["pkg_lic"] = pkg_lic
53+
jsondata["pe"] = d.getVar("PE", True)
54+
jsondata["pv"] = d.getVar("PV", True)
55+
jsondata["pr"] = d.getVar("PR", True)
56+
jsondata["pf"] = d.getVar("PF", True)
57+
jsondata["extendprauto"] = d.getVar("EXTENDPRAUTO", True)
58+
jsondata["extendpkgv"] = d.getVar("EXTENDPKGV", True)
59+
jsondata["description"] = d.getVar("DESCRIPTION", True)
60+
jsondata["summary"] = d.getVar("SUMMARY", True)
61+
jsondata["cve_check_whitelist "] = d.getVar("CVE_CHECK_WHITELIST", True)
62+
63+
cpe_ids = get_cpe_ids(d.getVar("CVE_VENDOR",""), d.getVar("CVE_PRODUCT",""), d.getVar("CVE_VERSION",""), jsondata["recipe"], jsondata["pv"])
64+
jsondata["source_info"] = cpe_ids
65+
66+
datafile = os.path.join(d.getVar("TOPDIR", True), "bom.json")
67+
lock = bb.utils.lockfile(datafile + '.lock')
68+
with open(datafile, "a") as f:
69+
json.dump(jsondata, f, sort_keys=True, cls=BomJSONEncoder)
70+
f.write(',\n')
71+
bb.utils.unlockfile(lock)
72+
}
73+
74+
75+
python do_dumptasks() {
76+
77+
#Dump BitBake tasks to ${TOPDIR}/dumped_tasks/${PF}.task_name.
78+
79+
import os
80+
import bb
81+
82+
ar_outdir = os.path.join(d.getVar('TOPDIR', True), "dumped_tasks") # 기본값 설정
83+
ar_dumptasks = ["do_configure", "do_compile"] # 기본값 설정
84+
pf = d.getVar('PF', True)
85+
86+
bb.utils.mkdirhier(ar_outdir)
87+
88+
for task in ar_dumptasks:
89+
# Do not export tasks that are set to do not run
90+
if d.getVarFlag(task, 'noexec') == '1':
91+
bb.warn('%s: skipping task %s: [noexec]' % (pf, task))
92+
continue
93+
94+
dumpfile = os.path.join(ar_outdir, '%s.%s' % (pf, task))
95+
bb.note('Dumping task %s into %s' % (task, dumpfile))
96+
97+
# We assume the task as a shell script and then check if it is
98+
# actually a Python script.
99+
emit_func = bb.data.emit_func
100+
if d.getVarFlag(task, 'python') == '1':
101+
emit_func = bb.data.emit_func_python
102+
103+
try:
104+
with open(dumpfile, 'w') as f:
105+
emit_func(task, f, d)
106+
except Exception as e:
107+
bb.fatal('%s: Cannot export %s: %s' % (pf, task, e))
108+
}
109+
110+
# do_dumptasks 작업을 빌드 순서에 포함시키기
111+
addtask do_dumptasks after do_configure before do_compile
112+
113+
114+
def get_cpe_ids(cve_vendor, cve_product, cve_version, pn, pv):
115+
116+
#Get list of CPE identifiers for the given product and version
117+
118+
if cve_vendor is None:
119+
cve_vendor = ""
120+
if cve_product is None:
121+
cve_product = ""
122+
if cve_version is None:
123+
cve_version = ""
124+
125+
version = cve_version.split("+git")[0]
126+
127+
if cve_version.startswith("$"):
128+
version = pv
129+
130+
cpe_ids = []
131+
for product in cve_product.split():
132+
# CVE_PRODUCT in recipes may include vendor information for CPE identifiers. If not,
133+
# use wildcard for vendor.
134+
if ":" in product:
135+
cve_vendor, product = product.split(":", 1)
136+
137+
if product.startswith("$"):
138+
product = pn
139+
140+
if cve_vendor is None:
141+
cve_vendor = ""
142+
143+
cpe_id = f'cpe:2.3:a:{cve_vendor}:{product}:{version}:*:*:*:*:*:*:*'
144+
cpe_ids.append(cpe_id)
145+
146+
return cpe_ids
147+
148+
149+

src/fosslight_yocto/_help.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
-n\t\t\t\t Print result in BIN(Yocto) format
2525
-s\t\t\t\t Analyze source code for unconfirmed Open Source
2626
-c\t\t\t\t Analyze all the source code
27-
-e\t\t\t\t Compress all the source code
27+
-e <path>\t\t\t Top build output path with bom.json to compress all the source code
2828
-o <path>\t\t\t Output Path
2929
-f <format>\t\t\t Output file format (excel, csv, opossum)
3030
-pr\t\t\t\t Print all data of bom.json"""

src/fosslight_yocto/_package_item.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,9 @@ def __init__(self):
4444
self.pr = ""
4545
self._yocto_recipe = []
4646
self._yocto_package = []
47+
self.source_done = "" # Save timestamp after source code fetch : Only for -e option
48+
self.full_src_uri = "" # List all src uri links : Only for -e option
49+
self.pf = "" # Package name + version value : Only for -e option
4750

4851
def __eq__(self, value):
4952
return self.spdx_id == value
@@ -297,6 +300,12 @@ def set_value_switch(oss, key, value, nested_pkg_name):
297300
oss.yocto_recipe = value
298301
elif key == 'additional_data':
299302
oss.additional_data = value
303+
elif key == 'source_done':
304+
oss.source_done = value
305+
elif key == 'full_src_uri':
306+
oss.full_src_uri = value
307+
elif key == 'package_format':
308+
oss.pf = value
300309

301310

302311
def update_package_name(oss, value, nested_pkg_name):

src/fosslight_yocto/_zip_source_works.py

Lines changed: 121 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,8 @@
2525
ZIP_FILE_EXTENSION = ".zip"
2626
EXCLUDE_FILE_EXTENSION = ['socket']
2727

28+
DUMP_DIR_PATH = "dumped_tasks"
29+
2830

2931
def is_exclude_file(file_abs_path):
3032
excluded = False
@@ -39,16 +41,119 @@ def is_exclude_file(file_abs_path):
3941
return excluded
4042

4143

42-
def zip_module(orig_path, desc_name):
44+
def join_source_path(build_output_path, bom_src_path):
45+
if bom_src_path == '':
46+
return ''
47+
leaf_folder = os.path.basename(os.path.normpath(build_output_path))
48+
split_path = bom_src_path.split(leaf_folder)
49+
if len(split_path) == 1:
50+
return bom_src_path
51+
join_path = os.path.join(build_output_path, split_path[1][1:])
52+
# join_path = join_path.replace('\\', '/')
53+
return join_path
54+
55+
56+
def check_valid_file_type(file_path, timestamp):
57+
validation = True
58+
if not os.path.isfile(file_path) or \
59+
os.path.islink(file_path) or \
60+
os.path.getsize(file_path) > 1024 * 1024 or \
61+
file_path.endswith('.cmd') or \
62+
file_path.endswith('.o'):
63+
validation = False
64+
65+
if validation:
66+
creation_time = os.path.getmtime(file_path)
67+
if creation_time > timestamp:
68+
validation = False
69+
70+
return validation
71+
72+
73+
def get_dump_files(oss_key, dump_dir):
74+
dump_file_list = os.listdir(dump_dir)
75+
found_list = []
76+
77+
logger.debug(f'Check dump oss : {oss_key}')
78+
79+
if oss_key == "":
80+
return found_list
81+
82+
if dump_file_list is None:
83+
print("no dump info")
84+
return found_list
85+
86+
for dump in dump_file_list:
87+
if dump.startswith(oss_key):
88+
print("found dump file")
89+
print(dump)
90+
found_list.append(dump)
91+
92+
return found_list
93+
94+
95+
def zip_module(orig_path, desc_name, build_output_dir, timestamp, full_src_uri, pf):
4396
FAILED_MSG_PREFIX = "Failed: " + desc_name + " " + orig_path
4497
success = True
4598
failed_msg = [FAILED_MSG_PREFIX]
4699
desc_name = desc_name.strip()
47100
zip_name = desc_name + ZIP_FILE_EXTENSION
101+
uri_path_list = []
102+
dumptasks_dir = os.path.join(build_output_dir, DUMP_DIR_PATH)
103+
oss_dump_list = get_dump_files(pf, dumptasks_dir)
104+
105+
uris = full_src_uri.split()
106+
107+
for uri in uris:
108+
if uri.startswith("file://"):
109+
src_uri_file = uri.split("file://")[1]
110+
uri_path = os.path.join(orig_path, src_uri_file)
111+
uri_path = join_source_path(build_output_dir, uri_path)
112+
logger.debug(f'uri full path : {uri_path}')
113+
uri_path_list.append(uri_path)
114+
115+
if len(uri_path_list) > 0:
116+
uri_path = uri_path_list[0]
117+
else:
118+
uri_path = None
119+
120+
orig_path = join_source_path(build_output_dir, orig_path)
121+
122+
if os.path.islink(orig_path):
123+
orig_path = os.path.realpath(orig_path)
124+
orig_path = join_source_path(build_output_dir, orig_path)
48125

49126
if desc_name == "":
50127
logger.debug("Recipe name is missing")
128+
elif uri_path is not None and os.path.exists(uri_path) and os.path.isfile(uri_path):
129+
130+
zip_object = zipfile.ZipFile(zip_name, "w", zipfile.ZIP_DEFLATED)
131+
for uri_path in uri_path_list:
132+
133+
try:
134+
abs_src = os.path.abspath(orig_path)
135+
abs_name = os.path.abspath(uri_path)
136+
des_path = os.path.join(source_desc_folder, zip_name)
137+
138+
relpath = os.path.relpath(abs_name, abs_src)
139+
zip_object.write(abs_name, relpath)
140+
except Exception as ex:
141+
success = False
142+
failed_msg.append(f'|--- {ex}')
143+
144+
try:
145+
for dump in oss_dump_list:
146+
dump_orig_path = os.path.join(dumptasks_dir, dump)
147+
zip_object.write(dump_orig_path, os.path.basename(dump_orig_path))
148+
149+
zip_object.close()
150+
shutil.move(zip_name, des_path)
151+
except Exception as ex:
152+
success = False
153+
failed_msg.append(f'|--- {ex}')
154+
51155
elif orig_path != "" and os.path.exists(orig_path):
156+
52157
abs_src = os.path.abspath(orig_path)
53158
des_path = os.path.join(source_desc_folder, zip_name)
54159
compress_file = []
@@ -59,6 +164,8 @@ def zip_module(orig_path, desc_name):
59164
abs_name = os.path.abspath(os.path.join(dir_name, filename))
60165
if is_exclude_file(abs_name):
61166
continue
167+
if not check_valid_file_type(abs_name, timestamp):
168+
continue
62169
if os.path.islink(abs_name):
63170
abs_name = os.readlink(abs_name)
64171
if not os.path.isfile(abs_name):
@@ -71,11 +178,16 @@ def zip_module(orig_path, desc_name):
71178
success = False
72179
failed_msg.append(f'|--- {ex}')
73180
try:
181+
for dump in oss_dump_list:
182+
dump_orig_path = os.path.join(dumptasks_dir, dump)
183+
zip_object.write(dump_orig_path, os.path.basename(dump_orig_path))
184+
74185
zip_object.close()
75186
shutil.move(zip_name, des_path)
76187
except Exception as ex:
77188
success = False
78189
failed_msg.append(f'|--- {ex}')
190+
79191
else:
80192
success = False
81193
failed_msg.append(f"|--- Can't find source path: {orig_path}")
@@ -114,7 +226,7 @@ def zip_compressed_source(output_dir="", total_list=[]):
114226
logger.info(f"\n* Final compressed file: {final_zip_file}")
115227

116228

117-
def collect_source(pkg_list: List[PackageItem], output_dir: str):
229+
def collect_source(pkg_list: List[PackageItem], output_dir: str, build_output_dir: str):
118230
global source_desc_folder
119231
if output_dir == "":
120232
output_dir = os.getcwd()
@@ -141,9 +253,14 @@ def collect_source(pkg_list: List[PackageItem], output_dir: str):
141253
src_uri = recipe_item.download_location
142254
base_path = recipe_item.file_path
143255

256+
full_uri = recipe_item.full_src_uri
257+
pf = recipe_item.pf
144258
# zip downloaded source codes and located to package_zip folders
145259
total_list.append(recipe_name + ZIP_FILE_EXTENSION)
146-
success, failed_msg = zip_module(recipe_item.src_path, recipe_name)
260+
source_timestamp = recipe_item.source_done
261+
zip_file_name = recipe_name + "_" + recipe_item.version
262+
263+
success, failed_msg = zip_module(recipe_item.src_path, zip_file_name, build_output_dir, source_timestamp, full_uri, pf)
147264
if success:
148265
success_list.append(recipe_name)
149266
else:
@@ -168,4 +285,4 @@ def collect_source(pkg_list: List[PackageItem], output_dir: str):
168285
write_txt_file(output_failed_txt, "\n".join(failed_list))
169286

170287
# zip package source codes
171-
zip_compressed_source(output_dir, total_list)
288+
# zip_compressed_source(output_dir, total_list)

0 commit comments

Comments
 (0)