@@ -136,8 +136,9 @@ def process_file(
 
     process_result = _process_task(config, task)
 
-    # ensure that the root extraction directory is created even for empty extractions
-    extract_dir.mkdir(parents=True, exist_ok=True)
+    if not config.skip_extraction:
+        # ensure that the root extraction directory is created even for empty extractions
+        extract_dir.mkdir(parents=True, exist_ok=True)
 
     if report_file:
         write_json_report(report_file, process_result)
@@ -475,7 +476,7 @@ def __init__(
     def process(self):
         logger.debug("Processing file", path=self.task.path, size=self.size)
 
-        if self.carve_dir.exists():
+        if not self.config.skip_extraction and self.carve_dir.exists():
             # Extraction directory is not supposed to exist, it is usually a simple mistake of running
             # unblob again without cleaning up or using --force.
             # It would cause problems continuing, as it would mix up original and extracted files,
@@ -515,6 +516,13 @@ def _process_chunks(
         if unknown_chunks:
             logger.warning("Found unknown Chunks", chunks=unknown_chunks)
 
+        if self.config.skip_extraction:
+            for chunk in unknown_chunks:
+                self.result.add_report(chunk.as_report(entropy=None))
+            for chunk in outer_chunks:
+                self.result.add_report(chunk.as_report(extraction_reports=[]))
+            return
+
         for chunk in unknown_chunks:
             carved_unknown_path = carve_unknown_chunk(self.carve_dir, file, chunk)
             entropy = self._calculate_entropy(carved_unknown_path)