Skip to content
This repository was archived by the owner on Jun 6, 2021. It is now read-only.

multithread decompiling #6

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 19 additions & 0 deletions Utility/helper_thread.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
from concurrent.futures import ThreadPoolExecutor, as_completed

def _default_multithread_run_callback(name, result):
    """Default completion callback: print ``name: result`` to stdout."""
    print(f'{name}: {result}')


def multithread_run(fn, callback=_default_multithread_run_callback, args_list=(), max_workers=5):
    """Run *fn* concurrently for a batch of tasks, reporting each completion.

    Args:
        fn: Callable executed once per task, in a worker thread.
        callback: Invoked as ``callback(name, result)`` from the calling
            thread as each task finishes — in completion order, not
            submission order.
        args_list: Iterable of ``(name, args, kwargs)`` triples; ``name``
            labels the task and ``args``/``kwargs`` are forwarded to *fn*.
        max_workers: Thread-pool size (default 5, matching the previous
            hard-coded value).

    An exception raised by *fn* (or by *callback*) is caught and reported
    to stdout; it does not propagate, so one failed task cannot abort the
    rest of the batch.
    """
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        # Map each future back to its task name so the completion loop
        # can tell the callback which task just finished.
        future_tasks = {executor.submit(fn, *args, **kwargs): name
                        for name, args, kwargs in args_list}
        for future in as_completed(future_tasks):
            name = future_tasks[future]
            try:
                result = future.result()
                callback(name, result)
            except Exception as exc:
                # Best-effort batch semantics: report and keep going.
                print(f'multithread_run {fn.__name__} {name} failed: \n{exc}')
56 changes: 31 additions & 25 deletions Utility/helpers_decompile.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
from Utility.helpers_path import replace_extension, get_rel_path, get_file_stem, ensure_path_created
from Utility.helpers_package import install_package, exec_package
from Utility.helpers_time import get_time, get_time_str, get_minutes
from Utility.helper_thread import multithread_run

# Globals
script_package_types = ['*.zip', '*.ts4script']
Expand Down Expand Up @@ -77,6 +78,31 @@ def decompile_dir(src_dir: str, dest_dir: str, filename: str) -> None:
fail_count = 0
count = 0

def on_decompile_done(name, success):
    """Per-task completion callback handed to multithread_run.

    multithread_run invokes callbacks from its own as_completed loop
    (the submitting thread), so the counter updates below are not
    racing the worker threads.

    Prints "." for a successful decompile or "x" for a failure on the
    same line — a compact progress bar — wrapping to a new line every
    80 marks, while tallying per-directory and grand-total counts.
    """
    # Print progress
    # Prints a single dot on the same line which gives a nice clean progress report
    # Tally number of files and successful / failed files
    # Per-directory counters live in the enclosing decompile_dir call.
    nonlocal count, col_count, suc_count, fail_count
    # Module-level grand totals accumulated across every package processed.
    global total_suc_count, total_fail_count, total_count
    if success:
        # NOTE(review): end="" suppresses the newline but also buffering
        # may delay output — consider flush=True for live progress.
        print(".", end="")
        suc_count += 1
        total_suc_count += 1
    else:
        print("x", end="")
        fail_count += 1
        total_fail_count += 1

    count += 1
    total_count += 1

    # Insert a new progress line every 80 characters
    col_count += 1
    if col_count >= 80:
        col_count = 0
        print("")

args_list = []
# Go through each compiled python file in the folder
for root, dirs, files in os.walk(src_dir):
for filename in fnmatch.filter(files, python_compiled_ext):
Expand All @@ -92,31 +118,11 @@ def decompile_dir(src_dir: str, dest_dir: str, filename: str) -> None:
# Make sure to strip off the file name at the end
ensure_path_created(str(Path(dest_file_path).parent))

# Decompile it to destination
success = exec_package("uncompyle6",
"-o " + '"' + dest_file_path + '"' + " " +
'"' + src_file_path + '"')

# Print progress
# Prints a single dot on the same line which gives a nice clean progress report
# Tally number of files and successful / failed files
if success:
print(".", end="")
suc_count += 1
total_suc_count += 1
else:
print("x", end="")
fail_count += 1
total_fail_count += 1

count += 1
total_count += 1

# Insert a new progress line every 80 characters
col_count += 1
if col_count >= 80:
col_count = 0
print("")
# pack args to run
args_list.append((src_file_path, ["uncompyle6", f'-o "{dest_file_path}" "{src_file_path}"' ], {}))

# Decompile it to destination
multithread_run(exec_package, on_decompile_done, args_list)

time_end = get_time()
elapsed_minutes = get_minutes(time_end, time_start)
Expand Down