Skip to content

Commit 155fc18

Browse files
author
ChengyuZhu6
committed
security-fix: resolve security issue with subprocess call using shell=True
- Issue: Fixed [B602:subprocess_popen_with_shell_equals_true] identified by Bandit, which flagged the use of `subprocess.Popen` with `shell=True` as a high-severity security risk (CWE-78: OS Command Injection).
- Fix: Commands are now tokenized with `shlex.split` (or written as argument lists) and executed without `shell=True`, so they are no longer exposed to shell injection vulnerabilities.

Signed-off-by: ChengyuZhu6 <[email protected]>
1 parent 5ec4f88 commit 155fc18

File tree

17 files changed

+143
-102
lines changed

17 files changed

+143
-102
lines changed

benchmarks/auto_benchmark.py

NOTE(review): the patched `execute` here keeps the now-dead `shell=True` parameter in its signature (it is no longer forwarded to `Popen`), while the parallel fix in benchmarks/utils/common.py removes the parameter entirely — the two helpers should be made consistent, or the unused parameter deprecated explicitly.

+6-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import argparse
22
import datetime
33
import os
4+
import shlex
45
import shutil
56
from subprocess import Popen
67

@@ -259,9 +260,13 @@ def clean_up_benchmark_env(bm_config):
259260

260261
def execute(command, wait=False, stdout=None, stderr=None, shell=True):
261262
print("execute: {}".format(command))
263+
264+
# Split the command into a list of arguments
265+
if isinstance(command, str):
266+
command = shlex.split(command)
267+
262268
cmd = Popen(
263269
command,
264-
shell=shell,
265270
close_fds=True,
266271
stdout=stdout,
267272
stderr=stderr,

benchmarks/utils/common.py

+6-2
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,16 @@
11
import os
2+
import shlex
23
from subprocess import Popen
34

45

5-
def execute(command, wait=False, stdout=None, stderr=None, shell=True):
6+
def execute(command, wait=False, stdout=None, stderr=None):
67
print(command)
8+
# Split the command into a list of arguments
9+
if isinstance(command, str):
10+
command = shlex.split(command)
11+
712
cmd = Popen(
813
command,
9-
shell=shell,
1014
close_fds=True,
1115
stdout=stdout,
1216
stderr=stderr,

benchmarks/windows_install_dependencies.py

+4-2
Original file line numberDiff line numberDiff line change
@@ -5,12 +5,14 @@
55
import subprocess
66
import locale
77
import shutil
8+
import shlex
89
import argparse
910

1011
def run(command):
1112
"""Returns (return-code, stdout, stderr)"""
12-
p = subprocess.Popen(command, stdout=subprocess.PIPE,
13-
stderr=subprocess.PIPE, shell=True)
13+
if isinstance(command, str):
14+
command = shlex.split(command)
15+
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
1416
raw_output, raw_err = p.communicate()
1517
rc = p.returncode
1618
enc = locale.getpreferredencoding()

binaries/s3_binary_upload.py

+4-3
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
import glob
33
import logging
44
import os
5+
import shlex
56
import subprocess
67
import sys
78

@@ -39,10 +40,10 @@ def s3_upload_local_folder(self, local_folder_path: str):
3940
"""
4041
LOGGER.info(f"Uploading *.whl files from folder: {local_folder_path}")
4142
s3_command = f"{self.s3_command} --exclude '*' --include '*.whl' {local_folder_path} {self.s3_bucket.rstrip('/')}/whl/{self.channel}"
42-
43+
s3_command = shlex.split(s3_command)
4344
try:
44-
ret_code = subprocess.run(
45-
s3_command, check=True, stdout=subprocess.PIPE, universal_newlines=True, shell=True
45+
subprocess.run(
46+
s3_command, check=True, stdout=subprocess.PIPE, universal_newlines=True
4647
)
4748
except subprocess.CalledProcessError as e:
4849
LOGGER.info(f"S3 upload command failed: {s3_command}. Exception: {e}")

examples/LLM/llama/chat_app/docker/torchserve_server_app.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -15,9 +15,9 @@
1515

1616

1717
def start_server():
18+
commands = ["torchserve", "--start", "--ts-config", "/home/model-server/config.properties"]
1819
subprocess.run(
19-
["torchserve --start --ts-config /home/model-server/config.properties"],
20-
shell=True,
20+
commands,
2121
check=True,
2222
)
2323
while True:

examples/intel_extension_for_pytorch/intel_gpu.py

NOTE(review): the added guard references a variable named `command`, but this function's parameter is `cmd`. As written, `check_cmd` raises a NameError whenever it is called, and even if it didn't, the split result would never reach `subprocess.check_output(cmd, ...)`. The added lines should read `if isinstance(cmd, str): cmd = shlex.split(cmd)`.

+5-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import csv
22
import logging
3+
import shlex
34
import subprocess
45
from io import StringIO
56

@@ -11,8 +12,11 @@
1112

1213
def check_cmd(cmd):
1314
out = None
15+
# Split the command into a list of arguments
16+
if isinstance(command, str):
17+
command = shlex.split(command)
1418
try:
15-
out = subprocess.check_output(cmd, shell=True, timeout=5, text=True)
19+
out = subprocess.check_output(cmd, timeout=5, text=True)
1620
except subprocess.TimeoutExpired:
1721
logging.error("Timeout running %s", cmd)
1822
except FileNotFoundError:

examples/text_classification/run_script.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -3,5 +3,5 @@
33

44
os.makedirs(".data",exist_ok=True)
55

6-
cmd="python train.py AG_NEWS --device cpu --save-model-path model.pt --dictionary source_vocab.pt"
7-
subprocess.run(cmd, shell=True,check=True)
6+
cmd = ["python", "train.py", "AG_NEWS", "--device", "cpu", "--save-model-path", "model.pt", "--dictionary", "source_vocab.pt"]
7+
subprocess.run(cmd, check=True)

setup.py

+8-10
Original file line numberDiff line numberDiff line change
@@ -33,14 +33,14 @@
3333
pkgs = find_packages(exclude=["ts_scripts", "test"])
3434

3535
build_frontend_command = {
36-
"Windows": ".\\frontend\\gradlew.bat -p frontend clean assemble",
37-
"Darwin": "frontend/gradlew -p frontend clean assemble",
38-
"Linux": "frontend/gradlew -p frontend clean assemble",
36+
"Windows": [".\\frontend\\gradlew.bat", "-p", "frontend", "clean", "assemble"],
37+
"Darwin": ["frontend/gradlew", "-p", "frontend", "clean", "assemble"],
38+
"Linux": ["frontend/gradlew", "-p", "frontend", "clean", "assemble"],
3939
}
4040
build_plugins_command = {
41-
"Windows": ".\\plugins\\gradlew.bat -p plugins clean bS",
42-
"Darwin": "plugins/gradlew -p plugins clean bS",
43-
"Linux": "plugins/gradlew -p plugins clean bS",
41+
"Windows": [".\\plugins\\gradlew.bat", "-p", "plugins", "clean", "bS"],
42+
"Darwin": ["plugins/gradlew", "-p", "plugins", "clean", "bS"],
43+
"Linux": ["plugins/gradlew", "-p", "plugins", "clean", "bS"],
4444
}
4545

4646

@@ -94,7 +94,7 @@ def run(self):
9494
os.remove(self.source_server_file)
9595

9696
try:
97-
subprocess.check_call(build_frontend_command[platform.system()], shell=True)
97+
subprocess.check_call(build_frontend_command[platform.system()])
9898
except OSError:
9999
assert 0, "build failed"
100100
copy2(self.source_server_file, self.dest_file_name)
@@ -131,9 +131,7 @@ def run(self):
131131

132132
try:
133133
if self.plugins == "endpoints":
134-
subprocess.check_call(
135-
build_plugins_command[platform.system()], shell=True
136-
)
134+
subprocess.check_call(build_plugins_command[platform.system()])
137135
else:
138136
raise OSError("No such rule exists")
139137
except OSError:

test/pytest/test_benchmark.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -40,10 +40,10 @@ def test_benchmark_e2e(backend):
4040
os.chdir(REPO_ROOT_DIR / "benchmarks")
4141

4242
cmd = subprocess.Popen(
43-
f"{sys.executable} ./benchmark-ab.py --concurrency 1 --requests 10 -bb {backend} --generate_graphs True",
44-
shell=True,
43+
[sys.executable, "./benchmark-ab.py", "--concurrency", "1", "--requests", "10", "-bb", backend, "--generate_graphs", "True"],
4544
stdout=subprocess.PIPE,
4645
)
46+
4747
output_lines = list(cmd.stdout)
4848

4949
assert output_lines[-1].decode("utf-8") == "Test suite execution complete.\n"

test/pytest/test_example_dcgan.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -42,8 +42,7 @@ def teardown_module():
4242
def create_example_mar():
4343
# Create only if not already present
4444
if not os.path.exists(DCGAN_MAR_FILE):
45-
create_mar_cmd = "cd " + DCGAN_EXAMPLE_DIR + ";./create_mar.sh"
46-
subprocess.check_call(create_mar_cmd, shell=True)
45+
subprocess.check_call(["./create_mar.sh"], cwd=DCGAN_EXAMPLE_DIR)
4746

4847

4948
def delete_example_mar():

test/pytest/test_torch_compile.py

+46-38
Original file line numberDiff line numberDiff line change
@@ -68,27 +68,39 @@ def chdir_example(monkeypatch):
6868
@pytest.mark.skipif(PT_2_AVAILABLE == False, reason="torch version is < 2.0.0")
6969
class TestTorchCompile:
7070
def teardown_class(self):
71-
subprocess.run("torchserve --stop", shell=True, check=True)
71+
subprocess.run(["torchserve", "--stop"], check=True)
7272
time.sleep(10)
7373

7474
def test_archive_model_artifacts(self):
7575
assert len(glob.glob(MODEL_FILE)) == 1
7676
assert len(glob.glob(YAML_CONFIG_STR)) == 1
7777
assert len(glob.glob(YAML_CONFIG_DICT)) == 1
78-
subprocess.run(f"cd {TEST_DATA_DIR} && python model.py", shell=True, check=True)
79-
subprocess.run(f"mkdir -p {MODEL_STORE_DIR}", shell=True, check=True)
78+
subprocess.run(["python", "model.py"], cwd=TEST_DATA_DIR, check=True)
79+
os.makedirs(MODEL_STORE_DIR, exist_ok=True)
8080

8181
# register 2 models, one with the backend as str config, the other with the kwargs as dict config
82-
subprocess.run(
83-
f"torch-model-archiver --model-name {MODEL_NAME}_str --version 1.0 --model-file {MODEL_FILE} --serialized-file {SERIALIZED_FILE} --config-file {YAML_CONFIG_STR} --export-path {MODEL_STORE_DIR} --handler {HANDLER_FILE} -f",
84-
shell=True,
85-
check=True,
86-
)
87-
subprocess.run(
88-
f"torch-model-archiver --model-name {MODEL_NAME}_dict --version 1.0 --model-file {MODEL_FILE} --serialized-file {SERIALIZED_FILE} --config-file {YAML_CONFIG_DICT} --export-path {MODEL_STORE_DIR} --handler {HANDLER_FILE} -f",
89-
shell=True,
90-
check=True,
91-
)
82+
subprocess.run([
83+
"torch-model-archiver",
84+
"--model-name", f"{MODEL_NAME}_str",
85+
"--version", "1.0",
86+
"--model-file", MODEL_FILE,
87+
"--serialized-file", SERIALIZED_FILE,
88+
"--config-file", YAML_CONFIG_STR,
89+
"--export-path", MODEL_STORE_DIR,
90+
"--handler", HANDLER_FILE,
91+
"-f"
92+
], check=True)
93+
subprocess.run([
94+
"torch-model-archiver",
95+
"--model-name", f"{MODEL_NAME}_dict",
96+
"--version", "1.0",
97+
"--model-file", MODEL_FILE,
98+
"--serialized-file", SERIALIZED_FILE,
99+
"--config-file", YAML_CONFIG_DICT,
100+
"--export-path", MODEL_STORE_DIR,
101+
"--handler", HANDLER_FILE,
102+
"-f"
103+
], check=True)
92104
assert len(glob.glob(SERIALIZED_FILE)) == 1
93105
assert (
94106
len(glob.glob(os.path.join(MODEL_STORE_DIR, f"{MODEL_NAME}_str.mar"))) == 1
@@ -98,12 +110,16 @@ def test_archive_model_artifacts(self):
98110
)
99111

100112
def test_start_torchserve(self):
101-
cmd = f"torchserve --start --ncs --models {MODEL_NAME}_str.mar,{MODEL_NAME}_dict.mar --model-store {MODEL_STORE_DIR} --enable-model-api --disable-token-auth"
102-
subprocess.run(
103-
cmd,
104-
shell=True,
105-
check=True,
106-
)
113+
command = [
114+
"torchserve",
115+
"--start",
116+
"--ncs",
117+
"--models", f"{MODEL_NAME}_str.mar,{MODEL_NAME}_dict.mar",
118+
"--model-store", MODEL_STORE_DIR,
119+
"--enable-model-api",
120+
"--disable-token-auth"
121+
]
122+
subprocess.run(command, check=True)
107123
time.sleep(10)
108124
assert len(glob.glob("logs/access_log.log")) == 1
109125
assert len(glob.glob("logs/model_log.log")) == 1
@@ -114,12 +130,7 @@ def test_start_torchserve(self):
114130
reason="Test to be run outside docker",
115131
)
116132
def test_server_status(self):
117-
result = subprocess.run(
118-
"curl http://localhost:8080/ping",
119-
shell=True,
120-
capture_output=True,
121-
check=True,
122-
)
133+
result = subprocess.run(["curl", "http://localhost:8080/ping"], capture_output=True, check=True)
123134
expected_server_status_str = '{"status": "Healthy"}'
124135
expected_server_status = json.loads(expected_server_status_str)
125136
assert json.loads(result.stdout) == expected_server_status
@@ -129,12 +140,7 @@ def test_server_status(self):
129140
reason="Test to be run outside docker",
130141
)
131142
def test_registered_model(self):
132-
result = subprocess.run(
133-
"curl http://localhost:8081/models",
134-
shell=True,
135-
capture_output=True,
136-
check=True,
137-
)
143+
result = subprocess.run(["curl", "http://localhost:8081/models"], capture_output=True, check=True)
138144

139145
def _response_to_tuples(response_str):
140146
models = json.loads(response_str)["models"]
@@ -155,13 +161,15 @@ def test_serve_inference(self):
155161
request_json = json.dumps(request_data)
156162

157163
for model_name in [f"{MODEL_NAME}_str", f"{MODEL_NAME}_dict"]:
158-
result = subprocess.run(
159-
f"curl -s -X POST -H \"Content-Type: application/json;\" http://localhost:8080/predictions/{model_name} -d '{request_json}'",
160-
shell=True,
161-
capture_output=True,
162-
check=True,
163-
)
164-
164+
command = [
165+
"curl",
166+
"-s",
167+
"-X", "POST",
168+
"-H", "Content-Type: application/json",
169+
f"http://localhost:8080/predictions/{model_name}",
170+
"-d", request_json
171+
]
172+
result = subprocess.run(command, capture_output=True, check=True)
165173
string_result = result.stdout.decode("utf-8")
166174
float_result = float(string_result)
167175
expected_result = 3.5

0 commit comments

Comments
 (0)