security-fix: resolve security issue with subprocess call using shell=True

- Issue: Fixed [B602:subprocess_popen_with_shell_equals_true] reported by Bandit,
  which flagged the use of `subprocess.Popen` with `shell=True` as a high-severity security risk (CWE-78: OS Command Injection).
- Commands are now passed as argument lists (splitting strings with `shlex.split` where needed), so they run without an intermediate shell and are no longer exposed to shell injection (see the sketch below).

Signed-off-by: ChengyuZhu6 <[email protected]>
ChengyuZhu6 committed Oct 7, 2024
1 parent 15f0300 commit e7a26d4
Showing 17 changed files with 143 additions and 102 deletions.
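The recurring pattern throughout this commit: a command string that used to be handed to a shell is either tokenized with `shlex.split` or rewritten as an explicit argument list, and `shell=True` is dropped. A minimal before/after sketch (the command shown here is illustrative, not taken from the diff):

import shlex
import subprocess

cmd = "torchserve --start --model-store model_store"  # illustrative command string

# Before (flagged by Bandit B602): the string is interpreted by a shell.
# subprocess.run(cmd, shell=True, check=True)

# After: the tokenized command runs directly, with no shell in between,
# so nothing in the string can be abused for command injection.
subprocess.run(shlex.split(cmd), check=True)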
7 changes: 6 additions & 1 deletion benchmarks/auto_benchmark.py
@@ -1,6 +1,7 @@
import argparse
import datetime
import os
import shlex
import shutil
from subprocess import Popen

@@ -259,9 +260,13 @@ def clean_up_benchmark_env(bm_config):

def execute(command, wait=False, stdout=None, stderr=None, shell=True):
print("execute: {}".format(command))

# Split the command into a list of arguments
if isinstance(command, str):
command = shlex.split(command)

cmd = Popen(
command,
shell=shell,
close_fds=True,
stdout=stdout,
stderr=stderr,
8 changes: 6 additions & 2 deletions benchmarks/utils/common.py
@@ -1,12 +1,16 @@
import os
import shlex
from subprocess import Popen


def execute(command, wait=False, stdout=None, stderr=None, shell=True):
def execute(command, wait=False, stdout=None, stderr=None):
print(command)
# Split the command into a list of arguments
if isinstance(command, str):
command = shlex.split(command)

cmd = Popen(
command,
shell=shell,
close_fds=True,
stdout=stdout,
stderr=stderr,
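A minimal sketch of what the refactored execute() helpers in auto_benchmark.py and common.py now do under the hood (the benchmark command is illustrative). Because no shell is involved, shell features such as pipes, redirection and variable expansion are intentionally unavailable to callers:

import shlex
from subprocess import Popen

command = "python benchmark-ab.py --concurrency 10 --requests 100"  # illustrative
args = shlex.split(command)
# ['python', 'benchmark-ab.py', '--concurrency', '10', '--requests', '100']
proc = Popen(args, close_fds=True)  # executed directly, no shell in between
proc.wait()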
6 changes: 4 additions & 2 deletions benchmarks/windows_install_dependencies.py
@@ -5,12 +5,14 @@
import subprocess
import locale
import shutil
import shlex
import argparse

def run(command):
"""Returns (return-code, stdout, stderr)"""
p = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=True)
if isinstance(command, str):
command = shlex.split(command)
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
raw_output, raw_err = p.communicate()
rc = p.returncode
enc = locale.getpreferredencoding()
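One caveat worth noting for this Windows helper (an observation about shlex, not something the commit changes): shlex.split defaults to POSIX rules, where a backslash is an escape character, so a command string containing Windows-style paths would be mangled unless posix=False is used or the command is built as a list up front. A small illustration with a made-up command:

import shlex

cmd = r".\frontend\gradlew.bat -p frontend clean assemble"  # illustrative Windows-style command
print(shlex.split(cmd))               # ['.frontendgradlew.bat', '-p', 'frontend', 'clean', 'assemble'] – backslashes consumed as escapes
print(shlex.split(cmd, posix=False))  # ['.\\frontend\\gradlew.bat', '-p', 'frontend', 'clean', 'assemble']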
7 changes: 4 additions & 3 deletions binaries/s3_binary_upload.py
@@ -2,6 +2,7 @@
import glob
import logging
import os
import shlex
import subprocess
import sys

@@ -39,10 +40,10 @@ def s3_upload_local_folder(self, local_folder_path: str):
"""
LOGGER.info(f"Uploading *.whl files from folder: {local_folder_path}")
s3_command = f"{self.s3_command} --exclude '*' --include '*.whl' {local_folder_path} {self.s3_bucket.rstrip('/')}/whl/{self.channel}"

s3_command = shlex.split(s3_command)
try:
ret_code = subprocess.run(
s3_command, check=True, stdout=subprocess.PIPE, universal_newlines=True, shell=True
subprocess.run(
s3_command, check=True, stdout=subprocess.PIPE, universal_newlines=True
)
except subprocess.CalledProcessError as e:
LOGGER.info(f"S3 upload command failed: {s3_command}. Exception: {e}")
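For this upload helper, the important detail is that shlex.split honors the single quotes around the glob patterns, so '*' and '*.whl' reach the AWS CLI as literal arguments for its own --exclude/--include filtering rather than being expanded by a shell. A rough illustration (the command prefix, bucket and paths are made up):

import shlex

s3_command = "aws s3 cp --recursive --exclude '*' --include '*.whl' dist/ s3://my-bucket/whl/nightly"
print(shlex.split(s3_command))
# ['aws', 's3', 'cp', '--recursive', '--exclude', '*', '--include', '*.whl',
#  'dist/', 's3://my-bucket/whl/nightly']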
4 changes: 2 additions & 2 deletions examples/LLM/llama/chat_app/docker/torchserve_server_app.py
@@ -15,9 +15,9 @@


def start_server():
commands = ["torchserve", "--start", "--ts-config", "/home/model-server/config.properties"]
subprocess.run(
["torchserve --start --ts-config /home/model-server/config.properties"],
shell=True,
commands,
check=True,
)
while True:
6 changes: 5 additions & 1 deletion examples/intel_extension_for_pytorch/intel_gpu.py
@@ -1,5 +1,6 @@
import csv
import logging
import shlex
import subprocess
from io import StringIO

@@ -11,8 +12,11 @@

def check_cmd(cmd):
out = None
# Split the command into a list of arguments
if isinstance(cmd, str):
cmd = shlex.split(cmd)
try:
out = subprocess.check_output(cmd, shell=True, timeout=5, text=True)
out = subprocess.check_output(cmd, timeout=5, text=True)
except subprocess.TimeoutExpired:
logging.error("Timeout running %s", cmd)
except FileNotFoundError:
4 changes: 2 additions & 2 deletions examples/text_classification/run_script.py
@@ -3,5 +3,5 @@

os.makedirs(".data",exist_ok=True)

cmd="python train.py AG_NEWS --device cpu --save-model-path model.pt --dictionary source_vocab.pt"
subprocess.run(cmd, shell=True,check=True)
cmd = ["python", "train.py", "AG_NEWS", "--device", "cpu", "--save-model-path", "model.pt", "--dictionary", "source_vocab.pt"]
subprocess.run(cmd, check=True)
18 changes: 8 additions & 10 deletions setup.py
@@ -33,14 +33,14 @@
pkgs = find_packages(exclude=["ts_scripts", "test"])

build_frontend_command = {
"Windows": ".\\frontend\\gradlew.bat -p frontend clean assemble",
"Darwin": "frontend/gradlew -p frontend clean assemble",
"Linux": "frontend/gradlew -p frontend clean assemble",
"Windows": [".\\frontend\\gradlew.bat", "-p", "frontend", "clean", "assemble"],
"Darwin": ["frontend/gradlew", "-p", "frontend", "clean", "assemble"],
"Linux": ["frontend/gradlew", "-p", "frontend", "clean", "assemble"],
}
build_plugins_command = {
"Windows": ".\\plugins\\gradlew.bat -p plugins clean bS",
"Darwin": "plugins/gradlew -p plugins clean bS",
"Linux": "plugins/gradlew -p plugins clean bS",
"Windows": [".\\plugins\\gradlew.bat", "-p", "plugins", "clean", "bS"],
"Darwin": ["plugins/gradlew", "-p", "plugins", "clean", "bS"],
"Linux": ["plugins/gradlew", "-p", "plugins", "clean", "bS"],
}


@@ -94,7 +94,7 @@ def run(self):
os.remove(self.source_server_file)

try:
subprocess.check_call(build_frontend_command[platform.system()], shell=True)
subprocess.check_call(build_frontend_command[platform.system()])
except OSError:
assert 0, "build failed"
copy2(self.source_server_file, self.dest_file_name)
@@ -131,9 +131,7 @@ def run(self):

try:
if self.plugins == "endpoints":
subprocess.check_call(
build_plugins_command[platform.system()], shell=True
)
subprocess.check_call(build_plugins_command[platform.system()])
else:
raise OSError("No such rule exists")
except OSError:
4 changes: 2 additions & 2 deletions test/pytest/test_benchmark.py
@@ -40,10 +40,10 @@ def test_benchmark_e2e(backend):
os.chdir(REPO_ROOT_DIR / "benchmarks")

cmd = subprocess.Popen(
f"{sys.executable} ./benchmark-ab.py --concurrency 1 --requests 10 -bb {backend} --generate_graphs True",
shell=True,
[sys.executable, "./benchmark-ab.py", "--concurrency", "1", "--requests", "10", "-bb", backend, "--generate_graphs", "True"],
stdout=subprocess.PIPE,
)

output_lines = list(cmd.stdout)

assert output_lines[-1].decode("utf-8") == "Test suite execution complete.\n"
3 changes: 1 addition & 2 deletions test/pytest/test_example_dcgan.py
@@ -42,8 +42,7 @@ def teardown_module():
def create_example_mar():
# Create only if not already present
if not os.path.exists(DCGAN_MAR_FILE):
create_mar_cmd = "cd " + DCGAN_EXAMPLE_DIR + ";./create_mar.sh"
subprocess.check_call(create_mar_cmd, shell=True)
subprocess.check_call(["./create_mar.sh"], cwd=DCGAN_EXAMPLE_DIR)


def delete_example_mar():
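The dcgan test change shows a related pattern: the shell-only `cd <dir>; ./script` idiom is replaced by the cwd= argument, which performs the directory change without any shell. A minimal sketch (the directory name is hypothetical):

import subprocess

example_dir = "examples/image_generation/dcgan_fashiongen"  # hypothetical path for illustration
# Old form relied on a shell: subprocess.check_call("cd " + example_dir + ";./create_mar.sh", shell=True)
# New form: cwd= handles the directory change, so no shell (and no `cd`) is needed.
subprocess.check_call(["./create_mar.sh"], cwd=example_dir)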
84 changes: 46 additions & 38 deletions test/pytest/test_torch_compile.py
@@ -68,27 +68,39 @@ def chdir_example(monkeypatch):
@pytest.mark.skipif(PT_2_AVAILABLE == False, reason="torch version is < 2.0.0")
class TestTorchCompile:
def teardown_class(self):
subprocess.run("torchserve --stop", shell=True, check=True)
subprocess.run(["torchserve", "--stop"], check=True)
time.sleep(10)

def test_archive_model_artifacts(self):
assert len(glob.glob(MODEL_FILE)) == 1
assert len(glob.glob(YAML_CONFIG_STR)) == 1
assert len(glob.glob(YAML_CONFIG_DICT)) == 1
subprocess.run(f"cd {TEST_DATA_DIR} && python model.py", shell=True, check=True)
subprocess.run(f"mkdir -p {MODEL_STORE_DIR}", shell=True, check=True)
subprocess.run(["python", "model.py"], cwd=TEST_DATA_DIR, check=True)
os.makedirs(MODEL_STORE_DIR, exist_ok=True)

# register 2 models, one with the backend as str config, the other with the kwargs as dict config
subprocess.run(
f"torch-model-archiver --model-name {MODEL_NAME}_str --version 1.0 --model-file {MODEL_FILE} --serialized-file {SERIALIZED_FILE} --config-file {YAML_CONFIG_STR} --export-path {MODEL_STORE_DIR} --handler {HANDLER_FILE} -f",
shell=True,
check=True,
)
subprocess.run(
f"torch-model-archiver --model-name {MODEL_NAME}_dict --version 1.0 --model-file {MODEL_FILE} --serialized-file {SERIALIZED_FILE} --config-file {YAML_CONFIG_DICT} --export-path {MODEL_STORE_DIR} --handler {HANDLER_FILE} -f",
shell=True,
check=True,
)
subprocess.run([
"torch-model-archiver",
"--model-name", f"{MODEL_NAME}_str",
"--version", "1.0",
"--model-file", MODEL_FILE,
"--serialized-file", SERIALIZED_FILE,
"--config-file", YAML_CONFIG_STR,
"--export-path", MODEL_STORE_DIR,
"--handler", HANDLER_FILE,
"-f"
], check=True)
subprocess.run([
"torch-model-archiver",
"--model-name", f"{MODEL_NAME}_dict",
"--version", "1.0",
"--model-file", MODEL_FILE,
"--serialized-file", SERIALIZED_FILE,
"--config-file", YAML_CONFIG_DICT,
"--export-path", MODEL_STORE_DIR,
"--handler", HANDLER_FILE,
"-f"
], check=True)
assert len(glob.glob(SERIALIZED_FILE)) == 1
assert (
len(glob.glob(os.path.join(MODEL_STORE_DIR, f"{MODEL_NAME}_str.mar"))) == 1
@@ -98,12 +110,16 @@ def test_archive_model_artifacts(self):
)

def test_start_torchserve(self):
cmd = f"torchserve --start --ncs --models {MODEL_NAME}_str.mar,{MODEL_NAME}_dict.mar --model-store {MODEL_STORE_DIR} --enable-model-api --disable-token-auth"
subprocess.run(
cmd,
shell=True,
check=True,
)
command = [
"torchserve",
"--start",
"--ncs",
"--models", f"{MODEL_NAME}_str.mar,{MODEL_NAME}_dict.mar",
"--model-store", MODEL_STORE_DIR,
"--enable-model-api",
"--disable-token-auth"
]
subprocess.run(command, check=True)
time.sleep(10)
assert len(glob.glob("logs/access_log.log")) == 1
assert len(glob.glob("logs/model_log.log")) == 1
@@ -114,12 +130,7 @@
reason="Test to be run outside docker",
)
def test_server_status(self):
result = subprocess.run(
"curl http://localhost:8080/ping",
shell=True,
capture_output=True,
check=True,
)
result = subprocess.run(["curl", "http://localhost:8080/ping"], capture_output=True, check=True)
expected_server_status_str = '{"status": "Healthy"}'
expected_server_status = json.loads(expected_server_status_str)
assert json.loads(result.stdout) == expected_server_status
@@ -129,12 +140,7 @@ def test_registered_model(self):
reason="Test to be run outside docker",
)
def test_registered_model(self):
result = subprocess.run(
"curl http://localhost:8081/models",
shell=True,
capture_output=True,
check=True,
)
result = subprocess.run(["curl", "http://localhost:8081/models"], capture_output=True, check=True)

def _response_to_tuples(response_str):
models = json.loads(response_str)["models"]
@@ -155,13 +161,15 @@ def test_serve_inference(self):
request_json = json.dumps(request_data)

for model_name in [f"{MODEL_NAME}_str", f"{MODEL_NAME}_dict"]:
result = subprocess.run(
f"curl -s -X POST -H \"Content-Type: application/json;\" http://localhost:8080/predictions/{model_name} -d '{request_json}'",
shell=True,
capture_output=True,
check=True,
)

command = [
"curl",
"-s",
"-X", "POST",
"-H", "Content-Type: application/json",
f"http://localhost:8080/predictions/{model_name}",
"-d", request_json
]
result = subprocess.run(command, capture_output=True, check=True)
string_result = result.stdout.decode("utf-8")
float_result = float(string_result)
expected_result = 3.5
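For the curl-based inference test above, passing an argument list also means the JSON payload travels as a single argv element, so no shell quoting or escaping of the quotes inside the JSON is needed. A rough, self-contained sketch (URL, model name and payload are illustrative):

import json
import subprocess

payload = json.dumps({"instances": [[1.0, 2.0, 3.0]]})  # illustrative request body
result = subprocess.run(
    ["curl", "-s", "-X", "POST",
     "-H", "Content-Type: application/json",
     "http://localhost:8080/predictions/my_model",  # hypothetical endpoint
     "-d", payload],
    capture_output=True, check=True,
)
print(result.stdout.decode("utf-8"))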