diff --git a/classify/train.py b/classify/train.py
index 4767be77bd61..8ae2fdd52828 100644
--- a/classify/train.py
+++ b/classify/train.py
@@ -78,7 +78,7 @@ def train(opt, device):
             LOGGER.info(f'\nDataset not found ⚠️, missing path {data_dir}, attempting download...')
             t = time.time()
             if str(data) == 'imagenet':
-                subprocess.run(f"bash {ROOT / 'data/scripts/get_imagenet.sh'}", shell=True, check=True)
+                subprocess.run(["bash", str(ROOT / 'data/scripts/get_imagenet.sh')], check=True)
             else:
                 url = f'https://github.com/ultralytics/yolov5/releases/download/v1.0/{data}.zip'
                 download(url, dir=data_dir.parent)
diff --git a/export.py b/export.py
index 1bf0532dde34..2c9fb77d17be 100644
--- a/export.py
+++ b/export.py
@@ -194,8 +194,15 @@ def export_openvino(file, metadata, half, prefix=colorstr('OpenVINO:')):
     LOGGER.info(f'\n{prefix} starting export with openvino {ie.__version__}...')
     f = str(file).replace('.pt', f'_openvino_model{os.sep}')
 
-    cmd = f"mo --input_model {file.with_suffix('.onnx')} --output_dir {f} --data_type {'FP16' if half else 'FP32'}"
-    subprocess.run(cmd.split(), check=True, env=os.environ)  # export
+    args = [
+        "mo",
+        "--input_model",
+        str(file.with_suffix('.onnx')),
+        "--output_dir",
+        f,
+        "--data_type",
+        ("FP16" if half else "FP32"),]
+    subprocess.run(args, check=True, env=os.environ)  # export
     yaml_save(Path(f) / file.with_suffix('.yaml').name, metadata)  # add metadata.yaml
     return f, None
 
@@ -420,8 +427,15 @@ def export_edgetpu(file, prefix=colorstr('Edge TPU:')):
     f = str(file).replace('.pt', '-int8_edgetpu.tflite')  # Edge TPU model
     f_tfl = str(file).replace('.pt', '-int8.tflite')  # TFLite model
 
-    cmd = f"edgetpu_compiler -s -d -k 10 --out_dir {file.parent} {f_tfl}"
-    subprocess.run(cmd.split(), check=True)
+    subprocess.run([
+        'edgetpu_compiler',
+        '-s',
+        '-d',
+        '-k',
+        '10',
+        '--out_dir',
+        str(file.parent),
+        f_tfl,], check=True)
     return f, None
 
 
@@ -436,11 +450,14 @@ def export_tfjs(file, int8, prefix=colorstr('TensorFlow.js:')):
     f_pb = file.with_suffix('.pb')  # *.pb path
     f_json = f'{f}/model.json'  # *.json path
 
-    int8_export = ' --quantize_uint8 ' if int8 else ''
-
-    cmd = f'tensorflowjs_converter --input_format=tf_frozen_model {int8_export}' \
-          f'--output_node_names=Identity,Identity_1,Identity_2,Identity_3 {f_pb} {f}'
-    subprocess.run(cmd.split())
+    args = [
+        'tensorflowjs_converter',
+        '--input_format=tf_frozen_model',
+        '--quantize_uint8' if int8 else '',
+        '--output_node_names=Identity,Identity_1,Identity_2,Identity_3',
+        str(f_pb),
+        str(f),]
+    subprocess.run([arg for arg in args if arg], check=True)
 
     json = Path(f_json).read_text()
     with open(f_json, 'w') as j:  # sort JSON Identity_* in ascending order
diff --git a/segment/train.py b/segment/train.py
index 883c8b0a2b62..4914f9613a3d 100644
--- a/segment/train.py
+++ b/segment/train.py
@@ -598,8 +598,12 @@ def main(opt, callbacks=Callbacks()):
         # ei = [isinstance(x, (int, float)) for x in hyp.values()]  # evolvable indices
        evolve_yaml, evolve_csv = save_dir / 'hyp_evolve.yaml', save_dir / 'evolve.csv'
         if opt.bucket:
-            subprocess.run(
-                f'gsutil cp gs://{opt.bucket}/evolve.csv {evolve_csv}'.split())  # download evolve.csv if exists
+            # download evolve.csv if exists
+            subprocess.run([
+                'gsutil',
+                'cp',
+                f'gs://{opt.bucket}/evolve.csv',
+                str(evolve_csv),])
 
         for _ in range(opt.evolve):  # generations to evolve
             if evolve_csv.exists():  # if evolve.csv exists: select best hyps and mutate
diff --git a/segment/val.py b/segment/val.py
index 8168b5407c1d..665b540a5490 100644
--- a/segment/val.py
+++ b/segment/val.py
@@ -462,7 +462,7 @@ def main(opt):
                 r, _, t = run(**vars(opt), plots=False)
                 y.append(r + t)  # results and times
             np.savetxt(f, y, fmt='%10.4g')  # save
-        subprocess.run('zip -r study.zip study_*.txt'.split())
+        subprocess.run(['zip', '-r', 'study.zip', 'study_*.txt'])
         plot_val_study(x=x)  # plot
     else:
         raise NotImplementedError(f'--task {opt.task} not in ("train", "val", "test", "speed", "study")')
diff --git a/train.py b/train.py
index db65f2c74c6c..ccda0a7fe2e3 100644
--- a/train.py
+++ b/train.py
@@ -572,8 +572,12 @@ def main(opt, callbacks=Callbacks()):
         # ei = [isinstance(x, (int, float)) for x in hyp.values()]  # evolvable indices
         evolve_yaml, evolve_csv = save_dir / 'hyp_evolve.yaml', save_dir / 'evolve.csv'
         if opt.bucket:
-            subprocess.run(
-                f'gsutil cp gs://{opt.bucket}/evolve.csv {evolve_csv}'.split())  # download evolve.csv if exists
+            # download evolve.csv if exists
+            subprocess.run([
+                'gsutil',
+                'cp',
+                f'gs://{opt.bucket}/evolve.csv',
+                str(evolve_csv),])
 
         for _ in range(opt.evolve):  # generations to evolve
             if evolve_csv.exists():  # if evolve.csv exists: select best hyps and mutate
diff --git a/utils/downloads.py b/utils/downloads.py
index a3ff9274066e..2610f3c66aac 100644
--- a/utils/downloads.py
+++ b/utils/downloads.py
@@ -26,8 +26,10 @@ def is_url(url, check=True):
 
 def gsutil_getsize(url=''):
     # gs://bucket/file size https://cloud.google.com/storage/docs/gsutil/commands/du
-    s = subprocess.check_output(f'gsutil du {url}', shell=True).decode('utf-8')
-    return eval(s.split(' ')[0]) if len(s) else 0  # bytes
+    output = subprocess.check_output(['gsutil', 'du', url], encoding='utf-8')
+    if output:
+        return int(output.split()[0])
+    return 0
 
 
 def url_getsize(url='https://ultralytics.com/images/bus.jpg'):
@@ -36,6 +38,25 @@ def url_getsize(url='https://ultralytics.com/images/bus.jpg'):
     return int(response.headers.get('content-length', -1))
 
 
+def curl_download(url, filename, *, silent: bool = False) -> bool:
+    """
+    Download a file from a url to a filename using curl.
+ """ + silent_option = 'sS' if silent else '' # silent + proc = subprocess.run([ + 'curl', + '-#', + f'-{silent_option}L', + url, + '--output', + filename, + '--retry', + '9', + '-C', + '-',]) + return proc.returncode == 0 + + def safe_download(file, url, url2=None, min_bytes=1E0, error_msg=''): # Attempts to download file from url or url2, checks and removes incomplete downloads < min_bytes from utils.general import LOGGER @@ -50,8 +71,8 @@ def safe_download(file, url, url2=None, min_bytes=1E0, error_msg=''): if file.exists(): file.unlink() # remove partial downloads LOGGER.info(f'ERROR: {e}\nRe-attempting {url2 or url} to {file}...') - subprocess.run( - f"curl -# -L '{url2 or url}' -o '{file}' --retry 3 -C -".split()) # curl download, retry and resume on fail + # curl download, retry and resume on fail + curl_download(url2 or url, file) finally: if not file.exists() or file.stat().st_size < min_bytes: # check if file.exists(): diff --git a/utils/general.py b/utils/general.py index 01f0a3bddc7d..a6af4f3216dd 100644 --- a/utils/general.py +++ b/utils/general.py @@ -38,7 +38,7 @@ import yaml from utils import TryExcept, emojis -from utils.downloads import gsutil_getsize +from utils.downloads import gsutil_getsize, curl_download from utils.metrics import box_iou, fitness FILE = Path(__file__).resolve() @@ -630,9 +630,7 @@ def download_one(url, dir): LOGGER.info(f'Downloading {url} to {f}...') for i in range(retry + 1): if curl: - s = 'sS' if threads > 1 else '' # silent - proc = subprocess.run(f'curl -# -{s}L "{url}" -o "{f}" --retry 9 -C -'.split()) - success = proc.returncode == 0 + success = curl_download(url, f, silent=(threads > 1)) else: torch.hub.download_url_to_file(url, f, progress=threads == 1) # torch download success = f.is_file() diff --git a/val.py b/val.py index 62fa2c980988..7829afb68b79 100644 --- a/val.py +++ b/val.py @@ -398,7 +398,7 @@ def main(opt): r, _, t = run(**vars(opt), plots=False) y.append(r + t) # results and times np.savetxt(f, y, fmt='%10.4g') # save - subprocess.run('zip -r study.zip study_*.txt'.split()) + subprocess.run(['zip', '-r', 'study.zip', 'study_*.txt']) plot_val_study(x=x) # plot else: raise NotImplementedError(f'--task {opt.task} not in ("train", "val", "test", "speed", "study")')