#!/usr/bin/env python
import argparse
import glob
import os
import sys
import subprocess
# To help discover local modules
REPO_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
sys.path.append(REPO_ROOT)
from ts_scripts.utils import try_and_handle
def upload_pypi_packages(args, WHL_PATHS):
    """
    Upload each dist directory in WHL_PATHS to PyPI (or test PyPI) via twine.

    Authentication comes from the environment: TWINE_USERNAME / TWINE_PASSWORD
    (token auth, hence the literal __token__ username). When args.dry_run is
    set, try_and_handle only prints the command instead of executing it.
    """
    dry_run = args.dry_run
    for dist_path in WHL_PATHS:
        # Build the command first, then hand it off once — test PyPI just
        # adds an explicit repository URL.
        if args.test_pypi:
            command = f"twine upload {dist_path}/* --username __token__ --repository-url https://test.pypi.org/legacy/"
        else:
            command = f"twine upload --username __token__ {dist_path}/*"
        try_and_handle(command, dry_run)
# TODO: Mock some file paths to make conda dry run work
def upload_conda_packages(args, PACKAGES, CONDA_PACKAGES_PATH):
    """
    Walk CONDA_PACKAGES_PATH and upload every matching conda tarball to
    anaconda.org via the `anaconda upload` CLI.

    A file is uploaded when its path contains one of the names in PACKAGES
    and ends with "tar.bz2". Authentication uses the ANACONDA_API_TOKEN
    environment variable, which must be set before calling this function.
    If you'd like to upload to a staging environment make sure to pass in
    your personal credentials when you anaconda login instead of the
    pytorch credentials.

    Returns the failing subprocess's return code on the first upload error
    (remaining files are skipped), or None when every upload succeeds.
    Note: `args` is currently unused but kept for interface symmetry with
    upload_pypi_packages.
    """
    for root, _, files in os.walk(CONDA_PACKAGES_PATH):
        for name in files:
            file_path = os.path.join(root, name)
            # Identify *.tar.bz2 files to upload
            if any(word in file_path for word in PACKAGES) and file_path.endswith(
                "tar.bz2"
            ):
                print(f"Uploading to anaconda package: {name}")
                anaconda_upload_command = ["anaconda", "upload", file_path, "--force"]
                try:
                    subprocess.run(
                        anaconda_upload_command,
                        check=True,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE,
                    )
                except subprocess.CalledProcessError as e:
                    # Surface the captured stderr — previously it was piped
                    # and silently discarded, hiding the failure reason.
                    detail = e.stderr.decode(errors="replace") if e.stderr else ""
                    print(
                        f"Failed to upload {name} (exit {e.returncode}): {detail}",
                        file=sys.stderr,
                    )
                    return e.returncode
    print("All packages uploaded to anaconda successfully")
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Upload anaconda and pypi packages for torchserve and torch-model-archiver"
    )
    # The three boolean selector flags share the same shape; register them
    # from a table to keep the definitions in one place.
    for flag, help_text in [
        ("--upload-conda-packages", "Specify whether to upload conda packages"),
        ("--upload-pypi-packages", "Specify whether to upload pypi packages"),
        ("--test-pypi", "Specify whether to upload to test PyPI"),
    ]:
        parser.add_argument(flag, action="store_true", required=False, help=help_text)
    parser.add_argument(
        "--dry_run",
        action="store_true",
        help="dry_run will print the commands that will be run without running them. Only works for pypi now",
    )
    args = parser.parse_args()

    PACKAGES = ["torchserve", "model-archiver", "workflow-archiver"]
    CONDA_PACKAGES_PATH = os.path.join(REPO_ROOT, "binaries", "conda", "output")

    # Candidate dist directories, in torchserve / model-archiver /
    # workflow-archiver order.
    dist_dirs = [
        os.path.join(REPO_ROOT, "dist"),
        os.path.join(REPO_ROOT, "model-archiver", "dist"),
        os.path.join(REPO_ROOT, "workflow-archiver", "dist"),
    ]
    if args.dry_run:
        # Dry run: the dist directories may not exist yet, so skip globbing
        # and use the expected paths directly.
        WHL_PATHS = dist_dirs
    else:
        WHL_PATHS = [glob.glob(d)[0] for d in dist_dirs]

    if args.upload_pypi_packages:
        upload_pypi_packages(args, WHL_PATHS)
    if args.upload_conda_packages:
        upload_conda_packages(args, PACKAGES, CONDA_PACKAGES_PATH)