
Commit 3e348a2

[PyProject] Spin up an initial pyproject.toml allowing for local pip install (#1527)
* Base pyproject extracting requirements.txt
* Split up install
* Copying requirements.txt comments verbatim
* Apply suggestions from code review

Co-authored-by: Zhenyan Zhang (Meta) <[email protected]>
1 parent bf05236 commit 3e348a2

File tree

3 files changed: +171 −85 lines

install/install_requirements.sh
install/install_torch.sh
pyproject.toml

Diff for: install/install_requirements.sh

+2 −85
@@ -44,96 +44,13 @@ fi
 
 echo "Using pip executable: $PIP_EXECUTABLE"
 
-# Since torchchat often uses main-branch features of pytorch, only the nightly
-# pip versions will have the required features. The PYTORCH_NIGHTLY_VERSION value should
-# agree with the third-party/pytorch pinned submodule commit.
-#
-# NOTE: If a newly-fetched version of the executorch repo changes the value of
-# PYTORCH_NIGHTLY_VERSION, you should re-run this script to install the necessary
-# package versions.
-PYTORCH_NIGHTLY_VERSION=dev20250327
-
-# Nightly version for torchvision
-VISION_NIGHTLY_VERSION=dev20250327
-
-# Nightly version for torchtune
-TUNE_NIGHTLY_VERSION=dev20250327
-
-# The pip repository that hosts nightly torch packages. cpu by default.
-# If cuda is available, based on presence of nvidia-smi, install the pytorch nightly
-# with cuda for faster execution on cuda GPUs.
-if [[ -x "$(command -v nvidia-smi)" ]];
-then
-  TORCH_NIGHTLY_URL="https://download.pytorch.org/whl/nightly/cu126"
-elif [[ -x "$(command -v rocminfo)" ]];
-then
-  TORCH_NIGHTLY_URL="https://download.pytorch.org/whl/nightly/rocm6.2"
-elif [[ -x "$(command -v xpu-smi)" ]];
-then
-  TORCH_NIGHTLY_URL="https://download.pytorch.org/whl/nightly/xpu"
-else
-  TORCH_NIGHTLY_URL="https://download.pytorch.org/whl/nightly/cpu"
-fi
-
-# pip packages needed by exir.
-if [[ -x "$(command -v xpu-smi)" ]];
-then
-  REQUIREMENTS_TO_INSTALL=(
-    torch=="2.8.0.${PYTORCH_NIGHTLY_VERSION}"
-    torchvision=="0.22.0.${VISION_NIGHTLY_VERSION}"
-    #torchtune=="0.7.0" # no 0.6.0 on xpu nightly
-  )
-else
-  REQUIREMENTS_TO_INSTALL=(
-    torch=="2.8.0.${PYTORCH_NIGHTLY_VERSION}"
-    torchvision=="0.22.0.${VISION_NIGHTLY_VERSION}"
-    torchtune=="0.7.0.${TUNE_NIGHTLY_VERSION}"
-  )
-fi
-
-#
-# First install requirements in install/requirements.txt. Older torch may be
-# installed from the dependency of other models. It will be overridden by
-# newer version of torch nightly installed later in this script.
-#
 (
   set -x
-  $PIP_EXECUTABLE install -r install/requirements.txt --extra-index-url "${TORCH_NIGHTLY_URL}"
+  $PIP_EXECUTABLE install -r install/requirements.txt
 )
 
-# Uninstall triton, as nightly will depend on pytorch-triton, which is one and the same
-(
-  set -x
-  $PIP_EXECUTABLE uninstall -y triton
-)
-
-# Install the requirements. --extra-index-url tells pip to look for package
-# versions on the provided URL if they aren't available on the default URL.
-(
-  set -x
-  $PIP_EXECUTABLE install --extra-index-url "${TORCH_NIGHTLY_URL}" \
-    "${REQUIREMENTS_TO_INSTALL[@]}"
-)
+bash install/install_torch.sh
 
-# Temporarily install torchtune nightly from the cpu nightly link since there is no torchtune nightly for xpu yet
-# TODO: Change to install torchtune from the xpu nightly link, once torchtune xpu nightly is ready
-if [[ -x "$(command -v xpu-smi)" ]];
-then
-  (
-    set -x
-    $PIP_EXECUTABLE install --extra-index-url "https://download.pytorch.org/whl/nightly/cpu" \
-      torchtune=="0.6.0.${TUNE_NIGHTLY_VERSION}"
-  )
-fi
-
-bash install/install_torchao.sh
-
-if [[ -x "$(command -v nvidia-smi)" ]]; then
-  (
-    set -x
-    $PYTHON_EXECUTABLE torchchat/utils/scripts/patch_triton.py
-  )
-fi
 (
   set -x
   $PIP_EXECUTABLE install evaluate=="0.4.3" lm-eval=="0.4.7" psutil=="6.0.0"
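
With this change, install_requirements.sh installs install/requirements.txt and the eval packages, and delegates the nightly torch stack to the new install/install_torch.sh. A minimal end-to-end sketch of the resulting flow, assuming a fresh virtual environment (the .venv name is illustrative, not part of this commit):

# Sketch: setup flow after this commit
python3 -m venv .venv
source .venv/bin/activate
bash install/install_requirements.sh   # installs requirements.txt, then runs install/install_torch.sh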

Diff for: install/install_torch.sh

+110
@@ -0,0 +1,110 @@
+#!/bin/bash
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+if [ -z "${PYTHON_EXECUTABLE:-}" ];
+then
+  if [[ -z ${CONDA_DEFAULT_ENV:-} ]] || [[ ${CONDA_DEFAULT_ENV:-} == "base" ]] || [[ ! -x "$(command -v python)" ]];
+  then
+    PYTHON_EXECUTABLE=python3
+  else
+    PYTHON_EXECUTABLE=python
+  fi
+fi
+echo "Using python executable: $PYTHON_EXECUTABLE"
+
+if [[ "$PYTHON_EXECUTABLE" == "python" ]];
+then
+  PIP_EXECUTABLE=pip
+elif [[ "$PYTHON_EXECUTABLE" == "python3" ]];
+then
+  PIP_EXECUTABLE=pip3
+else
+  PIP_EXECUTABLE=pip${PYTHON_SYS_VERSION}
+fi
+echo "Using pip executable: $PIP_EXECUTABLE"
+
+# Since torchchat often uses main-branch features of pytorch, only the nightly
+# pip versions will have the required features. The PYTORCH_NIGHTLY_VERSION value should
+# agree with the third-party/pytorch pinned submodule commit.
+#
+# NOTE: If a newly-fetched version of the executorch repo changes the value of
+# PYTORCH_NIGHTLY_VERSION, you should re-run this script to install the necessary
+# package versions.
+PYTORCH_NIGHTLY_VERSION=dev20250327
+
+# Nightly version for torchvision
+VISION_NIGHTLY_VERSION=dev20250327
+
+# Nightly version for torchtune
+TUNE_NIGHTLY_VERSION=dev20250327
+
+# The pip repository that hosts nightly torch packages. cpu by default.
+# If cuda is available, based on presence of nvidia-smi, install the pytorch nightly
+# with cuda for faster execution on cuda GPUs.
+if [[ -x "$(command -v nvidia-smi)" ]];
+then
+  TORCH_NIGHTLY_URL="https://download.pytorch.org/whl/nightly/cu126"
+elif [[ -x "$(command -v rocminfo)" ]];
+then
+  TORCH_NIGHTLY_URL="https://download.pytorch.org/whl/nightly/rocm6.2"
+elif [[ -x "$(command -v xpu-smi)" ]];
+then
+  TORCH_NIGHTLY_URL="https://download.pytorch.org/whl/nightly/xpu"
+else
+  TORCH_NIGHTLY_URL="https://download.pytorch.org/whl/nightly/cpu"
+fi
+
+# pip packages needed by exir.
+if [[ -x "$(command -v xpu-smi)" ]];
+then
+  REQUIREMENTS_TO_INSTALL=(
+    torch=="2.8.0.${PYTORCH_NIGHTLY_VERSION}"
+    torchvision=="0.22.0.${VISION_NIGHTLY_VERSION}"
+    #torchtune=="0.7.0" # no 0.6.0 on xpu nightly
+  )
+else
+  REQUIREMENTS_TO_INSTALL=(
+    torch=="2.8.0.${PYTORCH_NIGHTLY_VERSION}"
+    torchvision=="0.22.0.${VISION_NIGHTLY_VERSION}"
+    torchtune=="0.7.0.${TUNE_NIGHTLY_VERSION}"
+  )
+fi
+
+# Uninstall triton, as nightly will depend on pytorch-triton, which is one and the same
+(
+  set -x
+  $PIP_EXECUTABLE uninstall -y triton
+)
+
+# Install the requirements. --extra-index-url tells pip to look for package
+# versions on the provided URL if they aren't available on the default URL.
+(
+  set -x
+  $PIP_EXECUTABLE install --extra-index-url "${TORCH_NIGHTLY_URL}" \
+    "${REQUIREMENTS_TO_INSTALL[@]}"
+)
+
+# Temporarily install torchtune nightly from the cpu nightly link since there is no torchtune nightly for xpu yet
+# TODO: Change to install torchtune from the xpu nightly link, once torchtune xpu nightly is ready
+if [[ -x "$(command -v xpu-smi)" ]];
+then
+  (
+    set -x
+    $PIP_EXECUTABLE install --extra-index-url "https://download.pytorch.org/whl/nightly/cpu" \
+      torchtune=="0.6.0.${TUNE_NIGHTLY_VERSION}"
+  )
+fi
+
+bash install/install_torchao.sh
+
+# Delete since already patched in PT main
+if [[ -x "$(command -v nvidia-smi)" ]]; then
+  (
+    set -x
+    $PYTHON_EXECUTABLE torchchat/utils/scripts/patch_triton.py
+  )
+fi
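
For reference, on a CUDA machine the script above boils down to roughly the following pip commands (a hand-expanded sketch of the nvidia-smi branch, with the pinned versions copied verbatim from the script and pip3 assumed as the pip executable):

# Sketch: what install_torch.sh effectively runs when nvidia-smi is present
pip3 uninstall -y triton
pip3 install --extra-index-url https://download.pytorch.org/whl/nightly/cu126 \
  torch=="2.8.0.dev20250327" \
  torchvision=="0.22.0.dev20250327" \
  torchtune=="0.7.0.dev20250327"

The script then runs install/install_torchao.sh and, on CUDA machines, torchchat/utils/scripts/patch_triton.py.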

Diff for: pyproject.toml

+59
@@ -0,0 +1,59 @@
+[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "torchchat"
+version = "0.1.0"
+description = "PyTorch showcase for running LLMs on local devices"
+authors = [
+    {name="PyTorch Team", email="[email protected]"},
+]
+license = { file = "LICENSE" }
+keywords = ["pytorch", "machine learning", "llm"]
+readme = "README.md"
+
+requires-python = ">=3.10"
+dependencies=[
+    # Hugging Face downloads
+    "huggingface_hub",
+
+    # GGUF import
+    "gguf",
+
+    # Tiktoken tokenizer for Llama 3 and other advanced models
+    "tiktoken",
+
+    # Tokenizers and jinja2 for other non-llama models that use HF tokenizers
+    "tokenizers",
+    "jinja2",
+
+    # Miscellaneous
+    "snakeviz",
+    "sentencepiece",
+    "numpy>=1.17",
+    "blobfile",
+    "tomli>=1.1.0; python_version<'3.11'",
+    "openai",
+
+    # Build tools
+    "wheel",
+    "cmake>=3.24,<4.0.0",  # 4.0 is BC breaking
+    "ninja",
+    "zstd",
+
+    # Test tools
+    "pytest",
+
+    # Browser mode
+    "streamlit",
+
+    # Server mode
+    "flask",
+
+    # eval
+    "lm-eval==0.4.7",
+]
+
+[tool.setuptools]
+packages = ["torchchat"]
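
With pyproject.toml in place, the package metadata and the pure-Python dependencies above can be installed straight from a checkout, which is the local pip install the commit title refers to. A minimal sketch (the editable variant is an assumption, not exercised by this commit):

# Sketch: local install from the repository root
pip install .                   # build and install torchchat plus the dependencies listed above
pip install -e .                # editable install for development (assumed to work with the setuptools backend)
bash install/install_torch.sh   # torch/torchvision/torchtune nightlies are still installed by script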
