# Composite action: create (or reuse) a cached micromamba environment keyed on
# the install command + requirements files, then build the project inside it.
# Expects to run on a self-hosted runner that exposes GHRUNNER_SHARED_DIR as a
# directory shared across jobs (used for the env cache, conda pkgs and ccache).
name: Install env + build
inputs:
  arch:
    description: 'GPU architecture'
    required: true
  python:
    description: 'Python version'
    required: false
    default: "3.11"
  cuda:
    description: 'CUDA version'
    required: false
    default: "11.8"
  pytorch_version:
    description: 'PyTorch version'
    default: "2"
  pytorch_channel:
    description: 'PyTorch channel on conda'
    default: "pytorch"

runs:
  using: composite
  steps:
    - name: Cleanup
      shell: bash
      # Start from clean shell init files; later steps append to ~/.profile.
      run: rm -f ~/.profile ~/.bashrc
    - id: prepare_conda_env_paths
      shell: python
      run: |
        import os
        import hashlib
        import glob
        import datetime
        from pathlib import Path

        CONDA_INSTALL_CMD = "micromamba create python=${{ inputs.python }} zlib pip ninja pytorch=${{ inputs.pytorch_version }} ccache=4.8 pytorch-mutex==1.0=cuda pytorch-cuda=${{ inputs.cuda }} -c ${{ inputs.pytorch_channel }} -c nvidia -c conda-forge -q -y"

        # Cache key = install command + content of every requirement*.txt, so
        # any change to deps produces a new environment hash.
        conda_env_key = CONDA_INSTALL_CMD
        for file in sorted(glob.glob("requirement*.txt")):
            conda_env_key += f"\n########## {file}\n"
            conda_env_key += Path(file).read_text()
        if "${{ inputs.pytorch_channel }}" != "pytorch":
            # Nightly or Test, update every week
            conda_env_key += datetime.date.today().strftime("%Y-week%W") + "\n"
        # utf-8 (identical bytes to ascii for ASCII input, so existing cache
        # keys are unchanged) avoids UnicodeEncodeError on non-ASCII deps files.
        conda_env_hash = hashlib.sha224(conda_env_key.encode("utf-8")).hexdigest()[:8]
        shared_dir = os.environ.get("GHRUNNER_SHARED_DIR", os.getcwd())
        # Build into a run-scoped tmp path; only "Mark env as ready" publishes
        # it (via the env_<hash>.txt link file) once fully built.
        env_path = os.path.join(shared_dir, "tmp", os.environ["GITHUB_RUN_ID"])
        final_env = Path(shared_dir) / f"env_{conda_env_hash}.txt"
        pkg_dir = Path(shared_dir) / "pkgs"
        (Path(shared_dir) / f"env_{conda_env_hash}_content.txt").write_text(conda_env_key)
        CONDA_INSTALL_CMD += " -p " + env_path
        env_already_built = False
        # If environment is already built, reuse it and turn the install
        # command into a no-op ("true").
        if final_env.is_file():
            final_env_link = final_env.read_text().strip()
            if (Path(final_env_link) / "bin" / "python").is_file():
                print("Found valid env - skipping env setup")
                CONDA_INSTALL_CMD = "true"
                env_already_built = True
                env_path = final_env_link
            else:
                print("Invalid env")
        # Append ("a") per the GitHub workflow-commands contract. Mode "r+"
        # writes from offset 0 and would clobber variables that earlier steps
        # already wrote to these files.
        with open(os.environ['GITHUB_ENV'], "a") as fp:
            fp.write("CONDA_ENV_LINK=" + str(final_env) + "\n")
            fp.write("CONDA_PREFIX=" + env_path + "\n")
            fp.write("CONDA_PKGS_DIRS=" + str(pkg_dir) + "\n")
            fp.write("CONDA_INSTALL_CMD=" + CONDA_INSTALL_CMD + "\n")
            fp.write("CONDA_ENV_HASH=" + conda_env_hash + "\n")
            fp.write("PY=" + os.path.join(env_path, "bin", "python") + "\n")
            fp.write("PIP=" + os.path.join(env_path, "bin", "pip") + "\n")
        with open(os.environ['GITHUB_OUTPUT'], "a") as fp:
            fp.write(f"ENV_CACHED={int(env_already_built)}\n")
    - name: Print conda commands
      shell: bash -l {0}
      run: |
        echo "CONDA_PREFIX=$CONDA_PREFIX"
        echo "CONDA_INSTALL_CMD=$CONDA_INSTALL_CMD"
        echo "CONDA_ENV_HASH=$CONDA_ENV_HASH"
        echo "PY=$PY"
    - name: Install micromamba
      shell: bash -l {0}
      run: |
        set -ex
        curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
        # Hook micromamba into login shells so "micromamba activate" works.
        echo "eval \"\$($(pwd)/bin/micromamba shell hook --shell bash)\"" >> ~/.profile
    - name: Conda/pip setup
      shell: bash -l {0}
      if: steps.prepare_conda_env_paths.outputs.ENV_CACHED == 0
      run: |
        set -ex
        micromamba config set channel_priority strict
        # Retry if failed after removing downloaded packages cache
        $CONDA_INSTALL_CMD || (rm -rf $CONDA_PKGS_DIRS && rm -rf $CONDA_PREFIX && $CONDA_INSTALL_CMD)
        $PY -m pip install cmake
        $PY -m pip install -r requirements-benchmark.txt --progress-bar off
    - name: Activate environment
      shell: bash -l {0}
      run: |
        echo "micromamba activate $CONDA_PREFIX" >> ~/.profile
        echo "==== .profile ====="
        cat ~/.profile
    - run: which python
      shell: bash -l {0}
    - name: Setup ccache nvcc
      shell: bash -l {0}
      if: steps.prepare_conda_env_paths.outputs.ENV_CACHED == 0
      # Wrapper script so builds invoke nvcc through ccache.
      run: |
        echo "#!/bin/bash" > $CONDA_PREFIX/bin/nvcc-ccache
        echo "ccache ${CUDA_HOME}/bin/nvcc \"\$@\"" >> $CONDA_PREFIX/bin/nvcc-ccache
        cat $CONDA_PREFIX/bin/nvcc-ccache
        chmod +x $CONDA_PREFIX/bin/nvcc-ccache
        which nvcc
        ccache --version

    - name: Setup ccache g++
      shell: bash -l {0}
      if: steps.prepare_conda_env_paths.outputs.ENV_CACHED == 0
      # Wrapper script so builds invoke g++ through ccache.
      run: |
        echo "#!/bin/bash" > $CONDA_PREFIX/bin/g++-ccache
        echo "ccache g++ \"\$@\"" >> $CONDA_PREFIX/bin/g++-ccache
        cat $CONDA_PREFIX/bin/g++-ccache
        chmod +x $CONDA_PREFIX/bin/g++-ccache
        which g++-ccache

    - name: Patch for https://github.com/pytorch/pytorch/issues/114962
      shell: bash -l {0}
      run: |
        CPP_EXTENSIONS_PY=$(python -c "import torch.utils.cpp_extension; print(torch.utils.cpp_extension.__file__)")
        echo "Patching $CPP_EXTENSIONS_PY"
        sed -i "/generate-dependencies-with-compile/d" $CPP_EXTENSIONS_PY
    - name: Check NVIDIA libs
      shell: bash -l {0}
      run: |
        ldconfig -p | grep libcuda.so
        ls /.singularity.d/libs/
    - name: Mark env as ready
      shell: bash -l {0}
      if: steps.prepare_conda_env_paths.outputs.ENV_CACHED == 0
      # Publish the freshly-built env path so later runs with the same hash
      # can skip setup.
      run: echo $CONDA_PREFIX > $CONDA_ENV_LINK
    - name: Setup ccache
      shell: bash -l {0}
      run: |
        export CCACHE_DIR=$GHRUNNER_SHARED_DIR/ccache
        echo "CCACHE_DIR=$CCACHE_DIR" >> ${GITHUB_ENV}
        mkdir -p $CCACHE_DIR
        ccache -s
    - name: Build
      shell: bash -l {0}
      run: |
        PYTORCH_NVCC="$CONDA_PREFIX/bin/nvcc-ccache" CXX="g++-ccache" TORCH_CUDA_ARCH_LIST=${{ inputs.arch }} python -m pip install -v -e .
    - name: Build info
      run: |
        printenv
        python -m xformers.info
        python xformers/_triton_version_fairinternal.py
        ccache -s
      shell: bash -l {0}