Enable CUTLASS in Linux wheels #635
Workflow file for this run
# GH actions.
name: Wheel-Manylinux-Nightly
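
# Triggers: pushes and pull requests to main, plus a nightly scheduled build at 6 AM UTC.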
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
  schedule:
    - cron: '0 6 * * *'  # 6 AM UTC
jobs:
  Build:
    strategy:
      matrix:
        pkg: ['mlc-ai-nightly']
        mlc-chat-pkg: ['mlc-chat-nightly']
        # matrix of build configs
        config:
          - cuda: 'none'
            image: 'mlcaidev/package-cpu:072632e'
          - cuda: '11.6'
            image: 'mlcaidev/package-cu116:072632e'
          - cuda: '11.7'
            image: 'mlcaidev/package-cu117:072632e'
          - cuda: '11.8'
            image: 'mlcaidev/package-cu118:072632e'
          - cuda: '12.1'
            image: 'mlcaidev/package-cu121:072632e'
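        # Each config entry pairs a CUDA version with the matching prebuilt
        # manylinux build image; 'none' selects the CPU-only image.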
    runs-on: ubuntu-latest
    steps:
      - name: Reclaim disk space
        run: |
          df -h
          sudo rm -rf /usr/local/lib/android
          sudo rm -rf /opt/hostedtoolcache/CodeQL
          sudo rm -rf /usr/local/.ghcup
          df -h
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Setup script env
        run: |
          rm -rf conda
          ln -s 3rdparty/tlcpack/conda conda
      - name: Checkout source
        run: |
          git clone https://github.com/mlc-ai/relax tvm --recursive
          git clone https://github.com/mlc-ai/mlc-llm mlc-llm --recursive
      - name: Sync Package
        run: |
          python scripts/sync_package.py --cuda ${{ matrix.config.cuda }} --package tvm --package-name ${{ matrix.pkg }} --revision origin/mlc --skip-checkout --skip-conda
          python scripts/sync_package.py --cuda ${{ matrix.config.cuda }} --package mlc-llm --package-name ${{ matrix.mlc-chat-pkg }} --revision origin/main --skip-checkout --skip-conda
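      # Build the wheels inside the manylinux image chosen by the matrix entry;
      # docker/bash.sh --no-gpu runs the container without GPU passthrough
      # (no GPU is needed to compile the wheels).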
      - name: Build TVM Unity
        env:
          IMAGE: ${{ matrix.config.image }}
          CUDA: ${{ matrix.config.cuda }}
        run: |
          docker/bash.sh --no-gpu $IMAGE ./scripts/build_mlc_ai_wheel_manylinux.sh --cuda $CUDA
      - name: Build MLC-Chat
        env:
          IMAGE: ${{ matrix.config.image }}
          CUDA: ${{ matrix.config.cuda }}
        run: |
          docker/bash.sh --no-gpu $IMAGE ./scripts/build_mlc_chat_wheel_manylinux.sh --cuda $CUDA
      - name: Wheel-Deploy
        if: github.ref == 'refs/heads/main'
        uses: softprops/action-gh-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.MLC_GITHUB_TOKEN }}
        with:
          files: |
            tvm/python/repaired_wheels/*.whl
            mlc-llm/python/repaired_wheels/*.whl
          tag_name: v0.9.dev0
          prerelease: true
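
For local debugging, the same steps can be replayed by hand from a checkout of this packaging repository. Below is a minimal sketch for the CUDA 12.1 matrix entry, assuming a Linux host with Docker and using only the scripts, image tags, and flags that appear in the workflow above (substitute another matrix entry's CUDA version and image as needed):

  # from the repo root, with submodules checked out (as the actions/checkout step does)
  rm -rf conda && ln -s 3rdparty/tlcpack/conda conda
  git clone https://github.com/mlc-ai/relax tvm --recursive
  git clone https://github.com/mlc-ai/mlc-llm mlc-llm --recursive
  python scripts/sync_package.py --cuda 12.1 --package tvm --package-name mlc-ai-nightly --revision origin/mlc --skip-checkout --skip-conda
  python scripts/sync_package.py --cuda 12.1 --package mlc-llm --package-name mlc-chat-nightly --revision origin/main --skip-checkout --skip-conda
  docker/bash.sh --no-gpu mlcaidev/package-cu121:072632e ./scripts/build_mlc_ai_wheel_manylinux.sh --cuda 12.1
  docker/bash.sh --no-gpu mlcaidev/package-cu121:072632e ./scripts/build_mlc_chat_wheel_manylinux.sh --cuda 12.1
  # the repaired wheels end up in tvm/python/repaired_wheels/ and mlc-llm/python/repaired_wheels/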