[Inference] Support sentence transformers clip #105

name: Optimum neuron / Test INF1 inference

on:
  push:
    branches: [ main ]
    paths:
      - "setup.py"
      - "optimum/**.py"
  pull_request:
    branches: [ main ]
    paths:
      - "setup.py"
      - "optimum/**.py"

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  do-the-job:
    name: Run INF1 tests
    runs-on: [self-hosted, 4-aws-inf1, 24-cpu, ci]
    env:
      AWS_REGION: us-east-1
    steps:
      - name: Check AMI
        run: dpkg -l | grep neuron
      - name: Checkout
        uses: actions/checkout@v2
      - name: Install system packages
        run: |
          sudo apt install python3.8-venv -y
      - name: Install python packages
        run: |
          python3 -m venv aws_neuron_venv_pytorch
          source aws_neuron_venv_pytorch/bin/activate
          python -m pip install -U pip
          python -m pip config set global.extra-index-url https://pip.repos.neuron.amazonaws.com
          python -m pip install .[neuron,tests]
          python -m pip uninstall optimum -y
          python -m pip install optimum
      - name: Run inference tests
        run: |
          source aws_neuron_venv_pytorch/bin/activate
          HF_TOKEN=${{ secrets.HF_TOKEN_OPTIMUM_NEURON_CI }} pytest -m is_inferentia_test tests/inference
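
For background on what the PR title refers to: a sentence-transformers CLIP model embeds images and text into a shared vector space, so the same encode call serves both modalities. Below is a minimal CPU-only sketch using the public sentence_transformers API; the model name "clip-ViT-B-32", the image file, and the captions are illustrative assumptions and are not taken from this PR, and the Inferentia compilation path exercised by the tests above is not shown here.

```python
# Illustrative sketch only: plain sentence-transformers CLIP inference on CPU.
# Model name, image path, and captions are assumptions for demonstration.
from PIL import Image
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("clip-ViT-B-32")

# Encode an image and a few candidate captions into the same embedding space.
image_embedding = model.encode(Image.open("two_dogs.jpg"))
text_embeddings = model.encode(
    ["two dogs playing", "a cat on a sofa", "a red car"]
)

# Cosine similarity indicates which caption best matches the image.
scores = util.cos_sim(image_embedding, text_embeddings)
print(scores)
```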