[Inference]Support sentence transformers clip #30

# Runs the full Neuron export test suite on a self-hosted AWS INF1 runner.
name: Optimum neuron / Test INF1 full export

on:
  push:
    branches: [ main ]
    paths:
      - "optimum/exporters/neuron/*.py"
  pull_request:
    branches: [ main ]
    paths:
      - "optimum/exporters/neuron/*.py"

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  do-the-job:
    name: Run INF1 full export tests
    runs-on: [self-hosted, 4-aws-inf1, 24-cpu, ci]
    env:
      AWS_REGION: us-east-1
    steps:
      - name: Check AMI
        run: dpkg -l | grep neuron
      - name: Checkout
        uses: actions/checkout@v2
      - name: Install system packages
        run: |
          sudo apt install python3.8-venv -y
      - name: Install python packages
        run: |
          python3 -m venv aws_neuron_venv_pytorch
          source aws_neuron_venv_pytorch/bin/activate
          python -m pip install -U pip
          python -m pip config set global.extra-index-url https://pip.repos.neuron.amazonaws.com
          python -m pip install .[neuron,tests]
          # Swap the optimum pulled in above for the released version from PyPI.
          python -m pip uninstall optimum -y
          python -m pip install optimum
      - name: Run CLI tests
        run: |
          source aws_neuron_venv_pytorch/bin/activate
          HF_TOKEN=${{ secrets.HF_TOKEN_OPTIMUM_NEURON_CI }} pytest -m is_inferentia_test tests/cli
      - name: Run export tests
        run: |
          source aws_neuron_venv_pytorch/bin/activate
          HF_TOKEN=${{ secrets.HF_TOKEN_OPTIMUM_NEURON_CI }} pytest -m is_inferentia_test tests/exporters
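For reference, here is a minimal sketch of what an Inferentia-marked export test selected by `pytest -m is_inferentia_test` might look like. Only the `is_inferentia_test` marker name is taken from the workflow above; the test body, the model id sentence-transformers/clip-ViT-B-32, and the bare `optimum-cli export neuron --model ...` invocation are illustrative assumptions, not code from this PR.

# Illustrative sketch only: the model id and CLI flags below are assumptions,
# not code from this repository. The `is_inferentia_test` marker name matches
# the `pytest -m is_inferentia_test` selection used in the workflow above.
import subprocess
import tempfile

import pytest


@pytest.mark.is_inferentia_test
def test_cli_export_sentence_transformers_clip():
    # Export a sentence-transformers CLIP checkpoint with the Neuron exporter CLI
    # and fail the test if the command exits with a non-zero status.
    model_id = "sentence-transformers/clip-ViT-B-32"
    with tempfile.TemporaryDirectory() as output_dir:
        subprocess.run(
            ["optimum-cli", "export", "neuron", "--model", model_id, output_dir],
            check=True,
        )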