diff --git a/.github/workflows/llm.yml b/.github/workflows/llm.yml
index 6548f08f13..5354b58a00 100644
--- a/.github/workflows/llm.yml
+++ b/.github/workflows/llm.yml
@@ -1,22 +1,23 @@
 name: llm-cpp
 on:
-  pull_request:
-    paths:
-    - llm/cpp/**
-    - .github/workflows/llm.yml
-    - thirdparty/**
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
+  pull_request
 jobs:
   llm-cpp:
-    runs-on: ubuntu-20.04-8-cores
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-20.04, ubuntu-22.04]
+        python-version: [3.7, 3.8, 3.9, '3.10', '3.11']
+    runs-on: ${{ matrix.os }}
     steps:
     - uses: actions/checkout@v4
      with:
        submodules: recursive
     - uses: actions/setup-python@v4
       with:
-        python-version: 3.8
-    - run: llm/cpp/set_up_and_run.sh
+        python-version: ${{ matrix.python-version }}
+    - run: |
+        python -m pip install --upgrade-strategy eager "transformers>=4.36" "optimum[openvino]>=1.15" --extra-index-url https://download.pytorch.org/whl/cpu
+        python -m pip uninstall --yes optimum-intel
+        python -m pip install git+https://github.com/huggingface/optimum-intel.git@5dac93d6e8d15c96fe061c653d82b7afd54954db
+        optimum-cli export openvino -m TinyLlama/TinyLlama-1.1B-Chat-v0.6 .
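
For reference, here is a sketch of the full workflow file as it would look after this patch is applied. The content is taken directly from the diff above; only the YAML indentation and the explanatory comments are inferred, since the original file layout is not shown here.

```yaml
# Sketch of .github/workflows/llm.yml after the patch (indentation assumed)
name: llm-cpp
on:
  pull_request
jobs:
  llm-cpp:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-20.04, ubuntu-22.04]
        python-version: [3.7, 3.8, 3.9, '3.10', '3.11']
    runs-on: ${{ matrix.os }}
    steps:
    - uses: actions/checkout@v4
      with:
        submodules: recursive
    - uses: actions/setup-python@v4
      with:
        python-version: ${{ matrix.python-version }}
    - run: |
        # Install export dependencies, replace the released optimum-intel with the
        # pinned commit from the patch, then export the TinyLlama chat model to
        # OpenVINO IR in the workspace root.
        python -m pip install --upgrade-strategy eager "transformers>=4.36" "optimum[openvino]>=1.15" --extra-index-url https://download.pytorch.org/whl/cpu
        python -m pip uninstall --yes optimum-intel
        python -m pip install git+https://github.com/huggingface/optimum-intel.git@5dac93d6e8d15c96fe061c653d82b7afd54954db
        optimum-cli export openvino -m TinyLlama/TinyLlama-1.1B-Chat-v0.6 .
```

The patch swaps the single large runner and fixed Python 3.8 for a 2x5 OS/Python matrix with `fail-fast: false`, drops the path filters and concurrency group so every pull request runs the job, and replaces the `llm/cpp/set_up_and_run.sh` script with an inline model-export step.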