# Source: PR #19 — "Add IPEX-LLM with GPU"
# (GitHub web-UI residue removed; it is not valid YAML and would break parsing.)
---
# Publishes a changed llama-index sub-package to PyPI.
# NOTE(review): currently runs `poetry publish --dry-run` and the repository
# guard is commented out — this appears to be a PR-validation configuration,
# not a live release pipeline. Confirm before enabling for real publishes.
name: Publish Sub-Package to PyPI if Needed

on:
  push:
    branches:
      - main
  pull_request:
    branches: [main, ipex-llm-llm-gpu]
    paths:
      - ".github/workflows/publish_sub_package.yml"
      - "llama-index-integrations/**"

env:
  POETRY_VERSION: "1.6.1"
  # Quoted to avoid the YAML float trap (3.10 would parse as 3.1).
  PYTHON_VERSION: "3.10"

jobs:
  publish_subpackage_if_needed:
    # Guard against running on forks; re-enable before merging upstream.
    # if: github.repository == 'run-llama/llama_index'
    runs-on: ubuntu-latest
    steps:
      # Full history is needed so `git diff before..after` below can resolve
      # both commits.
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Set up python ${{ env.PYTHON_VERSION }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: Install Poetry
        uses: snok/install-poetry@v1
        with:
          version: ${{ env.POETRY_VERSION }}

      # Collect pyproject.toml files touched by this push, excluding the core
      # package. NOTE(review): this output is not consumed by the step below,
      # which hard-codes the ipex-llm package instead — presumably intentional
      # while validating that single package; confirm before merging.
      - name: Get changed pyproject files
        id: changed-files
        run: |
          echo "changed_files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | grep -v llama-index-core | grep llama-index | grep pyproject | xargs)" >> $GITHUB_OUTPUT

      # Build and (dry-run) publish the target package. `dirname` replaces the
      # original backtick + sed pipeline; variables are quoted so paths with
      # unusual characters cannot word-split.
      - name: Publish changed packages
        run: |
          for file in llama-index-integrations/llms/llama-index-llms-ipex-llm/pyproject.toml; do
            cd "$(dirname "$file")"
            poetry lock
            pip install -e .
            poetry publish --build --dry-run
            cd -
          done