From 80afa683c312272db6bd00cb98a33c1bbbe78f90 Mon Sep 17 00:00:00 2001 From: Sixian Yi Date: Thu, 16 Jan 2025 18:02:48 -0800 Subject: [PATCH] rebase --- .github/workflows/ci-test.yml | 80 +++++++++++++++ .github/workflows/publish-to-test-pypi.yml | 97 ++++++++++--------- .../tests/github_ci_test_config.yaml | 19 ++++ 3 files changed, 151 insertions(+), 45 deletions(-) create mode 100644 .github/workflows/ci-test.yml create mode 100644 llama_stack/providers/tests/github_ci_test_config.yaml diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml new file mode 100644 index 0000000000..1b0086f39c --- /dev/null +++ b/.github/workflows/ci-test.yml @@ -0,0 +1,80 @@ +name: Integration-test + +on: + workflow_dispatch: + push: + +run-name: + "CI test" + +env: + TESTS_PATH: "${{ github.workspace }}/llama_stack/providers/tests" + +jobs: + build: + name: Build + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.x' + + + run_tests: + name: Run tests + runs-on: ubuntu-latest + needs: + build + steps: + - name: "Checkout 'meta-llama/llama-stack' repository" + uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.x' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + pip install -e . + pip install -U \ + torch torchvision \ + pytest pytest_asyncio \ + fairscale lm-format-enforcer \ + zmq chardet pypdf \ + pandas sentence_transformers together \ + aiosqlite groq fireworks-ai \ + pytest-asyncio + + # - name: Install Ollama + # run: | + # curl -fsSL https://ollama.com/install.sh | sh + + # - name: spin up ollama server + # run: | + # ollama run llama3.1:8b-instruct-fp16 + # sleep 10 + + - name: Run integration test + working-directory: "${{ github.workspace }}" + run: | + export FIREWORKS_API_KEY=${{ secrets.FIREWORKS_API_KEY }} + export TOGETHER_API_KEY=${{ secrets.TOGETHER_API_KEY }} + echo "Current directory: $(pwd)" + echo "Repository root: ${{ github.workspace }}" + echo "Branch: ${{ github.ref }}" + echo "List of tests" + pytest ${{ github.workspace }}/llama_stack/providers/tests/ -k "fireworks and together" --config=github_ci_test_config.yaml + + # - name: Upload pytest test results + # uses: actions/upload-artifact@v4 + # with: + # name: pytest_report.md + # path: pytest_report.md + # # Use always() to always run this step to publish test results when there are test failures + # if: ${{ always() }} diff --git a/.github/workflows/publish-to-test-pypi.yml b/.github/workflows/publish-to-test-pypi.yml index 35cbc4dc34..b148d8ac0f 100644 --- a/.github/workflows/publish-to-test-pypi.yml +++ b/.github/workflows/publish-to-test-pypi.yml @@ -1,14 +1,15 @@ name: Publish Python 🐍 distribution 📦 to TestPyPI on: - workflow_dispatch: # Keep manual trigger - inputs: - version: - description: 'Version number (e.g. 0.0.63.dev20250111)' - required: true - type: string - schedule: - - cron: "0 0 * * *" # Run every day at midnight + push: + # workflow_dispatch: # Keep manual trigger + # inputs: + # version: + # description: 'Version number (e.g. 
0.0.63.dev20250111)' + # required: true + # type: string + # schedule: + # - cron: "0 0 * * *" # Run every day at midnight jobs: trigger-client-and-models-build: @@ -201,43 +202,49 @@ jobs: runs-on: ubuntu-latest env: TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }} + FIREWORKS_API_KEY: ${{ secrets.FIREWORKS_API_KEY }} TAVILY_SEARCH_API_KEY: ${{ secrets.TAVILY_SEARCH_API_KEY }} steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: Install the package - run: | - max_attempts=6 - attempt=1 - while [ $attempt -le $max_attempts ]; do - echo "Attempt $attempt of $max_attempts to install package..." - if pip install --no-cache --index-url https://pypi.org/simple/ --extra-index-url https://test.pypi.org/simple/ llama-stack==${{ needs.trigger-client-and-models-build.outputs.version }}; then - echo "Package installed successfully" - break - fi - if [ $attempt -ge $max_attempts ]; then - echo "Failed to install package after $max_attempts attempts" - exit 1 - fi - attempt=$((attempt + 1)) - sleep 10 - done - - name: Test the package versions + # - uses: actions/checkout@v4 + # with: + # persist-credentials: false + # - name: Install the package + # run: | + # max_attempts=6 + # attempt=1 + # while [ $attempt -le $max_attempts ]; do + # echo "Attempt $attempt of $max_attempts to install package..." + # if pip install --no-cache --index-url https://pypi.org/simple/ --extra-index-url https://test.pypi.org/simple/ llama-stack==${{ needs.trigger-client-and-models-build.outputs.version }}; then + # echo "Package installed successfully" + # break + # fi + # if [ $attempt -ge $max_attempts ]; then + # echo "Failed to install package after $max_attempts attempts" + # exit 1 + # fi + # attempt=$((attempt + 1)) + # sleep 10 + # done + # - name: Test the package versions + # run: | + # pip list | grep llama_ + # - name: Test CLI commands + # run: | + # llama model list + # llama stack build --list-templates + # llama model prompt-format -m Llama3.2-11B-Vision-Instruct + # llama stack list-apis + # llama stack list-providers inference + # llama stack list-providers telemetry + # - name: Test Notebook + # run: | + # pip install pytest nbval + # llama stack build --template together --image-type venv + # pytest -v -s --nbval-lax ./docs/notebooks/Llama_Stack_Building_AI_Applications.ipynb + - name: Integration tests + working-directory: "${{ github.workspace }}" run: | - pip list | grep llama_ - - name: Test CLI commands - run: | - llama model list - llama stack build --list-templates - llama model prompt-format -m Llama3.2-11B-Vision-Instruct - llama stack list-apis - llama stack list-providers inference - llama stack list-providers telemetry - - name: Test Notebook - run: | - pip install pytest nbval - llama stack build --template together --image-type venv - pytest -v -s --nbval-lax ./docs/notebooks/Llama_Stack_Building_AI_Applications.ipynb - - # TODO: add trigger for integration test workflow & docker builds + pip install pytest_html + export FIREWORKS_API_KEY=${{ secrets.FIREWORKS_API_KEY }} + export TOGETHER_API_KEY=${{ secrets.TOGETHER_API_KEY }} + pytest ${{ github.workspace }}/llama_stack/providers/tests/ -k "fireworks and together" --config=github_ci_test_config.yaml diff --git a/llama_stack/providers/tests/github_ci_test_config.yaml b/llama_stack/providers/tests/github_ci_test_config.yaml new file mode 100644 index 0000000000..9bf2f15b10 --- /dev/null +++ b/llama_stack/providers/tests/github_ci_test_config.yaml @@ -0,0 +1,19 @@ +inference: + tests: + - 
inference/test_vision_inference.py::test_vision_chat_completion_streaming + - inference/test_vision_inference.py::test_vision_chat_completion_non_streaming + - inference/test_text_inference.py::test_structured_output + - inference/test_text_inference.py::test_completion + - inference/test_text_inference.py::test_chat_completion_streaming + - inference/test_text_inference.py::test_chat_completion_non_streaming + - inference/test_text_inference.py::test_chat_completion_with_tool_calling + - inference/test_text_inference.py::test_chat_completion_with_tool_calling_streaming + + fixtures: + provider_fixtures: + - default_fixture_param_id: fireworks + - inference: together + + inference_models: + - meta-llama/Llama-3.1-8B-Instruct + - meta-llama/Llama-3.2-11B-Vision-Instruct
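
Note: the "Run integration test" step added above can be reproduced locally. This is a minimal sketch, assuming it is run from the repository root, that the test dependencies installed in the workflow are already present, and that the two API-key placeholders (hypothetical values, not part of this patch) are replaced with real credentials:

    export FIREWORKS_API_KEY=<your-fireworks-key>   # placeholder, set to a real key
    export TOGETHER_API_KEY=<your-together-key>     # placeholder, set to a real key
    # same invocation as the workflow step, run from the repo root
    pytest llama_stack/providers/tests/ \
        -k "fireworks and together" \
        --config=github_ci_test_config.yaml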
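
Because -k "fireworks and together" is a pytest keyword expression (a test ID is selected only if it matches both keywords), it can be worth checking which parametrizations are actually collected before starting a full run. A quick sketch using pytest's standard collect-only mode, under the same assumptions as above:

    # list the selected test IDs without executing them
    pytest llama_stack/providers/tests/ \
        -k "fireworks and together" \
        --config=github_ci_test_config.yaml \
        --collect-only -q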