name: General Checks
run-name: "General Checks for: ${{ github.event.pull_request.title || github.ref }}"
on:
  merge_group:
    paths-ignore:
      - "docs/**"
  pull_request:
    branches: ["main"]
  workflow_dispatch:
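# On 'main', each run gets a unique concurrency group (its run ID), so nothing is ever cancelled there;
# on other refs, runs of this workflow for the same ref share a group and an in-progress run is cancelled
# when a newer one starts.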
concurrency:
  group: ${{ github.ref == 'refs/heads/main' && github.run_id || format('general-{0}-{1}', github.workflow, github.ref) }}
  cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
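# Environment shared by every job. SQLX_OFFLINE=1 makes sqlx compile queries against the checked-in
# offline query metadata instead of requiring a live database at build time.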
env:
  FORCE_COLOR: 1
  TENSORZERO_CLICKHOUSE_URL: "http://chuser:chpassword@localhost:8123/tensorzero"
  SQLX_OFFLINE: 1
  R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
  R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
  RUST_MSRV: 1.88.0
jobs:
  check-version-consistency:
    permissions:
      contents: read
    runs-on: ubuntu-latest
    if: github.repository == 'tensorzero/tensorzero'
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
      - name: Check version consistency
        run: ./ci/check-version-consistency.sh
  # This checks for specific markers in the codebase that require multiple files to be edited together.
  check-if-edited-then-edit:
    permissions:
      contents: read
    runs-on: ubuntu-latest
    # Should run on all PRs (including from forks) as well, if the label is not present
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip-if-edited-check') }}
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: "3.9"
      - name: Check coordinated edits
        run: python3 ci/check_coordinated_edits.py
  # Like 'check-docker-compose', but loads the freshly built 'gateway' and 'ui' container images first
  # This checks that all of our docker-compose files work with the changes in the PR
  check-latest-docker-compose:
    needs: [build-gateway-container, build-ui-container]
    permissions:
      # Permission to checkout the repository
      contents: read
      # Permission to fetch GitHub OIDC token authentication
      id-token: write
      # Permission to download artifacts
      actions: read
    runs-on: ubuntu-latest
    if: github.repository == 'tensorzero/tensorzero'
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Download container images
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
        with:
          pattern: build-{gateway,ui}-container
          merge-multiple: true
      - name: Load `gateway` and `ui` containers
        run: |
          docker load < gateway-container.tar
          docker load < ui-container.tar
          # Retag the loaded images to match what docker-compose expects
          docker tag tensorzero/gateway:sha-${{ github.sha }} tensorzero/gateway:latest
          docker tag tensorzero/ui:sha-${{ github.sha }} tensorzero/ui:latest
      - name: Create the `object_storage` directory for the multimodal-vision-finetuning example
        run: |
          mkdir -p examples/multimodal-vision-finetuning/object_storage
          chmod 777 examples/multimodal-vision-finetuning/object_storage
      - name: Apply Postgres migrations for the enforce-custom-rate-limits example
        # Note: In isolation, applying migrations should not require `OPENAI_API_KEY`, but Docker Compose will complain if it's not set given our docker-compose.yml file.
        # Note: We must delete the Docker network because later we use `ci/internal-network.yml`
        run: |
          cd examples/docs/guides/operations/enforce-custom-rate-limits
          OPENAI_API_KEY=dummy docker compose run --rm gateway --run-postgres-migrations
          OPENAI_API_KEY=dummy docker compose down
      - name: Apply Postgres migrations for the set-up-auth-for-tensorzero example
        # Note: In isolation, applying migrations should not require `OPENAI_API_KEY`, but Docker Compose will complain if it's not set given our docker-compose.yml file.
        # Note: We must delete the Docker network because later we use `ci/internal-network.yml`
        run: |
          cd examples/docs/guides/operations/set-up-auth-for-tensorzero
          OPENAI_API_KEY=dummy docker compose run --rm gateway --run-postgres-migrations
          OPENAI_API_KEY=dummy docker compose down
      - name: Apply Postgres migrations for the run-adaptive-ab-tests example
        # Note: In isolation, applying migrations should not require `OPENAI_API_KEY`, but Docker Compose will complain if it's not set given our docker-compose.yml file.
        # Note: We must delete the Docker network because later we use `ci/internal-network.yml`
        run: |
          cd examples/docs/guides/experimentation/run-adaptive-ab-tests
          OPENAI_API_KEY=dummy docker compose run --rm gateway --run-postgres-migrations
          OPENAI_API_KEY=dummy docker compose down
      - name: Apply Postgres migrations for examples/blog/bandits-in-your-llm-gateway
        # Note: In isolation, applying migrations should not require `ANTHROPIC_API_KEY`, but Docker Compose will complain if it's not set given our docker-compose.yml file.
        # Note: We must delete the Docker network because later we use `ci/internal-network.yml`
        run: |
          cd examples/blog/bandits-in-your-llm-gateway
          ANTHROPIC_API_KEY=dummy docker compose run --rm gateway --run-postgres-migrations
          ANTHROPIC_API_KEY=dummy docker compose down
      - name: Check latest docker-compose
        run: ./ci/check-all-docker-compose.sh
  # Checks our docker-compose files against the most recent published 'gateway' and 'ui' container images
  # (not the ones built for this PR)
  check-docker-compose:
    permissions:
      # Permission to checkout the repository
      contents: read
      # Permission to fetch GitHub OIDC token authentication
      id-token: write
    runs-on: ubuntu-latest
    timeout-minutes: 120
    if: github.repository == 'tensorzero/tensorzero'
    steps:
      - name: Set DNS
        run: echo "127.0.0.1 howdy.tensorzero.com" | sudo tee -a /etc/hosts
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      # We allow the namespace builder setup to fail on Dependabot PRs and PRs from forks
      # (where the oidc token is not available)
      - name: Install Namespace CLI
        uses: namespacelabs/nscloud-setup@d1c625762f7c926a54bd39252efff0705fd11c64
        continue-on-error: ${{ github.event.pull_request.head.repo.full_name != github.repository || github.actor == 'dependabot[bot]' }}
      - name: Configure Namespace-powered Buildx
        uses: namespacelabs/nscloud-setup-buildx-action@91c2e6537780e3b092cb8476406be99a8f91bd5e
        with:
          wait-for-builder: true
        continue-on-error: ${{ github.event.pull_request.head.repo.full_name != github.repository || github.actor == 'dependabot[bot]' }}
      - name: Create the `object_storage` directory for the multimodal-vision-finetuning example
        run: |
          mkdir -p examples/multimodal-vision-finetuning/object_storage
          chmod 777 examples/multimodal-vision-finetuning/object_storage
      - name: Apply Postgres migrations for the enforce-custom-rate-limits example
        # Note: In isolation, applying migrations should not require `OPENAI_API_KEY`, but Docker Compose will complain if it's not set given our docker-compose.yml file.
        # Note: We must delete the Docker network because later we use `ci/internal-network.yml`
        run: |
          cd examples/docs/guides/operations/enforce-custom-rate-limits
          OPENAI_API_KEY=dummy docker compose run --rm gateway --run-postgres-migrations
          OPENAI_API_KEY=dummy docker compose down
      - name: Apply Postgres migrations for the set-up-auth-for-tensorzero example
        # Note: In isolation, applying migrations should not require `OPENAI_API_KEY`, but Docker Compose will complain if it's not set given our docker-compose.yml file.
        # Note: We must delete the Docker network because later we use `ci/internal-network.yml`
        run: |
          cd examples/docs/guides/operations/set-up-auth-for-tensorzero
          OPENAI_API_KEY=dummy docker compose run --rm gateway --run-postgres-migrations
          OPENAI_API_KEY=dummy docker compose down
      - name: Apply Postgres migrations for the run-adaptive-ab-tests example
        # Note: In isolation, applying migrations should not require `OPENAI_API_KEY`, but Docker Compose will complain if it's not set given our docker-compose.yml file.
        # Note: We must delete the Docker network because later we use `ci/internal-network.yml`
        run: |
          cd examples/docs/guides/experimentation/run-adaptive-ab-tests
          OPENAI_API_KEY=dummy docker compose run --rm gateway --run-postgres-migrations
          OPENAI_API_KEY=dummy docker compose down
      - name: Apply Postgres migrations for examples/blog/bandits-in-your-llm-gateway
        # Note: In isolation, applying migrations should not require `ANTHROPIC_API_KEY`, but Docker Compose will complain if it's not set given our docker-compose.yml file.
        # Note: We must delete the Docker network because later we use `ci/internal-network.yml`
        run: |
          cd examples/blog/bandits-in-your-llm-gateway
          ANTHROPIC_API_KEY=dummy docker compose run --rm gateway --run-postgres-migrations
          ANTHROPIC_API_KEY=dummy docker compose down
      - name: Check all docker-compose.yml files
        run: ./ci/check-all-docker-compose.sh
  check-python-client-build:
    permissions:
      contents: read
    uses: ./.github/workflows/python-client-build.yml
  check-node-bindings:
    permissions:
      contents: read
      # Permission to upload artifacts
      actions: write
    runs-on: ubuntu-latest
    if: github.repository == 'tensorzero/tensorzero'
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Setup Node.js
        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e
        with:
          node-version: "24.11.0"
      - name: Install pnpm
        run: |
          for attempt in 1 2 3; do
            if npm install -g pnpm@latest; then
              break
            fi
            if [ $attempt -eq 3 ]; then
              echo "Failed to install pnpm after 3 attempts"
              exit 1
            fi
            sleep $((10 * attempt))
          done
        shell: bash
      - name: Install dependencies
        working-directory: internal/tensorzero-node
        run: pnpm install --frozen-lockfile
      - name: Build Node.js bindings
        working-directory: internal/tensorzero-node
        run: pnpm build-bindings
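      # The generated bindings are checked in, so rebuilding them must not produce a diff;
      # if it does, the freshly built bindings are uploaded as an artifact so they can be reviewed and committed.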
      - name: Check for git diff after building bindings
        id: git_diff_check
        run: git diff --exit-code
      - name: Upload bindings artifact on diff failure
        if: failure()
        uses: namespace-actions/upload-artifact@9a78c62e083914789d908952f9773e42744b9f68
        with:
          name: node-bindings
          path: internal/tensorzero-node/lib/bindings
          retention-days: 7
      - name: Check tensorzero-node bindings are properly exported
        run: pnpm --filter=tensorzero-node run check-exports
  check-python-schemas:
    permissions:
      contents: read
    runs-on: ubuntu-latest
    if: github.repository == 'tensorzero/tensorzero'
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Setup Node.js
        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e
        with:
          node-version: "24.11.0"
      - name: Install pnpm
        run: |
          for attempt in 1 2 3; do
            if npm install -g pnpm@latest; then
              break
            fi
            if [ $attempt -eq 3 ]; then
              echo "Failed to install pnpm after 3 attempts"
              exit 1
            fi
            sleep $((10 * attempt))
          done
        shell: bash
      - name: Install uv
        uses: astral-sh/setup-uv@ed21f2f24f8dd64503750218de024bcf64c7250a
        with:
          version: "0.6.17"
      - name: Generate Python schemas
        run: pnpm generate-python-schemas
      - name: Check for git diff after generating schemas
        id: git_diff_check
        run: git diff --exit-code
  build-windows:
    permissions:
      contents: read
      # Permission for rust-cache to read/write cache
      actions: read
    runs-on: windows-latest
    if: github.repository == 'tensorzero/tensorzero'
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Install Rust toolchain
        run: |
          for attempt in 1 2 3; do
            if rustup toolchain install stable && rustup default stable; then
              break
            fi
            if [ $attempt -eq 3 ]; then
              echo "Failed to install Rust toolchain after 3 attempts"
              exit 1
            fi
            sleep $((10 * attempt))
          done
        shell: bash
      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
        with:
          cache-provider: "buildjet"
          save-if: ${{ github.event_name == 'merge_group' }}
      - name: Install uv
        uses: astral-sh/setup-uv@ed21f2f24f8dd64503750218de024bcf64c7250a
        with:
          version: "0.6.17"
      - name: Install Python 3.10
        run: uv python install 3.10
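      # PYO3_PYTHON points PyO3's build script at the interpreter installed above, so the Python bindings
      # are built against Python 3.10.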
      - name: Set PYO3_PYTHON
        run: echo "PYO3_PYTHON=$(uv python find python3.10)" >> $GITHUB_ENV
        shell: bash
      - name: Build Rust
        run: cargo build --workspace
  lint-rust:
    permissions:
      contents: read
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # Keep 'total_partitions' in sync with the number of entries in the 'partition' array
        partition: [1, 2, 3, 4]
        include:
          - total_partitions: 4
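    # Each matrix job lints one shard: cargo-hack's `--partition N/M` flag (used in the 'Lint (Rust)'
    # step below) splits the per-feature clippy runs across the four partitions so they run in parallel.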
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      # We install the latest rust when running lints, since we don't care if older rust versions lint successfully.
      # This avoids the need to deal with linter bugs in older rust versions.
      - name: Install Rust toolchain
        run: |
          for attempt in 1 2 3; do
            if rustup toolchain install stable --component clippy && rustup default stable; then
              break
            fi
          done
        shell: bash
      - name: Install cargo-hack
        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb
        with:
          tool: cargo-hack
      - name: Lint (Rust)
        run: |
          cargo hack --partition '${{ matrix.partition }}/${{ matrix.total_partitions }}' clippy --all-targets --each-feature -- -D warnings
  validate-python:
    permissions:
      contents: read
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      # We deliberately install our MSRV here (rather than 'stable') to ensure that everything compiles with that version
      - name: Install Rust ${{ env.RUST_MSRV }}
        run: |
          for attempt in 1 2 3; do
            if rustup install ${{ env.RUST_MSRV }} --component rustfmt && rustup default ${{ env.RUST_MSRV }}; then
              break
            fi
            if [ $attempt -eq 3 ]; then
              echo "Failed to install Rust ${{ env.RUST_MSRV }} after 3 attempts"
              exit 1
            fi
            sleep $((10 * attempt))
          done
        shell: bash
      - name: Print Rust version
        run: rustc --version
      - name: Install uv
        uses: astral-sh/setup-uv@ed21f2f24f8dd64503750218de024bcf64c7250a
        with:
          version: "0.6.17"
      - name: uv-lock
        run: |
          bash -c '
          git ls-files "**/pyproject.toml" \
          | while read f; do
              dir=$(dirname "$f")
              (cd "$dir" && uv lock --project="pyproject.toml")
            done
          '
      - name: uv-export
        run: |
          bash -c '
          git ls-files "**/pyproject.toml" \
          | while read f; do
              dir=$(dirname "$f")
              (cd "$dir" && uv export --project="pyproject.toml" --output-file=requirements.txt --quiet)
            done
          '
      - name: verify uv generated files
        run: git diff --exit-code
      - name: Install Python for python async client tests
        run: uv python install 3.9
      - name: Lint (Python:ruff)
        run: |
          uvx ruff@0.14.0 check --output-format=github --extend-select I .
          uvx ruff@0.14.0 format --check .
      - name: "Python: Pyo3 Client: Build and install dependencies"
        working-directory: clients/python
        run: |
          uv venv
          uv pip sync requirements.txt
      - name: "Python: PyO3 Client: pyright"
        working-directory: clients/python
        run: |
          uv pip install pyright==1.1.394
          uv run pyright
      - name: "Python: PyO3 Client: stubtest"
        working-directory: clients/python
        run: |
          uv run stubtest tensorzero.tensorzero
      - name: "Python: Recipes: Install dependencies"
        working-directory: recipes
        run: |
          uv venv
          uv sync
      - name: "Python: Recipes: pyright"
        working-directory: recipes
        run: |
          uv run pyright
      - name: Compile / validate notebooks
        run: ci/compile-check-notebooks.sh
  validate-node:
    permissions:
      contents: read
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      # We deliberately install our MSRV here (rather than 'stable') to ensure that everything compiles with that version
      - name: Install Rust ${{ env.RUST_MSRV }}
        run: |
          for attempt in 1 2 3; do
            if rustup install ${{ env.RUST_MSRV }} --component rustfmt && rustup default ${{ env.RUST_MSRV }}; then
              break
            fi
            if [ $attempt -eq 3 ]; then
              echo "Failed to install Rust ${{ env.RUST_MSRV }} after 3 attempts"
              exit 1
            fi
            sleep $((10 * attempt))
          done
        shell: bash
      - name: Print Rust version
        run: rustc --version
      - name: Setup Node.js
        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e
        with:
          node-version: "24.11.0"
      - name: Install pnpm
        run: |
          for attempt in 1 2 3; do
            if npm install -g pnpm@latest; then
              break
            fi
            if [ $attempt -eq 3 ]; then
              echo "Failed to install pnpm after 3 attempts"
              exit 1
            fi
            sleep $((10 * attempt))
          done
        shell: bash
      - name: Install JS dependencies
        run: pnpm install --frozen-lockfile
      - name: Build npm workspace
        run: pnpm -r build
      - name: Format tensorzero-node package
        run: pnpm --filter=tensorzero-node run format:check
      - name: Lint tensorzero-node package
        run: pnpm --filter=tensorzero-node run lint:check
      - name: Typecheck tensorzero-node package
        run: pnpm --filter=tensorzero-node run typecheck
      - name: Run ESLint
        run: pnpm --filter=tensorzero-ui run lint:check
      - name: Run Prettier
        run: pnpm --filter=tensorzero-ui run format:check
      - name: pnpm TypeScript type checking
        run: pnpm --filter=tensorzero-ui run typecheck
  rust-build:
    permissions:
      contents: read
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      # We deliberately install our MSRV here (rather than 'stable') to ensure that everything compiles with that version
      - name: Install Rust ${{ env.RUST_MSRV }}
        run: |
          for attempt in 1 2 3; do
            if rustup install ${{ env.RUST_MSRV }} --component rustfmt && rustup default ${{ env.RUST_MSRV }}; then
              break
            fi
            if [ $attempt -eq 3 ]; then
              echo "Failed to install Rust ${{ env.RUST_MSRV }} after 3 attempts"
              exit 1
            fi
            sleep $((10 * attempt))
          done
        shell: bash
      - name: Install cargo-nextest, cargo-deny, and cargo-hack
        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb
        with:
          tool: cargo-nextest,cargo-deny,cargo-hack
      - name: Build (Rust)
        run: cargo build --workspace --verbose
  rust-test:
    permissions:
      contents: read
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      # We deliberately install our MSRV here (rather than 'stable') to ensure that everything compiles with that version
      - name: Install Rust ${{ env.RUST_MSRV }}
        run: |
          for attempt in 1 2 3; do
            if rustup install ${{ env.RUST_MSRV }} --component rustfmt && rustup default ${{ env.RUST_MSRV }}; then
              break
            fi
            if [ $attempt -eq 3 ]; then
              echo "Failed to install Rust ${{ env.RUST_MSRV }} after 3 attempts"
              exit 1
            fi
            sleep $((10 * attempt))
          done
        shell: bash
      - name: Install cargo-nextest, cargo-deny, and cargo-hack
        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb
        with:
          tool: cargo-nextest,cargo-deny,cargo-hack
      - name: Test (Rust)
        env:
          # Write the exported bindings to somewhere we don't care about
          TS_RS_EXPORT_DIR: /tmp/tensorzero-bindings
        run: |
          cargo test-unit ${{ vars.CARGO_NEXTEST_EXTRA_ARGS }}
  validate:
    permissions:
      contents: read
    runs-on: ubuntu-latest
    timeout-minutes: 30
    if: github.repository == 'tensorzero/tensorzero'
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      # We deliberately install our MSRV here (rather than 'stable') to ensure that everything compiles with that version
      - name: Install Rust ${{ env.RUST_MSRV }}
        run: |
          for attempt in 1 2 3; do
            if rustup install ${{ env.RUST_MSRV }} --component rustfmt && rustup default ${{ env.RUST_MSRV }}; then
              break
            fi
            if [ $attempt -eq 3 ]; then
              echo "Failed to install Rust ${{ env.RUST_MSRV }} after 3 attempts"
              exit 1
            fi
            sleep $((10 * attempt))
          done
        shell: bash
      - name: Print Rust version
        run: rustc --version
      - name: Setup Node.js
        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e
        with:
          node-version: "24.11.0"
      - name: Install pnpm
        run: |
          for attempt in 1 2 3; do
            if npm install -g pnpm@latest; then
              break
            fi
            if [ $attempt -eq 3 ]; then
              echo "Failed to install pnpm after 3 attempts"
              exit 1
            fi
            sleep $((10 * attempt))
          done
        shell: bash
      - name: Install uv
        uses: astral-sh/setup-uv@ed21f2f24f8dd64503750218de024bcf64c7250a
        with:
          version: "0.6.17"
      - name: Install JS dependencies
        run: pnpm install --frozen-lockfile
      - name: Set up Helm
        uses: azure/setup-helm@b9e51907a09c216f16ebe8536097933489208112 # v4.3.0
        with:
          version: "3.18.6" # Conservatively match the `Helm 3.2.0+` version specified in examples/production-deployment-k8s-helm/README.md
      - name: Install helm-values-schema-json plugin
        run: helm plugin install https://github.com/losisin/helm-values-schema-json.git
      - name: check-case-conflict
        run: uv run --with pre-commit pre-commit run check-case-conflict --all-files
      - name: end-of-file-fixer
        run: uv run --with pre-commit pre-commit run end-of-file-fixer --all-files
      - name: check-executables-have-shebangs
        run: uv run --with pre-commit pre-commit run check-executables-have-shebangs --all-files
      - name: check-json
        run: uv run --with pre-commit pre-commit run check-json --all-files
      - name: check-yaml
        run: uv run --with pre-commit pre-commit run check-yaml --all-files
      - name: check-toml
        run: uv run --with pre-commit pre-commit run check-toml --all-files
      - name: check-xml
        run: uv run --with pre-commit pre-commit run check-xml --all-files
      - name: check-merge-conflict
        run: uv run --with pre-commit pre-commit run check-merge-conflict --all-files
      - name: check-symlinks
        run: uv run --with pre-commit pre-commit run check-symlinks --all-files
      - name: check-vcs-permalinks
        run: uv run --with pre-commit pre-commit run check-vcs-permalinks --all-files
      - name: detect-private-key
        run: uv run --with pre-commit pre-commit run detect-private-key --all-files
      - name: helm-values-schema
        run: uv run --with pre-commit pre-commit run helm-schema --all-files
      - name: Check helm schema sync
        run: ./ci/check-helm-schema-sync.sh
      # We don't run these two because we want to allow template files to have trailing whitespace
      # TODO: how do we exclude minijinja files using pre-commit in GHA?
      # - name: end-of-file-fixer
      #   run: uv run --with pre-commit pre-commit run end-of-file-fixer
      # - name: trailing-whitespace
      #   run: uv run --with pre-commit pre-commit run trailing-whitespace
      # TODO: Enable this if we can figure out the invocation
      # - name: Run nb-clean
      #   run: uv run --with nb-clean nb-clean check --remove-empty-cells
      - name: Install cargo-nextest, cargo-deny, and cargo-hack
        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb
        with:
          tool: cargo-nextest,cargo-deny,cargo-hack
      - name: Run cargo-deny
        run: cargo deny check
      - name: Run cargo fmt
        run: cargo fmt --all --check
      - name: "Node.js: Run prettier"
        run: pnpm --filter=openai-node run format
      - name: "Node.js: OpenAI Client: typecheck"
        run: pnpm --filter=openai-node run typecheck
      - name: "Node.js: OpenAI Client: lint"
        working-directory: clients/openai-node
        run: pnpm --filter=openai-node run lint
      - name: Lint Helm charts
        run: find . -name "Chart.yaml" -exec dirname {} \; | xargs -I {} helm lint {}
  clickhouse-tests:
    name: "ClickHouse tests (replicated: ${{ matrix.replicated }}) (version: ${{ matrix.clickhouse_version.tag }})"
    permissions:
      contents: read
      # Permission to download artifacts and for rust-cache
      actions: read
    needs: [build-gateway-container, build-mock-inference-container]
    # We don't run many tests here, so use a normal runner with GitHub Actions caching
    # to avoid unnecessarily using Namespace credits (it should still always finish before
    # the main 'validate' job)
    runs-on: ${{ matrix.replicated && 'namespace-profile-tensorzero-8x16;ephemeral-storage.size-multiplier=2' || 'namespace-profile-tensorzero-8x16' }}
    continue-on-error: ${{ matrix.clickhouse_version.allow_failure }}
    # This needs to pull from Docker Hub, so only run in the merge queue, or when running on a PR from the main repository
    if: ${{ github.event_name == 'merge_group' || (github.event.pull_request.head.repo.full_name == github.repository) }}
    strategy:
      matrix:
        # Only include replicated: true when running in merge queue
        replicated: ${{ github.event_name == 'merge_group' && fromJSON('[true, false]') || fromJSON('[false]') }}
        clickhouse_version: ${{ github.event_name == 'merge_group' && fromJSON('[{"tag":"lts","prefix":"lts","allow_failure":false},{"tag":"latest","prefix":"latest","allow_failure":false}]') || fromJSON('[{"tag":"lts","prefix":"lts","allow_failure":false}]') }}
        exclude:
          - replicated: true
            clickhouse_version:
              { "tag": "latest", "prefix": "latest", "allow_failure": false }
        include: ${{ github.event_name == 'merge_group' && fromJSON('[{"clickhouse_version":{"tag":"latest","prefix":"latest","allow_failure":false},"replicated":true}]') || fromJson('[]') }}
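      # Net effect: PRs run a single non-replicated job against the ClickHouse LTS image, while the
      # merge queue runs every combination of replicated/non-replicated and lts/latest.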
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Install Rust toolchain
        run: |
          for attempt in 1 2 3; do
            if rustup toolchain install stable && rustup default stable; then
              break
            fi
            if [ $attempt -eq 3 ]; then
              echo "Failed to install Rust toolchain after 3 attempts"
              exit 1
            fi
            sleep $((10 * attempt))
          done
        shell: bash
      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
        with:
          cache-provider: "buildjet"
          shared-key: "build-gateway-cache"
          save-if: false
      - name: "Check disk space before cleanup"
        run: df -h
      # ClickHouse intermittently runs out of disk space on the `ubuntu-latest` runner
      # Use extreme disk cleanup script that frees up ~26GB of space (11-phase cleanup)
      - name: "Free up disk space"
        run: ./ci/free-disk-space.sh
      - name: Install cargo-nextest
        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb
        with:
          tool: cargo-nextest
      - name: Set ClickHouse replicated cluster name
        if: matrix.replicated == true
        run: echo "TENSORZERO_CLICKHOUSE_CLUSTER_NAME=tensorzero_e2e_tests_cluster" >> $GITHUB_ENV
      - name: Setup Namespace caching for Clickhouse fixtures
        uses: namespacelabs/nscloud-cache-action@446d8f390563cd54ca27e8de5bdb816f63c0b706
        with:
          path: |
            ./ui/fixtures/s3-fixtures/
      - name: Install uv
        uses: astral-sh/setup-uv@ed21f2f24f8dd64503750218de024bcf64c7250a
        with:
          version: "0.6.17"
      - name: Download container images
        run: |
          docker pull nscr.io/igvf4asmf8kri/gateway:sha-${{ github.sha }}
          docker pull nscr.io/igvf4asmf8kri/mock-inference-provider:sha-${{ github.sha }}
          # Retag the images to what we expect the names to be
          docker tag nscr.io/igvf4asmf8kri/gateway:sha-${{ github.sha }} tensorzero/gateway:sha-${{ github.sha }}
          docker tag nscr.io/igvf4asmf8kri/mock-inference-provider:sha-${{ github.sha }} tensorzero/mock-inference-provider:sha-${{ github.sha }}
        continue-on-error: ${{ github.event.pull_request.head.repo.full_name != github.repository || github.actor == 'dependabot[bot]' }}
      - name: Download ClickHouse fixtures
        run: uv run ./ui/fixtures/download-fixtures.py
      - name: Set up TENSORZERO_CLICKHOUSE_URL for E2E tests (non-replicated)
        if: matrix.replicated == false
        run: |
          echo "TENSORZERO_CLICKHOUSE_URL=http://chuser:chpassword@localhost:8123/tensorzero_e2e_tests" >> $GITHUB_ENV
      - name: Set up TENSORZERO_CLICKHOUSE_URL for E2E tests (replicated)
        if: matrix.replicated == true
        run: |
          # Note: In replicated mode, we use the HTTP port (8124) of the second replica so we can ensure this works
          echo "TENSORZERO_CLICKHOUSE_URL=http://chuser:chpassword@localhost:8124/tensorzero_e2e_tests" >> $GITHUB_ENV
      - name: Set Postgres database URL for tests
        run: |
          echo "DATABASE_URL=postgres://postgres:postgres@localhost:5432/tensorzero-e2e-tests" >> $GITHUB_ENV
          echo "TENSORZERO_POSTGRES_URL=postgres://postgres:postgres@localhost:5432/tensorzero-e2e-tests" >> $GITHUB_ENV
      - name: Set TENSORZERO_GATEWAY_TAG
        run: |
          echo "TENSORZERO_GATEWAY_TAG=sha-${{ github.sha }}" >> $GITHUB_ENV
      - name: Set TENSORZERO_MOCK_INFERENCE_PROVIDER_TAG
        run: |
          echo "TENSORZERO_MOCK_INFERENCE_PROVIDER_TAG=sha-${{ github.sha }}" >> $GITHUB_ENV
      - name: Launch dependency services with non-replicated ClickHouse container for E2E tests
        if: matrix.replicated == false
        run: TENSORZERO_CLICKHOUSE_VERSION=${{ matrix.clickhouse_version.tag }} docker compose -f tensorzero-core/tests/e2e/docker-compose.yml up --wait
      - name: Launch replicated ClickHouse container for E2E tests
        if: matrix.replicated == true
        run: TENSORZERO_CLICKHOUSE_VERSION=${{ matrix.clickhouse_version.tag }} docker compose -f tensorzero-core/tests/e2e/docker-compose.replicated.yml up --wait
      # Make an HTTP request to ClickHouse and check that the version matches '${{ matrix.clickhouse_version }}'
      - name: Check ClickHouse version
        run: |
          CLICKHOUSE_VERSION=$(curl -s "http://localhost:8123/query?user=chuser&password=chpassword" --data-binary "SELECT version()")
          echo "Detected ClickHouse version: $CLICKHOUSE_VERSION"
          echo "$CLICKHOUSE_VERSION" | grep -q "${{ matrix.clickhouse_version.prefix }}" || echo "WARNING: ClickHouse version does not match expected ${{ matrix.clickhouse_version.prefix }}"
      # We run this as a separate step so that we can see live build logs
      # (and fail the job immediately if the build fails)
      - name: Build the gateway for E2E tests
        run: cargo build-e2e
      - name: Launch the gateway for E2E tests (not configured for replication)
        if: matrix.replicated == false
        run: |
          cargo run-e2e > e2e_logs.txt 2>&1 &
          count=0
          max_attempts=20
          while ! curl http://localhost:3000/health; do
            echo "Waiting for gateway to be healthy..."
            sleep 1
            count=$((count + 1))
            if [ $count -ge $max_attempts ]; then
              echo "Gateway failed to become healthy after $max_attempts attempts"
              exit 1
            fi
          done
          echo "GATEWAY_PID=$!" >> $GITHUB_ENV
      - name: Launch the gateway for E2E tests (configured for replication)
        if: matrix.replicated == true
        env:
          TENSORZERO_CLICKHOUSE_URL: http://chuser:chpassword@localhost:8123/tensorzero_e2e_tests
        run: |
          cargo run-e2e --run-clickhouse-migrations &&
          cargo run-e2e > e2e_logs.txt 2>&1 &
          count=0
          max_attempts=40
          while ! curl http://localhost:3000/health; do
            echo "Waiting for gateway to be healthy..."
            sleep 1
            count=$((count + 1))
            if [ $count -ge $max_attempts ]; then
              echo "Gateway failed to become healthy after $max_attempts attempts"
              exit 1
            fi
          done
          echo "GATEWAY_PID=$!" >> $GITHUB_ENV
      - name: Test (Rust)
        env:
          DATABASE_URL: postgres://postgres:postgres@localhost:5432/tensorzero-e2e-tests
          TENSORZERO_POSTGRES_URL: postgres://postgres:postgres@localhost:5432/tensorzero-e2e-tests
        run: cargo test-clickhouse
      - name: Print docker compose logs (replicated)
        if: always() && matrix.replicated == true
        run: |
          TENSORZERO_CLICKHOUSE_VERSION=${{ matrix.clickhouse_version.tag }} docker compose -f tensorzero-core/tests/e2e/docker-compose.replicated.yml logs -t
      - name: Print ClickHouse error logs (replicated)
        if: always() && matrix.replicated == true
        run: |
          echo "Error logs for ClickHouse 01:"
          docker exec e2e-clickhouse-01-1 cat /var/log/clickhouse-server/clickhouse-server.err.log
          echo "Error logs for ClickHouse 02:"
          docker exec e2e-clickhouse-02-1 cat /var/log/clickhouse-server/clickhouse-server.err.log
          echo "Error logs for ClickHouse 03:"
          docker exec e2e-clickhouse-03-1 cat /var/log/clickhouse-server/clickhouse-server.err.log
      - name: Print ClickHouse trace logs (replicated)
        if: always() && matrix.replicated == true
        run: |
          echo "Trace logs for ClickHouse 01:"
          docker exec e2e-clickhouse-01-1 cat /var/log/clickhouse-server/clickhouse-server.log
          echo "Trace logs for ClickHouse 02:"
          docker exec e2e-clickhouse-02-1 cat /var/log/clickhouse-server/clickhouse-server.log
          echo "Trace logs for ClickHouse 03:"
          docker exec e2e-clickhouse-03-1 cat /var/log/clickhouse-server/clickhouse-server.log
      - name: Print container health checks (replicated)
        if: always() && matrix.replicated == true
        run: |
          echo "Health check for ClickHouse 01:"
          docker inspect --format "{{json .State.Health }}" $(docker compose -f tensorzero-core/tests/e2e/docker-compose.replicated.yml ps -q clickhouse-01) | jq
          echo "Health check for ClickHouse 02:"
          docker inspect --format "{{json .State.Health }}" $(docker compose -f tensorzero-core/tests/e2e/docker-compose.replicated.yml ps -q clickhouse-02) | jq
          echo "Health check for ClickHouse 03:"
          docker inspect --format "{{json .State.Health }}" $(docker compose -f tensorzero-core/tests/e2e/docker-compose.replicated.yml ps -q clickhouse-03) | jq
      - name: Print docker compose logs (non-replicated)
        if: always() && matrix.replicated == false
        run: |
          TENSORZERO_CLICKHOUSE_VERSION=${{ matrix.clickhouse_version.tag }} docker compose -f tensorzero-core/tests/e2e/docker-compose.yml logs -t
      - name: Print e2e logs
        if: always()
        run: cat e2e_logs.txt
  # Run 'cargo test-optimization' against mock-inference-provider
  mock-optimization-tests:
    permissions:
      contents: read
      # Permission for rust-cache
      actions: read
    runs-on: ubuntu-latest
    if: github.repository == 'tensorzero/tensorzero'
    env:
      OPENAI_API_KEY: not_used
      FIREWORKS_API_KEY: not_used
      FIREWORKS_ACCOUNT_ID: not_used
      TOGETHER_API_KEY: not_used
      TENSORZERO_USE_MOCK_INFERENCE_PROVIDER: 1
      TENSORZERO_SKIP_LARGE_FIXTURES: 1
      R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
      R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Install Rust toolchain
        run: |
          for attempt in 1 2 3; do
            if rustup toolchain install stable && rustup default stable; then
              break
            fi
            if [ $attempt -eq 3 ]; then
              echo "Failed to install Rust toolchain after 3 attempts"
              exit 1
            fi
            sleep $((10 * attempt))
          done
        shell: bash
      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
        with:
          cache-provider: "buildjet"
          shared-key: "build-gateway-cache"
          save-if: false
      - name: Install cargo-nextest
        uses: taiki-e/install-action@d12e869b89167df346dd0ff65da342d1fb1202fb
        with:
          tool: cargo-nextest
      - name: Install uv
        uses: astral-sh/setup-uv@ed21f2f24f8dd64503750218de024bcf64c7250a
        with:
          version: "0.6.17"
          # Without this, the post-install hook tries to prune and save the cache, but ./ci/free-disk-space.sh already nukes /opt/hostedtoolcache/*, so it fails.
          enable-cache: "false"
      - name: Download ClickHouse fixtures
        run: uv run ./ui/fixtures/download-fixtures.py
      - name: Cleanup disk space
        run: ./ci/free-disk-space.sh
      - name: Set up TENSORZERO_CLICKHOUSE_URL for E2E tests
        run: |
          echo "TENSORZERO_CLICKHOUSE_URL=http://chuser:chpassword@localhost:8123/tensorzero_e2e_tests" >> $GITHUB_ENV
      - name: Make sure dependency services are up
        run: docker compose -f tensorzero-core/tests/e2e/docker-compose.yml up --wait
      - name: Launch the gateway for E2E tests
        run: |
          cargo run-e2e > e2e_logs.txt 2>&1 &
          while ! curl http://localhost:3000/health; do
            echo "Waiting for gateway to be healthy..."
            sleep 1
          done
      - name: Test (Rust)
        run: cargo test-optimization-mock
  build-gateway-e2e-container:
    uses: ./.github/workflows/build-gateway-e2e-container.yml
    if: (github.repository == 'tensorzero/tensorzero' && github.event_name == 'merge_group')
    permissions:
      # Permission to checkout the repository
      contents: read
      # Permission to fetch GitHub OIDC token authentication
      id-token: write
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_LIMITED_TOKEN: ${{ secrets.DOCKERHUB_LIMITED_TOKEN }}
  build-gateway-container:
    uses: ./.github/workflows/build-gateway-container.yml
    permissions:
      # Permission to checkout the repository
      contents: read
      # Permission to fetch GitHub OIDC token authentication
      id-token: write
  build-ui-container:
    uses: ./.github/workflows/build-ui-container.yml
    permissions:
      # Permission to checkout the repository
      contents: read
      # Permission to fetch GitHub OIDC token authentication
      id-token: write
  build-mock-inference-container:
    uses: ./.github/workflows/build-mock-inference-container.yml
    permissions:
      # Permission to checkout the repository
      contents: read
      # Permission to fetch GitHub OIDC token authentication
      id-token: write
  ui-tests:
    permissions:
      contents: read
      actions: read
    uses: ./.github/workflows/ui-tests.yml
    with:
      is_merge_group: ${{ github.event_name == 'merge_group' }}
    needs: [build-gateway-container, build-mock-inference-container]
  ui-tests-e2e:
    permissions:
      contents: read
      actions: write
    uses: ./.github/workflows/ui-tests-e2e.yml
    with:
      is_merge_group: ${{ github.event_name == 'merge_group' }}
    needs:
      [
        build-gateway-container,
        build-ui-container,
        build-mock-inference-container,
      ]
    secrets:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
      FIREWORKS_ACCOUNT_ID: ${{ secrets.FIREWORKS_ACCOUNT_ID }}
      FIREWORKS_API_KEY: ${{ secrets.FIREWORKS_API_KEY }}
      S3_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      S3_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  check-production-docker-container:
    permissions:
      contents: read
    runs-on: ubuntu-latest
    if: github.repository == 'tensorzero/tensorzero' && github.event_name == 'merge_group'
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      - name: Build Docker container for production deployment tests
        run: docker build -t tensorzero/gateway -f gateway/Dockerfile .
      - name: Launch ClickHouse container for E2E tests
        run: |
          docker compose -f tensorzero-core/tests/e2e/docker-compose.yml up clickhouse -d --wait
      - name: Set up .env file for production deployment tests
        run: |
          echo "OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}" > examples/production-deployment/.env
          echo "TENSORZERO_CLICKHOUSE_URL=http://chuser:chpassword@host.docker.internal:8123/tensorzero" >> examples/production-deployment/.env
          echo "TENSORZERO_DISABLE_PSEUDONYMOUS_USAGE_ANALYTICS=1" >> examples/production-deployment/.env
      - name: Run docker compose for production deployment tests
        run: docker compose -f examples/production-deployment/docker-compose.yml up -d --wait
      - name: Run inference for production deployment tests
        run: examples/production-deployment/run.sh
      - name: Print Docker compose logs
        if: always()
        run: |
          docker compose -f examples/production-deployment/docker-compose.yml logs -t
      - name: Take down docker compose for production deployment tests
        run: |
          docker compose -f examples/production-deployment/docker-compose.yml down
          docker compose -f tensorzero-core/tests/e2e/docker-compose.yml down
  run-merge-queue-checks:
    needs: [build-gateway-e2e-container, build-mock-inference-container]
    if: (github.repository == 'tensorzero/tensorzero' && github.event_name == 'merge_group')
    permissions:
      # Permission to checkout the repository
      contents: read
      # Permission to fetch GitHub OIDC token authentication
      id-token: write
      actions: write
    uses: ./.github/workflows/merge-queue.yml
    secrets: inherit
  minikube:
    needs: [build-gateway-container, build-ui-container]
    permissions:
      contents: read
    uses: ./.github/workflows/minikube.yml
  mocked-batch-tests:
    permissions:
      contents: read
      actions: read
    if: github.repository == 'tensorzero/tensorzero' && github.event_name == 'merge_group'
    uses: ./.github/workflows/mocked-batch-test.yml
    needs: [build-mock-inference-container]
    secrets: inherit
  # See 'ci/README.md' at the repository root for more details.
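  # This aggregate job gives branch protection and the merge queue a single required status check.
  # `always()` keeps it running even when upstream jobs fail, so the final step can inspect every result.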
  check-all-general-jobs-passed:
    permissions: {}
    if: always() && github.repository == 'tensorzero/tensorzero'
    needs:
      [
        check-version-consistency,
        check-production-docker-container,
        check-if-edited-then-edit,
        check-docker-compose,
        check-latest-docker-compose,
        check-python-client-build,
        check-node-bindings,
        check-python-schemas,
        build-windows,
        build-ui-container,
        build-gateway-container,
        build-gateway-e2e-container,
        build-mock-inference-container,
        mocked-batch-tests,
        minikube,
        rust-build,
        rust-test,
        validate,
        validate-node,
        validate-python,
        lint-rust,
        clickhouse-tests,
        ui-tests,
        ui-tests-e2e,
        mock-optimization-tests,
        run-merge-queue-checks,
      ]
    runs-on: ubuntu-latest
    steps:
      - name: Print all job results
        run: |
          echo "'needs': ${{ toJson(needs) }}"
          echo "github.event_name: ${{ github.event_name }}"
      # When running in the merge queue, jobs should never be skipped.
      # In PR CI, some jobs may be intentionally skipped (e.g. due to running from a fork, or to save money)
      - if: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || (github.event_name == 'merge_group' && contains(needs.*.result, 'skipped')) }}
        run: exit 1