chore: Fix inconsistent docs example output and fix variable name #5834

Workflow file for this run

name: CI

on:
  push:
    branches:
      - main
    tags:
      - "**"
  pull_request: {}

env:
  COLUMNS: 150
  UV_PYTHON: 3.12
  UV_FROZEN: "1"

permissions:
  contents: read
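
# COLUMNS sets the terminal width that tools report in CI logs; UV_PYTHON pins the default
# interpreter version. UV_FROZEN="1" is equivalent to passing `--frozen` to uv, so every
# `uv sync`/`uv run` installs from uv.lock as-is instead of re-resolving dependencies.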

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - name: Install dependencies
        run: uv sync --all-extras --all-packages --group lint
      - uses: denoland/setup-deno@v2
        with:
          deno-version: v2.x
      - uses: pre-commit/action@v3.0.0
        with:
          extra_args: --all-files --verbose
        env:
          SKIP: no-commit-to-branch
      - run: uv build --all-packages
      - run: ls -lh dist/
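
  # Building all workspace packages here is presumably a smoke test for packaging metadata;
  # the release job runs the same `uv build --all-packages` before publishing.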

  # mypy and lint are a bit slower than other jobs, so we run them separately
  mypy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - name: Install dependencies
        run: uv sync --no-dev --group lint
      - run: make typecheck-mypy

  docs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - run: uv sync --group docs
      # always build docs to check it works without insiders packages
      - run: make docs
      - run: make docs-insiders
        if: github.event.pull_request.head.repo.full_name == github.repository || github.ref == 'refs/heads/main'
        env:
          PPPR_TOKEN: ${{ secrets.PPPR_TOKEN }}
      - run: tree -sh site
      - uses: actions/setup-node@v4
      - run: npm install
        working-directory: docs-site
      - run: npm run typecheck
        working-directory: docs-site
      - name: Store docs
        uses: actions/upload-artifact@v4
        with:
          name: site
          path: site
      # check all docs images are tinified; you'll need an API key from https://tinify.com/ to fix this if it fails
      - run: uvx tinicly docs --check
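
  # The insiders build is gated to same-repo PRs and main because it needs the PPPR_TOKEN
  # secret, which isn't available to forks. The uploaded `site` artifact is what the
  # deploy-docs and deploy-docs-preview jobs publish later in this workflow.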

  test-live:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    if: github.event.pull_request.head.repo.full_name == github.repository || github.event_name == 'push'
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - uses: pydantic/ollama-action@v3
        with:
          model: qwen2:0.5b
      - run: >
          uv run
          --package pydantic-ai-slim
          --extra openai
          --extra vertexai
          --extra google
          --extra groq
          --extra anthropic
          --extra mistral
          --extra cohere
          pytest tests/test_live.py -v
          --durations=100
        env:
          PYDANTIC_AI_LIVE_TEST_DANGEROUS: "CHARGE-ME!"
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }}
          GOOGLE_SERVICE_ACCOUNT_CONTENT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_CONTENT }}
          GROQ_API_KEY: ${{ secrets.GROQ_API_KEY }}
          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
          MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
          CO_API_KEY: ${{ secrets.COHERE_API_KEY }}
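
  # test-live calls real provider APIs, so it only runs for pushes and same-repo PRs where the
  # API-key secrets exist; the `check` job below lists it in allowed-skips so skipped runs
  # don't block branch protection.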

  test:
    name: test on ${{ matrix.python-version }}
    runs-on: ubuntu-latest
    timeout-minutes: 10
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
    env:
      UV_PYTHON: ${{ matrix.python-version }}
      CI: true
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - uses: denoland/setup-deno@v2
        with:
          deno-version: v2.x
      - run: mkdir coverage
      # run tests with just `pydantic-ai-slim` dependencies
      - run: uv run --package pydantic-ai-slim coverage run -m pytest
        env:
          COVERAGE_FILE: coverage/.coverage.${{ runner.os }}-py${{ matrix.python-version }}-slim
      - run: uv run coverage run -m pytest
        env:
          COVERAGE_FILE: coverage/.coverage.${{ runner.os }}-py${{ matrix.python-version }}-standard
      - run: uv run --all-extras coverage run -m pytest
        env:
          COVERAGE_FILE: coverage/.coverage.${{ runner.os }}-py${{ matrix.python-version }}-all-extras
      - run: uv run --all-extras python tests/import_examples.py
      # this must run last as it modifies the environment!
      - name: test lowest versions
        if: matrix.python-version != '3.9'
        run: |
          unset UV_FROZEN
          uv run --all-extras --resolution lowest-direct coverage run -m pytest
        env:
          COVERAGE_FILE: coverage/.coverage.${{ runner.os }}-py${{ matrix.python-version }}-lowest-versions
      - name: store coverage files
        uses: actions/upload-artifact@v4
        with:
          name: coverage-${{ matrix.python-version }}
          path: coverage
          include-hidden-files: true
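
  # Each pytest invocation writes a distinctly named coverage data file (OS / Python version /
  # dependency set) so the coverage job below can `coverage combine` them into one report.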

  coverage:
    runs-on: ubuntu-latest
    needs: [test]
    steps:
      - uses: actions/checkout@v4
        with:
          # needed for diff-cover
          fetch-depth: 0
      - name: get coverage files
        uses: actions/download-artifact@v4
        with:
          merge-multiple: true
          path: coverage
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - run: uv sync --package pydantic-ai-slim --only-dev
      # Exclude 3.9 coverage as it gets the wrong line numbers, causing invalid failures.
      - run: rm coverage/.coverage.*-py3.9-*
      - run: uv run coverage combine coverage
      - run: uv run coverage html --show-contexts --title "Pydantic AI coverage for ${{ github.sha }}"
      - name: Store coverage html
        uses: actions/upload-artifact@v4
        with:
          name: coverage-html
          path: htmlcov
          include-hidden-files: true
      - run: uv run coverage xml
      - run: uv run diff-cover coverage.xml --html-report index.html
      - name: Store diff coverage html
        uses: actions/upload-artifact@v4
        with:
          name: diff-coverage-html
          path: index.html
      - run: uv run coverage report --fail-under 100
      - run: uv run diff-cover coverage.xml --fail-under 100
      - run: uv run strict-no-cover
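
  # diff-cover needs the full git history (fetch-depth: 0) to diff against the base branch;
  # both total coverage and coverage of changed lines must hit 100% for this job to pass.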

  test-mcp-run-python:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    env:
      UV_PYTHON: "3.12"
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - uses: denoland/setup-deno@v2
        with:
          deno-version: v2.x
      - run: make lint-js
      - run: uv run --package mcp-run-python pytest mcp-run-python -v --durations=100
      - run: deno task dev warmup
        working-directory: mcp-run-python

  # https://github.com/marketplace/actions/alls-green#why used for branch protection checks
  check:
    if: always()
    needs: [lint, mypy, docs, test-live, test, coverage, test-mcp-run-python]
    runs-on: ubuntu-latest
    steps:
      - name: Decide whether the needed jobs succeeded or failed
        uses: re-actors/alls-green@release/v1
        with:
          jobs: ${{ toJSON(needs) }}
          allowed-skips: test-live
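
  # alls-green collapses all the `needs` results into this single job, so branch protection
  # only has to require `check`; test-live may be skipped (see above) without failing it.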

  deploy-docs:
    needs: [check]
    if: success() && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    environment:
      name: deploy-docs
      url: https://ai.pydantic.dev
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
      - run: npm install
        working-directory: docs-site
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - uses: actions/download-artifact@v4
        with:
          name: site
          path: site
      - uses: cloudflare/wrangler-action@v3
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          workingDirectory: docs-site
          command: >
            deploy
            --var GIT_COMMIT_SHA:${{ github.sha }}
            --var GIT_BRANCH:main
      - run: uv sync --group docs-upload
      - run: uv run python docs/.hooks/algolia.py upload
        env:
          ALGOLIA_WRITE_API_KEY: ${{ secrets.ALGOLIA_WRITE_API_KEY }}
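
  # Runs only for tag builds: it reuses the `site` artifact built by the docs job, publishes it
  # via Cloudflare Wrangler, then uploads the docs search index to Algolia.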

  deploy-docs-preview:
    needs: [check]
    if: success() && github.ref == 'refs/heads/main'
    runs-on: ubuntu-latest
    environment:
      name: deploy-docs-preview
    permissions:
      deployments: write
      statuses: write
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
      - run: npm install
        working-directory: docs-site
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - uses: actions/download-artifact@v4
        with:
          name: site
          path: site
      - uses: cloudflare/wrangler-action@v3
        id: deploy
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          environment: previews
          workingDirectory: docs-site
          command: >
            deploy
            --var GIT_COMMIT_SHA:${{ github.sha }}
            --var GIT_BRANCH:main
      - name: Set preview URL
        run: uv run --no-project --with httpx .github/set_docs_main_preview_url.py
        env:
          DEPLOY_OUTPUT: ${{ steps.deploy.outputs.command-output }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          REPOSITORY: ${{ github.repository }}
          REF: ${{ github.sha }}
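
  # The preview deploy runs on pushes to main and targets Wrangler's `previews` environment;
  # the final step presumably parses the deploy output for the preview URL and reports it back
  # on the commit (hence the deployments/statuses write permissions and GITHUB_TOKEN).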

  # TODO(Marcelo): We need to split this into two jobs: `build` and `release`.
  release:
    needs: [check]
    if: success() && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    environment:
      name: release
      url: https://pypi.org/project/pydantic-ai/${{ steps.inspect_package.outputs.version }}
    permissions:
      id-token: write
    outputs:
      package-version: ${{ steps.inspect_package.outputs.version }}
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - run: uv build --all-packages
      - name: Inspect package version
        id: inspect_package
        run: |
          uv tool install --with uv-dynamic-versioning hatchling
          version=$(uvx hatchling version)
          echo "version=$version" >> "$GITHUB_OUTPUT"
      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          skip-existing: true
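
  # `id-token: write` lets pypa/gh-action-pypi-publish authenticate to PyPI via OIDC
  # (trusted publishing), so no PyPI API token secret is needed; the extracted version feeds
  # the environment URL above and the send-tweet job below.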

  send-tweet:
    name: Send tweet
    needs: [release]
    if: needs.release.result == 'success'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Install dependencies
        run: pip install tweepy==4.14.0
      - name: Send tweet
        shell: python
        run: |
          import os
          import tweepy

          client = tweepy.Client(
              access_token=os.getenv("TWITTER_ACCESS_TOKEN"),
              access_token_secret=os.getenv("TWITTER_ACCESS_TOKEN_SECRET"),
              consumer_key=os.getenv("TWITTER_CONSUMER_KEY"),
              consumer_secret=os.getenv("TWITTER_CONSUMER_SECRET"),
          )
          version = os.getenv("VERSION").strip('"')
          tweet = os.getenv("TWEET").format(version=version)
          client.create_tweet(text=tweet)
        env:
          VERSION: ${{ needs.release.outputs.package-version }}
          TWEET: |
            Pydantic AI version {version} is out! 🎉
            https://github.com/pydantic/pydantic-ai/releases/tag/v{version}
          TWITTER_CONSUMER_KEY: ${{ secrets.TWITTER_CONSUMER_KEY }}
          TWITTER_CONSUMER_SECRET: ${{ secrets.TWITTER_CONSUMER_SECRET }}
          TWITTER_ACCESS_TOKEN: ${{ secrets.TWITTER_ACCESS_TOKEN }}
          TWITTER_ACCESS_TOKEN_SECRET: ${{ secrets.TWITTER_ACCESS_TOKEN_SECRET }}
