Commit 1a60d8f

Make user_prompt optional (pydantic#1406)

1 parent 593ff0e · commit 1a60d8f
File tree: 5 files changed, +104 -14 lines changed

pydantic_ai_slim/pydantic_ai/_agent_graph.py

Lines changed: 8 additions & 6 deletions

@@ -79,7 +79,7 @@ class GraphAgentDeps(Generic[DepsT, ResultDataT]):
 
     user_deps: DepsT
 
-    prompt: str | Sequence[_messages.UserContent]
+    prompt: str | Sequence[_messages.UserContent] | None
     new_message_index: int
 
     model: models.Model

@@ -124,7 +124,7 @@ def is_agent_node(
 
 @dataclasses.dataclass
 class UserPromptNode(AgentNode[DepsT, NodeRunEndT]):
-    user_prompt: str | Sequence[_messages.UserContent]
+    user_prompt: str | Sequence[_messages.UserContent] | None
 
     system_prompts: tuple[str, ...]
     system_prompt_functions: list[_system_prompt.SystemPromptRunner[DepsT]]

@@ -151,7 +151,7 @@ async def _get_first_message(
 
     async def _prepare_messages(
         self,
-        user_prompt: str | Sequence[_messages.UserContent],
+        user_prompt: str | Sequence[_messages.UserContent] | None,
         message_history: list[_messages.ModelMessage] | None,
         run_context: RunContext[DepsT],
     ) -> tuple[list[_messages.ModelMessage], _messages.ModelRequest]:

@@ -166,16 +166,18 @@ async def _prepare_messages(
                 messages = ctx_messages.messages
                 ctx_messages.used = True
 
+        parts: list[_messages.ModelRequestPart] = []
         if message_history:
             # Shallow copy messages
             messages.extend(message_history)
             # Reevaluate any dynamic system prompt parts
             await self._reevaluate_dynamic_prompts(messages, run_context)
-            return messages, _messages.ModelRequest([_messages.UserPromptPart(user_prompt)])
         else:
-            parts = await self._sys_parts(run_context)
+            parts.extend(await self._sys_parts(run_context))
+
+        if user_prompt is not None:
             parts.append(_messages.UserPromptPart(user_prompt))
-        return messages, _messages.ModelRequest(parts)
+        return messages, _messages.ModelRequest(parts)
 
     async def _reevaluate_dynamic_prompts(
         self, messages: list[_messages.ModelMessage], run_context: RunContext[DepsT]
pydantic_ai_slim/pydantic_ai/agent.py

Lines changed: 7 additions & 7 deletions

@@ -242,7 +242,7 @@ def instrument_all(instrument: InstrumentationSettings | bool = True) -> None:
     @overload
     async def run(
         self,
-        user_prompt: str | Sequence[_messages.UserContent],
+        user_prompt: str | Sequence[_messages.UserContent] | None = None,
         *,
         result_type: None = None,
         message_history: list[_messages.ModelMessage] | None = None,

@@ -257,7 +257,7 @@ async def run(
     @overload
     async def run(
         self,
-        user_prompt: str | Sequence[_messages.UserContent],
+        user_prompt: str | Sequence[_messages.UserContent] | None = None,
         *,
         result_type: type[RunResultDataT],
         message_history: list[_messages.ModelMessage] | None = None,

@@ -271,7 +271,7 @@ async def run(
 
     async def run(
         self,
-        user_prompt: str | Sequence[_messages.UserContent],
+        user_prompt: str | Sequence[_messages.UserContent] | None = None,
         *,
         result_type: type[RunResultDataT] | None = None,
         message_history: list[_messages.ModelMessage] | None = None,

@@ -335,7 +335,7 @@ async def main():
     @asynccontextmanager
     async def iter(
         self,
-        user_prompt: str | Sequence[_messages.UserContent],
+        user_prompt: str | Sequence[_messages.UserContent] | None = None,
         *,
         result_type: type[RunResultDataT] | None = None,
         message_history: list[_messages.ModelMessage] | None = None,

@@ -503,7 +503,7 @@ async def main():
     @overload
     def run_sync(
         self,
-        user_prompt: str | Sequence[_messages.UserContent],
+        user_prompt: str | Sequence[_messages.UserContent] | None = None,
         *,
         message_history: list[_messages.ModelMessage] | None = None,
         model: models.Model | models.KnownModelName | str | None = None,

@@ -517,7 +517,7 @@ def run_sync(
     @overload
     def run_sync(
         self,
-        user_prompt: str | Sequence[_messages.UserContent],
+        user_prompt: str | Sequence[_messages.UserContent] | None = None,
         *,
         result_type: type[RunResultDataT] | None,
         message_history: list[_messages.ModelMessage] | None = None,

@@ -531,7 +531,7 @@ def run_sync(
 
     def run_sync(
         self,
-        user_prompt: str | Sequence[_messages.UserContent],
+        user_prompt: str | Sequence[_messages.UserContent] | None = None,
         *,
         result_type: type[RunResultDataT] | None = None,
         message_history: list[_messages.ModelMessage] | None = None,
pydantic_ai_slim/pydantic_ai/tools.py

Lines changed: 1 addition & 1 deletion

@@ -48,7 +48,7 @@ class RunContext(Generic[AgentDepsT]):
     """The model used in this run."""
     usage: Usage
     """LLM usage associated with the run."""
-    prompt: str | Sequence[_messages.UserContent]
+    prompt: str | Sequence[_messages.UserContent] | None
     """The original user prompt passed to the run."""
     messages: list[_messages.ModelMessage] = field(default_factory=list)
     """Messages exchanged in the conversation so far."""
Lines changed: 78 additions & 0 deletions

@@ -0,0 +1,78 @@
interactions:
- request:
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '101'
      content-type:
      - application/json
      host:
      - api.openai.com
    method: POST
    parsed_body:
      messages:
      - content: You are a potato.
        role: system
      model: o3-mini
      n: 1
      stream: false
    uri: https://api.openai.com/v1/chat/completions
  response:
    headers:
      access-control-expose-headers:
      - X-Request-ID
      alt-svc:
      - h3=":443"; ma=86400
      connection:
      - keep-alive
      content-length:
      - '906'
      content-type:
      - application/json
      openai-organization:
      - pydantic-28gund
      openai-processing-ms:
      - '8045'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=31536000; includeSubDomains; preload
      transfer-encoding:
      - chunked
    parsed_body:
      choices:
      - finish_reason: stop
        index: 0
        message:
          annotations: []
          content: That's right—I am a potato! A spud of many talents, here to help you out. How can this humble potato be
            of service today?
          refusal: null
          role: assistant
      created: 1744099208
      id: chatcmpl-BJyAKqCjJI3mIdQmTSW6UlG6NKpjm
      model: o3-mini-2025-01-31
      object: chat.completion
      service_tier: default
      system_fingerprint: fp_617f206dd9
      usage:
        completion_tokens: 809
        completion_tokens_details:
          accepted_prediction_tokens: 0
          audio_tokens: 0
          reasoning_tokens: 768
          rejected_prediction_tokens: 0
        prompt_tokens: 11
        prompt_tokens_details:
          audio_tokens: 0
          cached_tokens: 0
        total_tokens: 820
    status:
      code: 200
      message: OK
version: 1

tests/models/test_openai.py

Lines changed: 10 additions & 0 deletions

@@ -1204,3 +1204,13 @@ class MyModel(BaseModel):
                 '$ref': '#/$defs/MyModel',
             }
         )
+
+
+@pytest.mark.vcr
+async def test_openai_model_without_system_prompt(allow_model_requests: None, openai_api_key: str):
+    m = OpenAIModel('o3-mini', provider=OpenAIProvider(api_key=openai_api_key))
+    agent = Agent(m, system_prompt='You are a potato.')
+    result = await agent.run()
+    assert result.data == snapshot(
+        "That's right—I am a potato! A spud of many talents, here to help you out. How can this humble potato be of service today?"
+    )
