7 files changed, +725 −1 lines changed
NOTE: see [agent_ref/doc]

## Updates for 0.1.13
1. Support LiteLLM models (src/agents/extensions/models/litellm_model.py)
2. Support multi_provider (src/agents/models/multi_provider.py)
3. Support the extra_headers parameter (src/agents/model_settings.py)
4. Support non-strict JSON output (src/agents/agent_output.py)
5. Split out Converter (src/agents/models/chatcmpl_converter.py)
6. Split out ChatCmplStreamHandler (src/agents/models/chatcmpl_stream_handler.py)
7. Add a `cancel` method to RunResultStreaming (src/agents/result.py)

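A rough usage sketch of items 1, 3, and 7, assuming the 0.1.13 module paths listed above; the LiteLLM model name, API key, and header value are placeholders, not part of this repo:

```python
import asyncio

from agents import Agent, ModelSettings, Runner
from agents.extensions.models.litellm_model import LitellmModel  # item 1: LiteLLM models


async def main() -> None:
    agent = Agent(
        name="Assistant",
        instructions="You are a helpful assistant",
        # item 1: route the request through LiteLLM (model name and key are placeholders)
        model=LitellmModel(model="anthropic/claude-3-5-sonnet-20241022", api_key="sk-..."),
        # item 3: extra_headers forwarded with the model request
        model_settings=ModelSettings(extra_headers={"X-Request-Source": "demo"}),
    )

    result = Runner.run_streamed(agent, "Write a haiku.")
    async for event in result.stream_events():
        print(event.type)
        result.cancel()  # item 7: stop the streamed run early
        break


asyncio.run(main())
```
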
## RULES
1. Transparent wrapping?
2. Changes: record every modification point in OVERWRITE.md
3. Pull the SDK into the application repo via a git submodule (pip install -e ./lib/openai-agents); see the sketch after this list
4. Sync with upstream regularly:
```sh
git remote add upstream https://github.com/openai/openai-agents-python
git fetch upstream
git merge upstream/main
```
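
A sketch of rule 3, assuming the submodule lives at lib/openai-agents; the fork URL is a placeholder:

```sh
# add the (forked) SDK as a submodule and install it in editable mode
git submodule add <fork-url> lib/openai-agents   # <fork-url> is a placeholder
git submodule update --init --recursive
pip install -e ./lib/openai-agents
```
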
```sh
# merge upstream changes (upstream/main -> eason)
git remote add upstream https://github.com/openai/openai-agents-python
git fetch upstream
git merge upstream/main
# push the merged result to the eason branch
git push origin eason
```
Original file line number Diff line number Diff line change
1
+ from phoenix .otel import register
2
+
3
+ # configure the Phoenix tracer
4
+ tracer_provider = register (
5
+ protocol = "grpc" , # "http/protobuf",
6
+ project_name = "default" ,
7
+ batch = True ,
8
+ auto_instrument = True ,
9
+ endpoint = "http://9.134.230.111:4317" # 6006"
10
+ )
11
+ tracer = tracer_provider .get_tracer (__name__ )
12
+
13
+ @tracer .chain
14
+ def my_func (input : str ) -> str :
15
+ return "output"
16
+
17
+ my_func ("input" )
18
+
19
+ # Add OpenAI API Key
20
+ import openai
21
+
22
+ client = openai .OpenAI ()
23
+ response = client .chat .completions .create (
24
+ model = "gpt-4o" ,
25
+ messages = [{"role" : "user" , "content" : "Write a haiku." }],
26
+ )
27
+ print (response .choices [0 ].message .content )

```python
"""
OpenAI
- ChatCompletion
- Responses

pip install openinference-instrumentation-openai
"""
from openinference.instrumentation.openai import OpenAIInstrumentor
from phoenix.otel import register

tracer_provider = register(
    protocol="grpc",  # or "http/protobuf"
    project_name="default",
    batch=True,
    # auto_instrument=True,
    endpoint="http://9.134.230.111:4317",  # 6006 for "http/protobuf"
)
OpenAIInstrumentor().instrument(tracer_provider=tracer_provider)


import openai

client = openai.OpenAI()

# Responses API: flat tool schema
tools = [{
    "type": "function",
    "name": "get_weather",
    "description": "Get current temperature for a given location.",
    "parameters": {
        "type": "object",
        "properties": {
            "location": {
                "type": "string",
                "description": "City and country e.g. Bogotá, Colombia"
            }
        },
        "required": ["location"],
        "additionalProperties": False
    }
}]
response = client.responses.create(
    model="gpt-4o",
    input=[{"role": "user", "content": "What is the weather like in Paris today?"}],
    tools=tools
)
print(response)


# Chat Completions API: tool schema nested under "function"
tools = [{
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Get current temperature for a given location.",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "City and country e.g. Bogotá, Colombia"
                }
            },
            "required": ["location"],
            "additionalProperties": False
        },
        "strict": True
    }
}]
response = client.chat.completions.create(
    model="gpt-4o",
    messages=[{"role": "user", "content": "What is the weather like in Paris today?"}],
    tools=tools
)
print(response)
```

```python
"""
pip install openinference-instrumentation-openai-agents openai-agents
"""
from openinference.instrumentation.openai_agents import OpenAIAgentsInstrumentor
from phoenix.otel import register

tracer_provider = register(
    protocol="grpc",  # or "http/protobuf"
    project_name="default",
    batch=True,
    # auto_instrument=True,
    endpoint="http://9.134.230.111:4317",  # 6006 for "http/protobuf"
)
OpenAIAgentsInstrumentor().instrument(tracer_provider=tracer_provider)

from agents import Agent, Runner, function_tool


@function_tool
def get_weather(city: str) -> str:
    """Get current temperature for a given location."""
    return f"The weather in {city} is sunny."


agent = Agent(name="Assistant", instructions="You are a helpful assistant", tools=[get_weather])
result = Runner.run_sync(agent, "What is the weather like in Paris today?")
print(result.final_output)
```

```python
"""
pip install openinference-instrumentation-openai-agents openai-agents
"""
# NOTE: forcing OpenAIInstrumentor here instead of the Agents instrumentor also works!
from openinference.instrumentation.openai import OpenAIInstrumentor
from phoenix.otel import register

tracer_provider = register(
    protocol="grpc",  # or "http/protobuf"
    project_name="default",
    batch=True,
    # auto_instrument=True,
    endpoint="http://9.134.230.111:4317",  # 6006 for "http/protobuf"
)
OpenAIInstrumentor().instrument(tracer_provider=tracer_provider)

from agents import Agent, Runner, function_tool


@function_tool
def get_weather(city: str) -> str:
    """Get current temperature for a given location."""
    return f"The weather in {city} is sunny."


agent = Agent(name="Assistant", instructions="You are a helpful assistant", tools=[get_weather])
result = Runner.run_sync(agent, "What is the weather like in Paris today?")
print(result.final_output)
```