forked from microsoft/agent-framework
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathopenai_assistants_basic.py
More file actions
69 lines (52 loc) · 2.26 KB
/
openai_assistants_basic.py
File metadata and controls
69 lines (52 loc) · 2.26 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
# Copyright (c) Microsoft. All rights reserved.
"""
OpenAI Assistants Basic Example

This sample demonstrates basic usage of OpenAIAssistantsClient with automatic
assistant lifecycle management, showing both streaming and non-streaming responses.
"""
# NOTE: the docstring must precede the imports to be picked up as the module
# docstring (PEP 257); imports are grouped stdlib / third-party / local.
import asyncio
from random import randint
from typing import Annotated

from pydantic import Field

from agent_framework.openai import OpenAIAssistantsClient
def get_weather(
    location: Annotated[str, Field(description="The location to get the weather for.")],
) -> str:
    """Return a randomly generated weather report for *location*.

    Args:
        location: The location to get the weather for.

    Returns:
        A human-readable sentence with a random condition and a random
        high temperature between 10 and 30 °C.
    """
    conditions = ["sunny", "cloudy", "rainy", "stormy"]
    # Derive the index bound from the list length so adding a new condition
    # never requires updating a hard-coded magic number (was randint(0, 3)).
    condition = conditions[randint(0, len(conditions) - 1)]
    return f"The weather in {location} is {condition} with a high of {randint(10, 30)}°C."
async def non_streaming_example() -> None:
    """Demonstrate a non-streaming request: the complete answer arrives in one call."""
    print("=== Non-streaming Response Example ===")
    # No assistant ID is supplied, so the client creates a temporary assistant
    # for this context and deletes it again when the context manager exits.
    client = OpenAIAssistantsClient()
    async with client.create_agent(
        instructions="You are a helpful weather agent.",
        tools=get_weather,
    ) as agent:
        question = "What's the weather like in Seattle?"
        print(f"User: {question}")
        answer = await agent.run(question)
        print(f"Agent: {answer}\n")
async def streaming_example() -> None:
    """Demonstrate a streaming request: text is printed as chunks arrive."""
    print("=== Streaming Response Example ===")
    # No assistant ID is supplied, so the client creates a temporary assistant
    # for this context and deletes it again when the context manager exits.
    client = OpenAIAssistantsClient()
    async with client.create_agent(
        instructions="You are a helpful weather agent.",
        tools=get_weather,
    ) as agent:
        question = "What's the weather like in Portland?"
        print(f"User: {question}")
        print("Agent: ", end="", flush=True)
        async for update in agent.run_stream(question):
            # Some updates carry no text (tool calls, status); skip those.
            if not update.text:
                continue
            print(update.text, end="", flush=True)
        print("\n")
async def main() -> None:
    """Run the non-streaming demo, then the streaming demo, in order."""
    print("=== Basic OpenAI Assistants Chat Client Agent Example ===")
    # Sequential on purpose: the console output of each demo must not interleave.
    for example in (non_streaming_example, streaming_example):
        await example()
# Script entry point: start a fresh event loop and run both demos.
if __name__ == "__main__":
    asyncio.run(main())