forked from i-am-bee/beeai-framework
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path multi_agents_simple.py
More file actions
85 lines (75 loc) · 2.96 KB
/
multi_agents_simple.py
File metadata and controls
85 lines (75 loc) · 2.96 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
import asyncio
import sys
import traceback
from beeai_framework.backend import ChatModel
from beeai_framework.emitter import EmitterOptions
from beeai_framework.errors import FrameworkError
from beeai_framework.tools.search.wikipedia import WikipediaTool
from beeai_framework.tools.weather import OpenMeteoTool
from beeai_framework.workflows.agent import AgentWorkflow, AgentWorkflowInput
from examples.helpers.io import ConsoleReader
async def main() -> None:
    """Run a three-agent workflow (research, weather, synthesis) and log progress."""
    llm = ChatModel.from_name("ollama:llama3.1")
    workflow = AgentWorkflow(name="Smart assistant")
    reader = ConsoleReader()

    # Agent definitions, registered in order; the synthesizer carries no tools.
    agent_specs = [
        {
            "name": "Researcher",
            "role": "A diligent researcher",
            "instructions": "You look up and provide information about a specific topic.",
            "tools": [WikipediaTool()],
        },
        {
            "name": "WeatherForecaster",
            "role": "A weather reporter",
            "instructions": "You provide detailed weather reports.",
            "tools": [OpenMeteoTool()],
        },
        {
            "name": "DataSynthesizer",
            "role": "A meticulous and creative data synthesizer",
            "instructions": "You can combine disparate information into a final coherent summary.",
        },
    ]
    for spec in agent_specs:
        workflow.add_agent(llm=llm, **spec)

    location = "Saint-Tropez"

    # Three sequential tasks: history, today's weather, then a combined summary.
    tasks = [
        AgentWorkflowInput(
            prompt=f"Provide a short history of {location}.",
        ),
        AgentWorkflowInput(
            prompt=f"Provide a comprehensive weather summary for {location} today.",
            expected_output="Essential weather details such as chance of rain, temperature and wind. Only report information that is available.",  # noqa: E501
        ),
        AgentWorkflowInput(
            prompt=f"Summarize the historical and weather data for {location}.",
            expected_output=f"A paragraph that describes the history of {location}, followed by the current weather conditions.",  # noqa: E501
        ),
    ]

    run = (
        workflow.run(inputs=tasks)
        .on(
            # Match 'success' events emitted by any ChatModel, including nested emitters.
            lambda event: isinstance(event.creator, ChatModel) and event.name == "success",
            # Echo the raw LLM response to the console.
            lambda data, event: reader.write(
                "->Got response from the LLM",
                " \n->".join([str(message.content[0].model_dump()) for message in data.value.messages]),
            ),
            EmitterOptions(match_nested=True),
        )
        .on(
            # Report each completed workflow step together with its final answer.
            "success",
            lambda data, event: reader.write(
                f"->Step '{data.step}' has been completed with the following outcome.\n\n{data.state.final_answer}\n\n",
                data.model_dump(exclude={"data"}),
            ),
        )
    )
    await run
if __name__ == "__main__":
    try:
        asyncio.run(main())
    except FrameworkError as err:
        # Show the full traceback for debugging, then exit with the
        # framework's human-readable explanation as the status message.
        traceback.print_exc()
        sys.exit(err.explain())