forked from i-am-bee/beeai-framework
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathchat_stream.py
More file actions
32 lines (24 loc) · 950 Bytes
/
chat_stream.py
File metadata and controls
32 lines (24 loc) · 950 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
import asyncio
import sys
import traceback
from beeai_framework.adapters.ollama import OllamaChatModel
from beeai_framework.backend import UserMessage
from beeai_framework.errors import FrameworkError
from examples.helpers.io import ConsoleReader
async def main() -> None:
    """Interactive console chat that streams completions from a local Ollama model.

    Reads prompts from the console in a loop. For each prompt it requests a
    streamed completion, echoing every emitter event as it arrives, then prints
    the final text content and the raw plain-form of each response message.
    """
    llm = OllamaChatModel("llama3.1")
    reader = ConsoleReader()
    for prompt in reader:
        # stream=True is required for incremental token events — without it this
        # "chat_stream" example only fires coarse start/finish events, so the
        # wildcard emitter listener below would never show streamed output.
        response = await llm.create(messages=[UserMessage(prompt)], stream=True).observe(
            lambda emitter: emitter.match(
                "*", lambda data, event: reader.write(f"LLM 🤖 (event: {event.name})", str(data))
            )
        )
        reader.write("LLM 🤖 (txt) : ", response.get_text_content())
        reader.write("LLM 🤖 (raw) : ", "\n".join([str(msg.to_plain()) for msg in response.messages]))
# Script entry point: run the async chat loop, and on a framework-level
# failure print the traceback for debugging before exiting with the
# framework's human-readable explanation as the process exit message.
if __name__ == "__main__":
    try:
        asyncio.run(main())
    except FrameworkError as err:
        traceback.print_exc()
        sys.exit(err.explain())