forked from Eyuvaraj/FetchAI-Hackathon
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathquestionnaire_agent.py
More file actions
93 lines (68 loc) · 2.95 KB
/
questionnaire_agent.py
File metadata and controls
93 lines (68 loc) · 2.95 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
from uagents import Agent, Context, Model
import ast
from openai import OpenAI
import re
# Load the OpenAI API key and build the shared client.
# BUG FIX: the original `open(".env").read()` never closed the file handle;
# a `with` block guarantees cleanup.
# NOTE(review): the entire .env file contents are used as the key — if the
# file ever holds KEY=VALUE pairs this will break; confirm the format.
with open(".env", "r") as _key_file:
    OPENAI_KEY = _key_file.read().strip()

client = OpenAI(api_key=OPENAI_KEY)
def message(role, content):
    """Build a single chat-completion message dict with *role* and *content*."""
    return {"role": role, "content": content}
def chat(messages):
    """Send *messages* to the chat-completions endpoint and return the reply text.

    Uses the module-level OpenAI ``client`` with a fixed model.
    """
    result = client.chat.completions.create(
        model="gpt-3.5-turbo-0125",
        messages=messages,
    )
    return result.choices[0].message.content
class TestRequest(Model):
    # Incoming query payload: free-text description of the 3D model to build.
    message: str
class Response(Model):
    # Reply payload: stringified list of questions, or the literal "fail".
    text: str
# The questionnaire agent: receives a model description and replies with
# clarifying questions. Deterministic seed keeps the agent address stable
# across restarts; listens locally on port 8001.
QuestionnaireAgent = Agent(
    name="QuestionnaireAgent",
    seed="questionnaire-Agent-Seed",
    port=8001,
    endpoint="http://localhost:8001/submit_input",
)
@QuestionnaireAgent.on_event("startup")
async def startup(ctx: Context):
    """Log the agent's identity (name, address, wallet) when it boots."""
    agent = QuestionnaireAgent
    ctx.logger.info(f"Starting up {agent.name}")
    ctx.logger.info(f"With address: {agent.address}")
    ctx.logger.info(f"And wallet address: {agent.wallet.address()}")
@QuestionnaireAgent.on_query(model=TestRequest, replies={Response})
async def query_handler(ctx: Context, sender: str, _query: TestRequest):
    """Handle an incoming TestRequest.

    Asks the LLM for 4-5 clarifying questions about the 3D model described in
    ``_query.message`` and replies to *sender* with the stringified question
    list, or the literal ``"fail"`` on any error.
    """
    ctx.logger.info("Query received")
    ctx.logger.info(f"Message: {_query.message}")
    ctx.logger.info(f"Sender: {sender}")
    # Shape the model is instructed to reply in (embedded in the prompt).
    output_struct = {"questions":["list of questions about the 3D model to be built, along with sample options"]}
    try:
        SYSTEM_PROMPT = f"""You're a 3D graphic designer & planner who specializes in creating a minimalist and simple 3D models schema based on descriptions of objects. Do not assume anything, you have to ask questions about the model, like how it has to be, its approx dimensions, texture, features and other user customizations too. return the response in this json format: {output_struct} only!, don't include any introductory/ending text. 4-5 questions should be enough."""
        chat_msgs = [
            message("system", SYSTEM_PROMPT),
            message("user", _query.message),
        ]
        response = chat(chat_msgs)
        print(response)
        try:
            # Strip any Markdown ```json fencing the model wrapped around the
            # JSON. BUG FIX: the original called re.sub(r"\n```", response)
            # with the replacement argument missing, which raised TypeError on
            # every fenced reply and forced the "fail" path. The optional \n
            # in each pattern also collapses the redundant search/elif chains.
            response = re.sub(r"```json\n?", "", response)
            response = re.sub(r"\n?```", "", response)
            # literal_eval (not eval) safely parses the dict-shaped reply.
            qu = list(ast.literal_eval(response)["questions"])
            for q in qu:
                ctx.logger.info(q)
            await ctx.send(sender, Response(text=str(qu)))
        except Exception as e:
            # Parsing/formatting problems: log and report failure to caller.
            ctx.logger.error(e)
            await ctx.send(sender, Response(text="fail"))
    except Exception as e:
        # LLM/network problems: log and report failure to caller.
        ctx.logger.error(e)
        await ctx.send(sender, Response(text="fail"))
if __name__ == "__main__":
    # Start the uAgents event loop; blocks until the process is stopped.
    QuestionnaireAgent.run()