-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.py
More file actions
78 lines (60 loc) · 2.14 KB
/
main.py
File metadata and controls
78 lines (60 loc) · 2.14 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
from logging import error
from typing import List
from dotenv import load_dotenv
from yaka_llm import GeminiModel as BaseModel
from yaka_llm.core import BasePrompt, UserPrompt, ModelPrompt
import os
# --- Configuration ---
# Load variables from a local .env file (if present) so the API key can
# stay out of source control.
load_dotenv()
LLM_API_KEY = os.getenv("LLM_API_KEY")
if not LLM_API_KEY:
    # Fail fast: nothing below works without a key.
    error("No LLM_API_KEY found in env")
    # raise SystemExit instead of exit(): the exit() builtin is injected
    # by the `site` module and is not guaranteed in all run environments.
    raise SystemExit(1)

# Single shared model client; the @model.tool decorators below register
# tools on this instance.
model = BaseModel(
    "gemini-2.5-flash-lite",
    api_key=LLM_API_KEY,
)
# --- Tools ---
@model.tool
def list_all_files():
    """List all files in the current directory, after user approval.

    Returns:
        dict: {"result": str} — the stringified directory listing, or a
        denial message if the user refuses permission.
    """
    # Permission gate: fail closed. Only an explicit "y" grants access;
    # the original denied only on the exact answer "n", so any other
    # input (empty, typo, "no") silently allowed the listing.
    if input("LLM wants to list all files, allow ?(y/n)").lower() != "y":
        return {"result": "User denied running 'list_all_files' "}
    files = str(os.listdir("."))
    return {"result": files}
@model.tool
def create_file(filename: str, content: str):
    """Create (or overwrite) a file with the given name and content.

    Args:
        filename: Name/path of the file to write, relative to the CWD.
        content: Text to write (UTF-8).

    Returns:
        dict: on success {"result": str, "path": absolute path};
        on denial or failure {"result": <message>}.
    """
    # Permission gate: show the actual filename so the user knows what
    # they are approving (the prompt previously omitted it), and fail
    # closed — only an explicit "y" grants permission.
    if input(f"LLM wants to create file '{filename}', allow? (y/n) ").lower() != "y":
        return {"result": "User denied running 'create_file'"}
    try:
        # 'w' mode creates the file or overwrites it if it already exists
        with open(filename, 'w', encoding='utf-8') as f:
            f.write(content)
        return {
            "result": f"Successfully created file: {filename}",
            "path": os.path.abspath(filename)
        }
    except Exception as e:
        # Best-effort tool boundary: report the failure back to the model
        # instead of crashing the REPL.
        return {"result": f"Error creating file: {str(e)}"}
def main():
    """Interactive REPL: forward user prompts to the model, keeping a
    running conversation history so the model has context across turns.
    """
    history: List[BasePrompt] = []
    while True:
        try:
            prompt = input("ASK LLM - $ ")
            # Slash commands end the conversation.
            if prompt.lower() in ["/exit", "/quit", "/stop"]:
                print("Convo ended")
                break
            llm_response = model.call(history=history, prompt=prompt)
            # Substitute the placeholder BEFORE printing: previously an
            # empty response printed as a blank line while the history
            # recorded the placeholder, so screen and history disagreed.
            if not llm_response:
                llm_response = "<LLM didnt say anything>"
            print(f"LLM : {llm_response}")
            history.append(UserPrompt(prompt))
            history.append(ModelPrompt(llm_response))
        except (KeyboardInterrupt, EOFError):
            # Ctrl-C / Ctrl-D also end the conversation cleanly.
            break


if __name__ == "__main__":
    main()