diff --git a/01-hello/src/entry.py b/01-hello/src/entry.py
index abf34ca..7f5eadd 100644
--- a/01-hello/src/entry.py
+++ b/01-hello/src/entry.py
@@ -1,4 +1,5 @@
-from workers import Response
+from workers import handler, Response
 
+@handler
 async def on_fetch(request, env):
     return Response("Hello world!")
diff --git a/02-binding/src/entry.py b/02-binding/src/entry.py
index bf8f081..1baf0cb 100644
--- a/02-binding/src/entry.py
+++ b/02-binding/src/entry.py
@@ -1,5 +1,6 @@
-from workers import Response
+from workers import handler, Response
 
+@handler
 async def on_fetch(request, env):
     await env.FOO.put("bar", "baz")
     bar = await env.FOO.get("bar")
diff --git a/04-langchain/src/worker.py b/04-langchain/src/worker.py
index 710b281..3a029db 100644
--- a/04-langchain/src/worker.py
+++ b/04-langchain/src/worker.py
@@ -1,7 +1,8 @@
-from workers import Response
+from workers import handler, Response
 from langchain_core.prompts import PromptTemplate
 from langchain_openai import OpenAI
 
+@handler
 async def on_fetch(request, env):
     prompt = PromptTemplate.from_template("Complete the following sentence: I am a {profession} and ")
     llm = OpenAI(api_key=env.API_KEY)
diff --git a/05-query-d1/src/entry.py b/05-query-d1/src/entry.py
index 19120de..ade57bd 100644
--- a/05-query-d1/src/entry.py
+++ b/05-query-d1/src/entry.py
@@ -1,5 +1,6 @@
-from workers import Response
+from workers import handler, Response
 
+@handler
 async def on_fetch(request, env):
     query = """
         SELECT quote, author
@@ -8,7 +9,7 @@ async def on_fetch(request, env):
         LIMIT 1;
       """
     results = await env.DB.prepare(query).all()
-    data = results.results[0] 
+    data = results.results[0]
 
     # Return a JSON response
     return Response.json(data)