Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
61 changes: 54 additions & 7 deletions frameworks/fastpysgi-asgi/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import multiprocessing
import zlib
import sqlite3
import mimetypes
from urllib.parse import parse_qs

import orjson
Expand All @@ -14,6 +15,39 @@
# -- Dataset and constants --------------------------------------------------------

CPU_COUNT = int(multiprocessing.cpu_count())
WRK_COUNT = min(len(os.sched_getaffinity(0)), 128)

# Fast-path MIME table for the extensions we actually ship; anything else
# falls back to the stdlib `mimetypes` guesser below.
MIME_TYPES = {
    '.css'  : 'text/css',
    '.js'   : 'application/javascript',
    '.html' : 'text/html',
    '.woff2': 'font/woff2',
    '.svg'  : 'image/svg+xml',
    '.webp' : 'image/webp',
    '.json' : 'application/json',
}
STATIC_DIR = '/data/static/'
# Maps absolute '/'-separated file path -> (file bytes, content-type bytes).
STATIC_FILES = { }

def load_static_files():
    """Preload every file under STATIC_DIR into the STATIC_FILES cache.

    Each entry maps the absolute, '/'-separated path to a tuple of
    (raw file bytes, content-type as bytes) so request handlers can
    serve assets without touching the filesystem. Unreadable files are
    skipped (best-effort preload). Safe to call again to refresh.
    """
    for root, _dirs, files in os.walk(STATIC_DIR):
        for filename in files:
            full_path = os.path.join(root, filename)
            # Normalise to '/' so keys match URL-derived lookups on any OS.
            key = full_path.replace(os.sep, '/')
            try:
                with open(full_path, 'rb') as fh:
                    data = fh.read()
            except OSError:
                # Best-effort: an unreadable file should not abort startup.
                continue
            # Lower-case the extension so '.CSS' etc. still hit the fast table.
            ext = os.path.splitext(filename)[1].lower()
            content_type = MIME_TYPES.get(ext)
            if content_type is None:
                content_type = mimetypes.guess_type(full_path)[0]
            if content_type is None:
                content_type = 'application/octet-stream'
            STATIC_FILES[key] = (data, content_type.encode())

load_static_files()

DB_PATH = "/data/benchmark.db"
DB_AVAILABLE = os.path.exists(DB_PATH)
Expand Down Expand Up @@ -57,7 +91,7 @@
except Exception:
pass

# -- SQLite (thread-local, sync — runs in threadpool via run_in_executor) --
# -- SQLite (thread-local) --------------------------------------------------

_local = threading.local()

Expand All @@ -74,7 +108,7 @@ def _get_db() -> sqlite3.Connection:
# -- Postgres DB ------------------------------------------------------------

PG_POOL_MIN_SIZE = 2
PG_POOL_MAX_SIZE = 3
PG_POOL_MAX_SIZE = 2

class NoResetConnection(asyncpg.Connection):
__slots__ = ()
Expand All @@ -91,7 +125,7 @@ async def db_close():
DATABASE_POOL = None

async def db_setup():
global DATABASE_POOL, DATABASE_URL, CPU_COUNT
global DATABASE_POOL, DATABASE_URL, WRK_COUNT
await db_close()
max_pool_size = 0
'''
Expand All @@ -107,7 +141,7 @@ async def db_setup():
pass
if not max_connections:
return
max_pool_size = int(max_connections * 0.87 / CPU_COUNT) + 1
max_pool_size = int(max_connections * 0.87 / WRK_COUNT) + 1
'''
try:
DATABASE_POOL = await asyncpg.create_pool(
Expand Down Expand Up @@ -254,6 +288,16 @@ async def async_db_endpoint(scope, receive, send):
]
return json_resp( { "items": items, "count": len(items) } )

async def static_file_endpoint(scope, receive, send):
    """Serve a preloaded static asset from the in-memory STATIC_FILES cache.

    Maps the request path under '/static/' onto STATIC_DIR and returns a
    (status, headers, body) triple; unknown paths get a 404. Lookup is
    against preloaded keys only, so no filesystem access happens here.
    """
    requested = scope['path'].removeprefix('/static/')
    cached = STATIC_FILES.get(STATIC_DIR + requested)
    if cached is not None:
        body, content_type = cached
        return 200, [[ b'Content-Type', content_type ]], body
    return text_resp(b'Not found', status = 404)

async def upload_endpoint(scope, receive, send):
size = 0
while True:
Expand Down Expand Up @@ -306,7 +350,10 @@ async def app(scope, receive, send):
await send( { 'type': 'http.response.body', 'body': b'Method Not Allowed', 'more_body': False } )
return
path = scope['path']
app_handler = ROUTES.get(path, handle_404)
if path.startswith('/static/'):
app_handler = static_file_endpoint
else:
app_handler = ROUTES.get(path, handle_404)
status, headers, body = await app_handler(scope, receive, None)
await send( { 'type': 'http.response.start', 'status': status, 'headers': headers } )
await send( { 'type': 'http.response.body', 'body': body, 'more_body': False } )
Expand All @@ -320,6 +367,6 @@ async def app(scope, receive, send):
port = 8080

fastpysgi.server.read_buffer_size = 256*1024
fastpysgi.server.backlog = 4096
fastpysgi.server.backlog = 16*1024
fastpysgi.server.loop_timeout = 1
fastpysgi.run(app, host, port, workers = CPU_COUNT, loglevel = 0)
fastpysgi.run(app, host, port, workers = WRK_COUNT, loglevel = 0)
4 changes: 2 additions & 2 deletions frameworks/fastpysgi-asgi/meta.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,13 @@
"enabled": true,
"tests": [
"baseline",
"pipelined",
"noisy",
"limited-conn",
"json",
"upload",
"compression",
"mixed",
"async-db"
"async-db",
"static"
]
}
59 changes: 53 additions & 6 deletions frameworks/fastpysgi/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import multiprocessing
import zlib
import sqlite3
import mimetypes
from urllib.parse import parse_qs

import orjson
Expand All @@ -14,6 +15,39 @@
# -- Dataset and constants --------------------------------------------------------

CPU_COUNT = int(multiprocessing.cpu_count())
WRK_COUNT = min(len(os.sched_getaffinity(0)), 128)

# Fast-path MIME table for the extensions we actually ship; anything else
# falls back to the stdlib `mimetypes` guesser below.
MIME_TYPES = {
    '.css'  : 'text/css',
    '.js'   : 'application/javascript',
    '.html' : 'text/html',
    '.woff2': 'font/woff2',
    '.svg'  : 'image/svg+xml',
    '.webp' : 'image/webp',
    '.json' : 'application/json',
}
STATIC_DIR = '/data/static/'
# Maps absolute '/'-separated file path -> (file bytes, content-type str).
STATIC_FILES = { }

def load_static_files():
    """Preload every file under STATIC_DIR into the STATIC_FILES cache.

    Each entry maps the absolute, '/'-separated path to a tuple of
    (raw file bytes, content-type string) so request handlers can serve
    assets without touching the filesystem. Unreadable files are skipped
    (best-effort preload). Safe to call again to refresh.
    """
    for root, _dirs, files in os.walk(STATIC_DIR):
        for filename in files:
            full_path = os.path.join(root, filename)
            # Normalise to '/' so keys match URL-derived lookups on any OS.
            key = full_path.replace(os.sep, '/')
            try:
                with open(full_path, 'rb') as fh:
                    data = fh.read()
            except OSError:
                # Best-effort: an unreadable file should not abort startup.
                continue
            # Lower-case the extension so '.CSS' etc. still hit the fast table.
            ext = os.path.splitext(filename)[1].lower()
            content_type = MIME_TYPES.get(ext)
            if content_type is None:
                content_type = mimetypes.guess_type(full_path)[0]
            if content_type is None:
                content_type = 'application/octet-stream'
            STATIC_FILES[key] = (data, content_type)

load_static_files()

DB_PATH = "/data/benchmark.db"
DB_AVAILABLE = os.path.exists(DB_PATH)
Expand Down Expand Up @@ -56,7 +90,7 @@
except Exception:
pass

# -- SQLite (thread-local, sync — runs in threadpool via run_in_executor) --
# -- SQLite (thread-local) --------------------------------------------------

_local = threading.local()

Expand Down Expand Up @@ -85,7 +119,7 @@ def db_close():
DATABASE_POOL = None

def db_setup():
global DATABASE_POOL, DATABASE_URL, CPU_COUNT
global DATABASE_POOL, DATABASE_URL, WRK_COUNT
db_close()
max_pool_size = 0
try:
Expand Down Expand Up @@ -230,6 +264,16 @@ def async_db_endpoint(env):
except Exception:
return json_resp( { "items": [ ], "count": 0 } )

def static_file_endpoint(env):
    """Serve a preloaded static asset from the in-memory STATIC_FILES cache.

    Maps PATH_INFO under '/static/' onto STATIC_DIR and returns a
    (status, headers, body) triple; unknown paths get a 404. Lookup is
    against preloaded keys only, so no filesystem access happens here.
    """
    requested = env["PATH_INFO"].removeprefix('/static/')
    cached = STATIC_FILES.get(STATIC_DIR + requested)
    if cached is not None:
        body, content_type = cached
        return 200, [ ('Content-Type', content_type) ], body
    return text_resp(b'Not found', status = 404)


READ_BUF_SIZE = 256*1024

Expand Down Expand Up @@ -280,8 +324,11 @@ def app(env, start_response):
if req_method not in [ 'GET', 'POST' ]:
status, headers, body = handle_405(env)
else:
path = env["PATH_INFO"]
app_handler = ROUTES.get(path, handle_404)
path = env["PATH_INFO"]
if path.startswith('/static/'):
app_handler = static_file_endpoint
else:
app_handler = ROUTES.get(path, handle_404)
status, headers, body = app_handler(env)
start_response(HTTP_STATUS.get(status, str(status)), headers)
return [ body ]
Expand All @@ -295,5 +342,5 @@ def app(env, start_response):
port = 8080

fastpysgi.server.read_buffer_size = READ_BUF_SIZE
fastpysgi.server.backlog = 4096
fastpysgi.run(app, host, port, workers = CPU_COUNT, loglevel = 0)
fastpysgi.server.backlog = 16*1024
fastpysgi.run(app, host, port, workers = WRK_COUNT, loglevel = 0)
3 changes: 2 additions & 1 deletion frameworks/fastpysgi/meta.json
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
"upload",
"compression",
"mixed",
"async-db"
"async-db",
"static"
]
}