Skip to content

Commit ae5c65b

Browse files
author
giulio-leone
committed
fix: bound TypeAdapter lru_cache to prevent memory leak in multi-threaded usage
1 parent 656e3ca commit ae5c65b

1 file changed

Lines changed: 1 addition & 1 deletion

File tree

src/openai/_models.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -799,7 +799,7 @@ class GenericModel(BaseGenericModel, BaseModel):
 if not PYDANTIC_V1:
     from pydantic import TypeAdapter as _TypeAdapter

-    _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter))
+    _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=4096)(_TypeAdapter))

 if TYPE_CHECKING:
     from pydantic import TypeAdapter

0 commit comments

Comments (0)