Skip to content

Commit 5841d3f

Browse files
committed
fix: code review bugs + bump both packages to 1.6.0
Bug fixes (critical): - callbacks.py: add ABC to UiPathDynamicHeadersCallback so @abstractmethod is enforced — previously the class could be instantiated without implementing get_headers(), silently deferring the error to runtime - clients/anthropic/chat_models.py: make vendor_type a required field (no default) and seed api_config with VendorType.VERTEXAI as placeholder; the old default of VendorType.ANTHROPIC caused the model validator to immediately raise ValueError, making the class impossible to construct directly; also removed dead VendorType.ANTHROPIC / VendorType.AZURE branches from _anthropic_client and _async_anthropic_client that could never be reached, and unused imports - clients/bedrock/utils.py: fix json.loads(kwargs.get('body', {})) — passing a dict as default caused TypeError since json.loads requires str/bytes; changed default to '{}' string; also corrected converse() and converse_stream() system parameter type annotation from str|None to list[dict[str,Any]]|None to match the list[dict] value langchain-aws actually passes - clients/normalized/chat_models.py: add 'stream': True to request body in _uipath_stream and _uipath_astream — without it the server returns a regular non-streaming JSON response instead of SSE; fix fragile SSE prefix stripping from chunk.split('data:')[1] (splits on all occurrences) to chunk[len('data:'):].strip(); fix _generate_chunk to set text=content or '' instead of text=original_message (raw JSON wire data) - factory.py: replace bare model_info['vendor'] key access in get_embedding_model with model_info.get('vendor') + explicit ValueError, matching the safe pattern already used in get_chat_model Bug fixes (minor): - base_client.py: correct return type annotations on _astream, _uipath_astream, and uipath_astream from AsyncIterator to AsyncGenerator (these are async generator functions that contain yield, not plain iterables) - demo.py: replace unsafe eval(expression) with an AST-based arithmetic-only evaluator to prevent 
arbitrary code execution via LLM-generated input Version bumps: - packages/uipath_langchain_client: 1.5.10 -> 1.6.0 - src/uipath/llm_client: 1.5.10 -> 1.6.0
1 parent a01f0cf commit 5841d3f

9 files changed

Lines changed: 65 additions & 63 deletions

File tree

packages/uipath_langchain_client/demo.py

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -136,8 +136,20 @@ def calculate(expression: str) -> str:
136136
Args:
137137
expression: A mathematical expression to evaluate (e.g., "2 + 2").
138138
"""
139+
import ast
139140
try:
140-
result = eval(expression)
141+
# Restrict to a safe subset: only literals and basic arithmetic operators.
142+
# This prevents arbitrary code execution via eval().
143+
tree = ast.parse(expression, mode="eval")
144+
allowed_node_types = (
145+
ast.Expression, ast.BinOp, ast.UnaryOp, ast.Constant,
146+
ast.Add, ast.Sub, ast.Mult, ast.Div, ast.FloorDiv,
147+
ast.Mod, ast.Pow, ast.USub, ast.UAdd,
148+
)
149+
for node in ast.walk(tree):
150+
if not isinstance(node, allowed_node_types):
151+
return f"Error: unsupported operation in expression"
152+
result = eval(compile(tree, "<string>", "eval"), {"__builtins__": {}})
141153
return str(result)
142154
except Exception as e:
143155
return f"Error: {e}"
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
__title__ = "UiPath LangChain Client"
22
__description__ = "A Python client for interacting with UiPath's LLM services via LangChain."
3-
__version__ = "1.5.10"
3+
__version__ = "1.6.0"

packages/uipath_langchain_client/src/uipath_langchain_client/base_client.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@
2525

2626
import logging
2727
from abc import ABC
28-
from collections.abc import AsyncIterator, Iterator, Mapping, Sequence
28+
from collections.abc import AsyncGenerator, AsyncIterator, Iterator, Mapping, Sequence
2929
from functools import cached_property
3030
from typing import Any, Literal
3131

@@ -282,7 +282,7 @@ async def uipath_astream(
282282
stream_type: Literal["text", "bytes", "lines", "raw"] = "lines",
283283
raise_status_error: bool = False,
284284
**kwargs: Any,
285-
) -> AsyncIterator[str | bytes]:
285+
) -> AsyncGenerator[str | bytes, None]:
286286
"""Make an asynchronous streaming HTTP request to the UiPath API.
287287
288288
Args:
@@ -423,7 +423,7 @@ async def _astream(
423423
stop: list[str] | None = None,
424424
run_manager: AsyncCallbackManagerForLLMRun | None = None,
425425
**kwargs: Any,
426-
) -> AsyncIterator[ChatGenerationChunk]:
426+
) -> AsyncGenerator[ChatGenerationChunk, None]:
427427
set_captured_response_headers({})
428428
try:
429429
first = True
@@ -443,7 +443,7 @@ async def _uipath_astream(
443443
stop: list[str] | None = None,
444444
run_manager: AsyncCallbackManagerForLLMRun | None = None,
445445
**kwargs: Any,
446-
) -> AsyncIterator[ChatGenerationChunk]:
446+
) -> AsyncGenerator[ChatGenerationChunk, None]:
447447
"""Override in subclasses to provide the core (non-wrapped) async stream logic."""
448448
async for chunk in super()._astream(messages, stop=stop, run_manager=run_manager, **kwargs):
449449
yield chunk

packages/uipath_langchain_client/src/uipath_langchain_client/callbacks.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,14 @@
11
"""LangChain callbacks for dynamic per-request header injection."""
22

3-
from abc import abstractmethod
3+
from abc import ABC, abstractmethod
44
from typing import Any
55

66
from langchain_core.callbacks import BaseCallbackHandler
77

88
from uipath.llm_client.utils.headers import set_dynamic_request_headers
99

1010

11-
class UiPathDynamicHeadersCallback(BaseCallbackHandler):
11+
class UiPathDynamicHeadersCallback(BaseCallbackHandler, ABC):
1212
"""Base callback for injecting dynamic headers into each LLM gateway request.
1313
1414
Extend this class and implement ``get_headers()`` to return the headers to

packages/uipath_langchain_client/src/uipath_langchain_client/clients/anthropic/chat_models.py

Lines changed: 17 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -15,13 +15,9 @@
1515

1616
try:
1717
from anthropic import (
18-
Anthropic,
1918
AnthropicBedrock,
20-
AnthropicFoundry,
2119
AnthropicVertex,
22-
AsyncAnthropic,
2320
AsyncAnthropicBedrock,
24-
AsyncAnthropicFoundry,
2521
AsyncAnthropicVertex,
2622
)
2723
from langchain_anthropic.chat_models import ChatAnthropic
@@ -33,13 +29,18 @@
3329

3430

3531
class UiPathChatAnthropic(UiPathBaseChatModel, ChatAnthropic):
32+
# api_config.vendor_type is a placeholder here; the model_validator below
33+
# always overwrites it (and api_flavor / api_version) from self.vendor_type.
34+
# VendorType.VERTEXAI is used as the seed so UiPathAPIConfig's own validator
35+
# (which requires vendor_type when routing_mode=PASSTHROUGH) is satisfied.
3636
api_config: UiPathAPIConfig = UiPathAPIConfig(
3737
api_type=ApiType.COMPLETIONS,
3838
routing_mode=RoutingMode.PASSTHROUGH,
39-
vendor_type=VendorType.ANTHROPIC,
39+
vendor_type=VendorType.VERTEXAI,
4040
freeze_base_url=True,
4141
)
42-
vendor_type: VendorType = VendorType.ANTHROPIC
42+
# Required — caller must supply VendorType.VERTEXAI or VendorType.AWSBEDROCK.
43+
vendor_type: VendorType
4344

4445
@model_validator(mode="after")
4546
def setup_api_flavor_and_version(self) -> Self:
@@ -52,7 +53,8 @@ def setup_api_flavor_and_version(self) -> Self:
5253
self.api_config.api_flavor = ApiFlavor.INVOKE
5354
case _:
5455
raise ValueError(
55-
"anthropic and azure vendors are currently not supported by UiPath"
56+
f"vendor_type '{self.vendor_type}' is not supported by UiPathChatAnthropic. "
57+
"Use VendorType.VERTEXAI or VendorType.AWSBEDROCK."
5658
)
5759
return self
5860

@@ -64,24 +66,8 @@ def setup_api_flavor_and_version(self) -> Self:
6466
@cached_property
6567
def _anthropic_client(
6668
self,
67-
) -> Anthropic | AnthropicVertex | AnthropicBedrock | AnthropicFoundry:
69+
) -> AnthropicVertex | AnthropicBedrock:
6870
match self.vendor_type:
69-
case VendorType.ANTHROPIC:
70-
return Anthropic(
71-
api_key="PLACEHOLDER",
72-
base_url=str(self.uipath_sync_client.base_url),
73-
default_headers=dict(self.uipath_sync_client.headers),
74-
max_retries=0, # handled by the UiPathBaseChatModel
75-
http_client=self.uipath_sync_client,
76-
)
77-
case VendorType.AZURE:
78-
return AnthropicFoundry(
79-
api_key="PLACEHOLDER",
80-
base_url=str(self.uipath_sync_client.base_url),
81-
default_headers=dict(self.uipath_sync_client.headers),
82-
max_retries=0, # handled by the UiPathBaseChatModel
83-
http_client=self.uipath_sync_client,
84-
)
8571
case VendorType.VERTEXAI:
8672
return AnthropicVertex(
8773
region="PLACEHOLDER",
@@ -103,29 +89,15 @@ def _anthropic_client(
10389
http_client=self.uipath_sync_client,
10490
)
10591
case _:
106-
raise ValueError("Anthropic models are currently not hosted on any other provider")
92+
raise ValueError(
93+
f"vendor_type '{self.vendor_type}' is not supported by UiPathChatAnthropic."
94+
)
10795

10896
@cached_property
10997
def _async_anthropic_client(
11098
self,
111-
) -> AsyncAnthropic | AsyncAnthropicVertex | AsyncAnthropicBedrock | AsyncAnthropicFoundry:
99+
) -> AsyncAnthropicVertex | AsyncAnthropicBedrock:
112100
match self.vendor_type:
113-
case VendorType.ANTHROPIC:
114-
return AsyncAnthropic(
115-
api_key="PLACEHOLDER",
116-
base_url=str(self.uipath_async_client.base_url),
117-
default_headers=dict(self.uipath_async_client.headers),
118-
max_retries=0, # handled by the UiPathBaseChatModel
119-
http_client=self.uipath_async_client,
120-
)
121-
case VendorType.AZURE:
122-
return AsyncAnthropicFoundry(
123-
api_key="PLACEHOLDER",
124-
base_url=str(self.uipath_async_client.base_url),
125-
default_headers=dict(self.uipath_async_client.headers),
126-
max_retries=0, # handled by the UiPathBaseChatModel
127-
http_client=self.uipath_async_client,
128-
)
129101
case VendorType.VERTEXAI:
130102
return AsyncAnthropicVertex(
131103
region="PLACEHOLDER",
@@ -147,7 +119,9 @@ def _async_anthropic_client(
147119
http_client=self.uipath_async_client,
148120
)
149121
case _:
150-
raise ValueError("Anthropic models are currently not hosted on any other provider")
122+
raise ValueError(
123+
f"vendor_type '{self.vendor_type}' is not supported by UiPathChatAnthropic."
124+
)
151125

152126
@override
153127
def _create(self, payload: dict) -> Any:

packages/uipath_langchain_client/src/uipath_langchain_client/clients/bedrock/utils.py

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -71,15 +71,19 @@ def invoke_model(self, **kwargs: Any) -> Any:
7171
return {
7272
"body": self.httpx_client.post(
7373
"/",
74-
json=json.loads(kwargs.get("body", {})),
74+
json=json.loads(kwargs.get("body", "{}")),
7575
)
7676
}
7777

7878
def invoke_model_with_response_stream(self, **kwargs: Any) -> Any:
79-
return {"body": self._stream_generator(json.loads(kwargs.get("body", {})))}
79+
return {"body": self._stream_generator(json.loads(kwargs.get("body", "{}")))}
8080

8181
def converse(
82-
self, *, messages: list[dict[str, Any]], system: str | None = None, **params: Any
82+
self,
83+
*,
84+
messages: list[dict[str, Any]],
85+
system: list[dict[str, Any]] | None = None,
86+
**params: Any,
8387
) -> Any:
8488
if self.httpx_client is None:
8589
raise ValueError("httpx_client is not set")
@@ -95,7 +99,11 @@ def converse(
9599
).json()
96100

97101
def converse_stream(
98-
self, *, messages: list[dict[str, Any]], system: str | None = None, **params: Any
102+
self,
103+
*,
104+
messages: list[dict[str, Any]],
105+
system: list[dict[str, Any]] | None = None,
106+
**params: Any,
99107
) -> Any:
100108
return {
101109
"stream": self._stream_generator(

packages/uipath_langchain_client/src/uipath_langchain_client/clients/normalized/chat_models.py

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
"""
2525

2626
import json
27-
from collections.abc import AsyncIterator, Callable, Iterator, Sequence
27+
from collections.abc import AsyncGenerator, AsyncIterator, Callable, Iterator, Sequence
2828
from typing import Any
2929

3030
from langchain_core.callbacks import (
@@ -377,10 +377,10 @@ def _generate_chunk(
377377
)
378378

379379
return ChatGenerationChunk(
380-
text=original_message,
380+
text=content or "",
381381
generation_info=generation_info,
382382
message=AIMessageChunk(
383-
content=content,
383+
content=content or "",
384384
usage_metadata=usage_metadata,
385385
tool_call_chunks=tool_call_chunks,
386386
),
@@ -394,12 +394,13 @@ def _uipath_stream(
394394
**kwargs: Any,
395395
) -> Iterator[ChatGenerationChunk]:
396396
request_body = self._preprocess_request(messages, stop=stop, **kwargs)
397+
request_body["stream"] = True
397398
for chunk in self.uipath_stream(
398399
request_body=request_body, stream_type="lines", raise_status_error=True
399400
):
400401
chunk = str(chunk)
401402
if chunk.startswith("data:"):
402-
chunk = chunk.split("data:")[1].strip()
403+
chunk = chunk[len("data:"):].strip()
403404
try:
404405
json_data = json.loads(chunk)
405406
except json.JSONDecodeError:
@@ -414,14 +415,15 @@ async def _uipath_astream(
414415
stop: list[str] | None = None,
415416
run_manager: AsyncCallbackManagerForLLMRun | None = None,
416417
**kwargs: Any,
417-
) -> AsyncIterator[ChatGenerationChunk]:
418+
) -> AsyncGenerator[ChatGenerationChunk, None]:
418419
request_body = self._preprocess_request(messages, stop=stop, **kwargs)
420+
request_body["stream"] = True
419421
async for chunk in self.uipath_astream(
420422
request_body=request_body, stream_type="lines", raise_status_error=True
421423
):
422424
chunk = str(chunk)
423425
if chunk.startswith("data:"):
424-
chunk = chunk.split("data:")[1].strip()
426+
chunk = chunk[len("data:"):].strip()
425427
try:
426428
json_data = json.loads(chunk)
427429
except json.JSONDecodeError:

packages/uipath_langchain_client/src/uipath_langchain_client/factory.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -298,7 +298,13 @@ def get_embedding_model(
298298
**model_kwargs,
299299
)
300300

301-
discovered_vendor_type = model_info["vendor"].lower()
301+
discovered_vendor_type = model_info.get("vendor")
302+
if discovered_vendor_type is None:
303+
raise ValueError(
304+
f"No vendor type found in model info for embedding model '{model_name}'. "
305+
f"Model info returned: {model_info}"
306+
)
307+
discovered_vendor_type = discovered_vendor_type.lower()
302308
match discovered_vendor_type:
303309
case VendorType.OPENAI:
304310
if is_uipath_owned:
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
__title__ = "UiPath LLM Client"
22
__description__ = "A Python client for interacting with UiPath's LLM services."
3-
__version__ = "1.5.10"
3+
__version__ = "1.6.0"

0 commit comments

Comments (0)