Skip to content

Commit a5ffd58

Browse files
authored
fixes to discovery endpoint (#32)
1 parent 294e609 commit a5ffd58

8 files changed

Lines changed: 61 additions & 17 deletions

File tree

CHANGELOG.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,11 @@
22

33
All notable changes to `uipath_llm_client` (core package) will be documented in this file.
44

5+
## [1.2.2] - 2026-02-23
6+
7+
### Fix
8+
- Fixes to discovery endpoint on LLMGW
9+
510
## [1.2.1] - 2026-02-18
611

712
### Fix

README.md

Lines changed: 7 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -38,20 +38,15 @@ The client supports two UiPath backends:
3838
# Base installation (core client only)
3939
pip install uipath-llm-client
4040

41-
# With LangChain support
42-
pip install uipath-langchain-client
43-
44-
# With specific provider extras for passthrough mode
45-
pip install "uipath-langchain-client[openai]" # OpenAI/Azure OpenAI models
46-
pip install "uipath-langchain-client[google]" # Google Gemini models
47-
pip install "uipath-langchain-client[anthropic]" # Anthropic Claude models
48-
pip install "uipath-langchain-client[aws]" # AWS Bedrock models
49-
pip install "uipath-langchain-client[azure]" # Azure AI models
50-
pip install "uipath-langchain-client[vertexai]" # Google Vertex AI (Anthropic on Vertex)
51-
pip install "uipath-langchain-client[fireworks]" # Fireworks AI models
52-
pip install "uipath-langchain-client[all]" # All providers
41+
# With optional provider extras for passthrough mode
42+
pip install "uipath-llm-client[openai]" # OpenAI/Azure OpenAI models
43+
pip install "uipath-llm-client[google]" # Google Gemini models
44+
pip install "uipath-llm-client[anthropic]" # Anthropic Claude models
45+
pip install "uipath-llm-client[all]" # All of the above
5346
```
5447

48+
For LangChain support, use the separate package: `pip install uipath-langchain-client`.
49+
5550
### Using `uv`
5651

5752
1. Add the custom index to your `pyproject.toml`:

packages/uipath_langchain_client/CHANGELOG.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,11 @@
22

33
All notable changes to `uipath_langchain_client` will be documented in this file.
44

5+
## [1.2.2] - 2026-02-23
6+
7+
### Fix
8+
- Fixes to discovery endpoint on LLMGW
9+
510
## [1.2.1] - 2026-02-18
611

712
### Fix

packages/uipath_langchain_client/pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ readme = "README.md"
66
requires-python = ">=3.11"
77
dependencies = [
88
"langchain>=1.2.7",
9-
"uipath-llm-client>=1.2.1",
9+
"uipath-llm-client>=1.2.2",
1010
]
1111

1212
[project.optional-dependencies]
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
__title__ = "UiPath LangChain Client"
22
__description__ = "A Python client for interacting with UiPath's LLM services via LangChain."
3-
__version__ = "1.2.1"
3+
__version__ = "1.2.2"
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
__title__ = "UiPath LLM Client"
22
__description__ = "A Python client for interacting with UiPath's LLM services."
3-
__version__ = "1.2.1"
3+
__version__ = "1.2.2"

src/uipath_llm_client/settings/llmgateway/settings.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88

99
from uipath_llm_client.settings.base import UiPathAPIConfig, UiPathBaseSettings
1010
from uipath_llm_client.settings.llmgateway.utils import LLMGatewayEndpoints
11+
from uipath_llm_client.utils.exceptions import UiPathAPIError
1112

1213

1314
class LLMGatewayBaseSettings(UiPathBaseSettings):
@@ -107,6 +108,8 @@ def get_available_models(self) -> list[dict[str, Any]]:
107108
discovery_url = f"{self.base_url}/{self.org_id}/{self.tenant_id}/{LLMGatewayEndpoints.DISCOVERY_ENDPOINT.value}"
108109
with Client(auth=self.build_auth_pipeline(), headers=self.build_auth_headers()) as client:
109110
response = client.get(discovery_url)
111+
if response.is_error:
112+
raise UiPathAPIError.from_response(response)
110113
return response.json()
111114

112115
@override

tests/core/test_base_client.py

Lines changed: 38 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -318,12 +318,12 @@ def test_validation_requires_auth_credentials(self):
318318
LLMGatewaySettings()
319319

320320
def test_get_available_models(self, llmgw_env_vars):
321-
"""Test get_available_models returns a list of models."""
321+
"""Test get_available_models returns a list of models on success."""
322322
with patch.dict(os.environ, llmgw_env_vars, clear=True):
323323
settings = LLMGatewaySettings()
324324

325-
# Mock the HTTP request since this is a unit test
326325
mock_response = MagicMock()
326+
mock_response.is_error = False
327327
mock_response.json.return_value = [
328328
{"modelName": "gpt-4o", "vendor": "openai"},
329329
{"modelName": "claude-3-opus", "vendor": "anthropic"},
@@ -334,6 +334,42 @@ def test_get_available_models(self, llmgw_env_vars):
334334
assert isinstance(models, list)
335335
assert len(models) == 2
336336

337+
def test_get_available_models_raises_on_http_error(self, llmgw_env_vars):
338+
"""Test get_available_models raises UiPathAPIError on bad request / server error."""
339+
with patch.dict(os.environ, llmgw_env_vars, clear=True):
340+
settings = LLMGatewaySettings()
341+
342+
mock_response = MagicMock()
343+
mock_response.is_error = True
344+
mock_response.status_code = 500
345+
mock_response.reason_phrase = "Internal Server Error"
346+
mock_response.json.return_value = {"error": "Something went wrong"}
347+
mock_response.request = MagicMock()
348+
mock_response.text = ""
349+
350+
with patch.object(Client, "get", return_value=mock_response):
351+
with pytest.raises(UiPathAPIError) as exc_info:
352+
settings.get_available_models()
353+
assert exc_info.value.status_code == 500
354+
355+
def test_get_available_models_raises_on_unauthorized(self, llmgw_env_vars):
356+
"""Test get_available_models raises UiPathAuthenticationError on 401."""
357+
with patch.dict(os.environ, llmgw_env_vars, clear=True):
358+
settings = LLMGatewaySettings()
359+
360+
mock_response = MagicMock()
361+
mock_response.is_error = True
362+
mock_response.status_code = 401
363+
mock_response.reason_phrase = "Unauthorized"
364+
mock_response.json.return_value = {}
365+
mock_response.request = MagicMock()
366+
mock_response.text = ""
367+
368+
with patch.object(Client, "get", return_value=mock_response):
369+
with pytest.raises(UiPathAuthenticationError) as exc_info:
370+
settings.get_available_models()
371+
assert exc_info.value.status_code == 401
372+
337373

338374
# ============================================================================
339375
# Test LLMGateway Auth Refresh Logic

0 commit comments

Comments (0)