Skip to content

Commit 3faef71

Browse files
committed
feat: add chat models
1 parent 42603d3 commit 3faef71

14 files changed

Lines changed: 3147 additions & 1260 deletions

File tree

.gitignore

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -177,3 +177,8 @@ cython_debug/
177177
**/.uipath
178178
**/**.nupkg
179179
**/__uipath/
180+
.claude/settings.local.json
181+
182+
/.vscode/launch.json
183+
184+
playground.py

pyproject.toml

Lines changed: 16 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "uipath-llamaindex"
3-
version = "0.1.5"
3+
version = "0.1.6"
44
description = "UiPath LlamaIndex SDK"
55
readme = { file = "README.md", content-type = "text/markdown" }
66
requires-python = ">=3.11"
@@ -9,6 +9,7 @@ dependencies = [
99
"llama-index>=0.14.8",
1010
"llama-index-embeddings-azure-openai>=0.4.1",
1111
"llama-index-llms-azure-openai>=0.4.2",
12+
"llama-index-llms-google-genai>=0.8.0",
1213
"openinference-instrumentation-llama-index>=4.3.9",
1314
"uipath>=2.2.26, <2.3.0",
1415
]
@@ -24,6 +25,18 @@ maintainers = [
2425
{ name = "Cristian Pufu", email = "cristian.pufu@uipath.com" }
2526
]
2627

28+
[project.optional-dependencies]
29+
bedrock = [
30+
"llama-index-llms-bedrock>=0.3.0",
31+
"llama-index-llms-bedrock-converse>=0.3.0",
32+
"boto3>=1.28.0",
33+
"aiobotocore>=2.5.0",
34+
]
35+
vertex = [
36+
"llama-index-llms-google-genai>=0.8.0",
37+
"google-genai>=1.0.0",
38+
]
39+
2740
[project.entry-points."uipath.middlewares"]
2841
register = "uipath_llamaindex.middlewares:register_middleware"
2942

@@ -58,6 +71,8 @@ select = ["E", "F", "B", "I"]
5871

5972
[tool.ruff.lint.per-file-ignores]
6073
"*" = ["E501"]
74+
"src/uipath_llamaindex/llms/bedrock.py" = ["E402"]
75+
"src/uipath_llamaindex/llms/vertex.py" = ["E402"]
6176

6277
[tool.ruff.format]
6378
quote-style = "double"
Lines changed: 15 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,22 @@
1-
from ._openai import (
1+
from ._openai import UiPathOpenAI
2+
from .supported_models import (
3+
BedrockModel,
4+
GeminiModel,
25
OpenAIModel,
3-
UiPathOpenAI,
46
)
57

8+
# Note: UiPathVertex requires optional dependencies (google-genai, llama-index-llms-google-genai)
9+
# Import it directly from uipath_llamaindex.llms.vertex:
10+
# from uipath_llamaindex.llms.vertex import UiPathVertex
11+
12+
# Note: UiPathChatBedrock and UiPathChatBedrockConverse require optional dependencies
13+
# (boto3, aiobotocore, llama-index-llms-bedrock, llama-index-llms-bedrock-converse)
14+
# Import them directly from uipath_llamaindex.llms.bedrock:
15+
# from uipath_llamaindex.llms.bedrock import UiPathChatBedrock, UiPathChatBedrockConverse
16+
617
__all__ = [
718
"UiPathOpenAI",
819
"OpenAIModel",
20+
"GeminiModel",
21+
"BedrockModel",
922
]

src/uipath_llamaindex/llms/_openai.py

Lines changed: 2 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,10 @@
11
import os
2-
from enum import Enum
32
from typing import Any
43

54
from llama_index.llms.azure_openai import AzureOpenAI # type: ignore
65
from uipath.utils import EndpointManager
76

8-
9-
class OpenAIModel(Enum):
10-
GPT_4_1_2025_04_14 = "gpt-4.1-2025-04-14"
11-
GPT_4_1_MINI_2025_04_14 = "gpt-4.1-mini-2025-04-14"
12-
GPT_4_1_NANO_2025_04_14 = "gpt-4.1-nano-2025-04-14"
13-
GPT_4O_2024_05_13 = "gpt-4o-2024-05-13"
14-
GPT_4O_2024_08_06 = "gpt-4o-2024-08-06"
15-
GPT_4O_2024_11_20 = "gpt-4o-2024-11-20"
16-
GPT_4O_MINI_2024_07_18 = "gpt-4o-mini-2024-07-18"
17-
O3_MINI_2025_01_31 = "o3-mini-2025-01-31"
18-
TEXT_DAVINCI_003 = "text-davinci-003"
7+
from .supported_models import OpenAIModel
198

209

2110
class UiPathOpenAI(AzureOpenAI):
@@ -42,7 +31,7 @@ def __init__(
4231
defaults = {
4332
"model": model_value,
4433
"deployment_name": model_value,
45-
"azure_endpoint": f"{base_url}/{EndpointManager.get_passthrough_endpoint().format(model=model, api_version=api_version)}",
34+
"azure_endpoint": f"{base_url}/{EndpointManager.get_passthrough_endpoint().format(model=model_value, api_version=api_version)}",
4635
"api_key": os.environ.get("UIPATH_ACCESS_TOKEN"),
4736
"api_version": api_version,
4837
"is_chat_model": True,
Lines changed: 220 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,220 @@
1+
import logging
2+
import os
3+
from typing import Optional
4+
5+
from uipath.utils import EndpointManager
6+
7+
from .supported_models import BedrockModel
8+
9+
logger = logging.getLogger(__name__)
10+
11+
12+
def _check_bedrock_dependencies() -> None:
13+
"""Check if required dependencies for UiPath Bedrock LLMs are installed."""
14+
import importlib.util
15+
16+
missing_packages = []
17+
18+
if importlib.util.find_spec("llama_index.llms.bedrock") is None:
19+
missing_packages.append("llama-index-llms-bedrock")
20+
21+
if importlib.util.find_spec("llama_index.llms.bedrock_converse") is None:
22+
missing_packages.append("llama-index-llms-bedrock-converse")
23+
24+
if importlib.util.find_spec("boto3") is None:
25+
missing_packages.append("boto3")
26+
27+
if importlib.util.find_spec("aiobotocore") is None:
28+
missing_packages.append("aiobotocore")
29+
30+
if missing_packages:
31+
packages_str = ", ".join(missing_packages)
32+
raise ImportError(
33+
f"The following packages are required to use UiPath Bedrock LLMs: {packages_str}\n"
34+
"Please install them using one of the following methods:\n\n"
35+
" # Using pip:\n"
36+
f" pip install uipath-llamaindex[bedrock]\n\n"
37+
" # Using uv:\n"
38+
f" uv add 'uipath-llamaindex[bedrock]'\n\n"
39+
)
40+
41+
42+
_check_bedrock_dependencies()
43+
44+
import boto3 # type: ignore[import-untyped]
45+
from llama_index.llms.bedrock import Bedrock # type: ignore[import-untyped]
46+
from llama_index.llms.bedrock_converse import ( # type: ignore[import-untyped]
47+
BedrockConverse,
48+
)
49+
50+
51+
class AwsBedrockCompletionsPassthroughClient:
    """Routes boto3 ``bedrock-runtime`` calls through the UiPath LLM Gateway.

    Builds boto3/aiobotocore clients whose outgoing requests are rewritten
    (URL plus auth/flavor headers) just before they are sent, so the AWS SDK
    machinery is reused while the traffic actually targets the gateway.
    """

    def __init__(
        self,
        model: str,
        token: str,
        api_flavor: str,
        verify: bool = False,
    ):
        """
        Args:
            model: Bedrock model identifier interpolated into the gateway path.
            token: UiPath bearer token used for the Authorization header.
            api_flavor: Gateway API flavor tag (e.g. "converse" or "invoke").
            verify: TLS certificate verification for the boto3 client.
                Defaults to False to preserve prior behavior.
                NOTE(review): disabling TLS verification is unsafe outside a
                trusted network — consider defaulting this to True.
        """
        self.model = model
        self.token = token
        self.api_flavor = api_flavor
        self._verify = verify
        self._vendor = "awsbedrock"
        # Cached absolute gateway URL; built lazily in _build_base_url().
        self._url: Optional[str] = None

    @property
    def endpoint(self) -> str:
        """Relative gateway path for this vendor/model pair."""
        vendor_endpoint = EndpointManager.get_vendor_endpoint()
        return vendor_endpoint.format(vendor=self._vendor, model=self.model)

    def _build_base_url(self) -> str:
        """Return the absolute gateway URL, caching it after the first build.

        Raises:
            ValueError: if the UIPATH_URL environment variable is not set.
        """
        if not self._url:
            env_uipath_url = os.getenv("UIPATH_URL")
            if env_uipath_url:
                self._url = f"{env_uipath_url.rstrip('/')}/{self.endpoint}"
            else:
                raise ValueError("UIPATH_URL environment variable is required")
        return self._url

    def get_client(self):
        """Create a sync bedrock-runtime client with request redirection.

        Region and AWS credentials are placeholders: boto3 requires them, but
        every request is rewritten to the gateway before it is sent.
        """
        client = boto3.client(
            "bedrock-runtime",
            region_name="us-east-1",
            aws_access_key_id="none",
            aws_secret_access_key="none",
            verify=self._verify,
        )
        client.meta.events.register(
            "before-send.bedrock-runtime.*", self._modify_request
        )
        return client

    def get_session(self):
        """Get aiobotocore session for async operations with custom event handlers."""
        from aiobotocore.session import get_session  # type: ignore[import-untyped]

        session = get_session()
        session.get_component("event_emitter").register(
            "before-send.bedrock-runtime.*", self._modify_request
        )
        return session

    def _modify_request(self, request, **kwargs):
        """Intercept boto3 request and redirect to LLM Gateway"""
        # Detect streaming based on URL suffix:
        # - converse-stream / invoke-with-response-stream -> streaming
        # - converse / invoke -> non-streaming
        streaming = "true" if request.url.endswith("-stream") else "false"
        request.url = self._build_base_url()

        headers = {
            "Authorization": f"Bearer {self.token}",
            "X-UiPath-LlmGateway-ApiFlavor": self.api_flavor,
            "X-UiPath-Streaming-Enabled": streaming,
        }

        # Propagate job/process context when running inside a UiPath job.
        job_key = os.getenv("UIPATH_JOB_KEY")
        process_key = os.getenv("UIPATH_PROCESS_KEY")
        if job_key:
            headers["X-UiPath-JobKey"] = job_key
        if process_key:
            headers["X-UiPath-ProcessKey"] = process_key

        request.headers.update(headers)
129+
130+
131+
class UiPathChatBedrockConverse(BedrockConverse):
    """BedrockConverse LLM that reaches AWS Bedrock via the UiPath LLM Gateway.

    Credentials and routing come from UiPath parameters / environment
    variables; the AWS-specific arguments forwarded to the parent class are
    placeholders, since every request is redirected to the gateway.
    """

    def __init__(
        self,
        org_id: Optional[str] = None,
        tenant_id: Optional[str] = None,
        token: Optional[str] = None,
        model: str = BedrockModel.anthropic_claude_haiku_4_5,
        **kwargs,
    ):
        # Fall back to the standard UiPath environment variables, then verify
        # that every required credential is actually present.
        org_id = org_id or os.getenv("UIPATH_ORGANIZATION_ID")
        tenant_id = tenant_id or os.getenv("UIPATH_TENANT_ID")
        token = token or os.getenv("UIPATH_ACCESS_TOKEN")

        for value, env_var, param in (
            (org_id, "UIPATH_ORGANIZATION_ID", "org_id"),
            (tenant_id, "UIPATH_TENANT_ID", "tenant_id"),
            (token, "UIPATH_ACCESS_TOKEN", "token"),
        ):
            if not value:
                raise ValueError(
                    f"{env_var} environment variable or {param} parameter is required"
                )

        # The passthrough client rewrites each outgoing request so the AWS SDK
        # actually talks to the UiPath LLM Gateway ("converse" flavor).
        passthrough = AwsBedrockCompletionsPassthroughClient(
            model=model,
            token=token,
            api_flavor="converse",
        )

        super().__init__(
            model=model,
            client=passthrough.get_client(),
            botocore_session=passthrough.get_session(),
            region_name="us-east-1",
            aws_access_key_id="none",
            aws_secret_access_key="none",
            **kwargs,
        )
175+
176+
177+
class UiPathChatBedrock(Bedrock):
    """Bedrock ("invoke" flavor) LLM routed through the UiPath LLM Gateway.

    AWS credentials/region forwarded to the parent are placeholders; requests
    are intercepted and redirected to the gateway by the passthrough client.
    """

    def __init__(
        self,
        org_id: Optional[str] = None,
        tenant_id: Optional[str] = None,
        token: Optional[str] = None,
        model: str = BedrockModel.anthropic_claude_haiku_4_5,
        context_size: int = 200000,
        **kwargs,
    ):
        # Resolve credentials from parameters first, env vars second.
        org_id = org_id or os.getenv("UIPATH_ORGANIZATION_ID")
        tenant_id = tenant_id or os.getenv("UIPATH_TENANT_ID")
        token = token or os.getenv("UIPATH_ACCESS_TOKEN")

        # Fail fast on the first missing credential, mirroring the env-var
        # name and parameter name in the error.
        unresolved = [
            (env_var, param)
            for value, env_var, param in (
                (org_id, "UIPATH_ORGANIZATION_ID", "org_id"),
                (tenant_id, "UIPATH_TENANT_ID", "tenant_id"),
                (token, "UIPATH_ACCESS_TOKEN", "token"),
            )
            if not value
        ]
        if unresolved:
            env_var, param = unresolved[0]
            raise ValueError(
                f"{env_var} environment variable or {param} parameter is required"
            )

        # "invoke" flavor; the sync Bedrock client only needs the boto3 client.
        passthrough = AwsBedrockCompletionsPassthroughClient(
            model=model,
            token=token,
            api_flavor="invoke",
        )

        super().__init__(
            model=model,
            client=passthrough.get_client(),
            context_size=context_size,
            aws_access_key_id="none",
            aws_secret_access_key="none",
            region_name="us-east-1",
            **kwargs,
        )
Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
1+
from enum import Enum, StrEnum
2+
3+
4+
class OpenAIModel(Enum):
    """Supported Azure OpenAI model identifiers.

    Each value is the dated model/deployment name passed through to the
    UiPath gateway (it is used both as ``model`` and ``deployment_name``).
    """

    GPT_4_1_2025_04_14 = "gpt-4.1-2025-04-14"
    GPT_4_1_MINI_2025_04_14 = "gpt-4.1-mini-2025-04-14"
    GPT_4_1_NANO_2025_04_14 = "gpt-4.1-nano-2025-04-14"
    GPT_4O_2024_05_13 = "gpt-4o-2024-05-13"
    GPT_4O_2024_08_06 = "gpt-4o-2024-08-06"
    GPT_4O_2024_11_20 = "gpt-4o-2024-11-20"
    GPT_4O_MINI_2024_07_18 = "gpt-4o-mini-2024-07-18"
    O3_MINI_2025_01_31 = "o3-mini-2025-01-31"
    # NOTE(review): text-davinci-003 is a legacy completions model retired by
    # Azure OpenAI — confirm the gateway still serves it.
    TEXT_DAVINCI_003 = "text-davinci-003"
14+
15+
16+
class GeminiModel:
17+
"""Supported Google Gemini model identifiers."""
18+
19+
gemini_2_5_pro = "gemini-2.5-pro"
20+
gemini_2_5_flash = "gemini-2.5-flash"
21+
gemini_2_0_flash_001 = "gemini-2.0-flash-001"
22+
23+
24+
class BedrockModel:
25+
"""Supported AWS Bedrock model identifiers."""
26+
27+
# Claude 3.7 models
28+
anthropic_claude_3_7_sonnet = "anthropic.claude-3-7-sonnet-20250219-v1:0"
29+
30+
# Claude 4 models
31+
anthropic_claude_sonnet_4 = "anthropic.claude-sonnet-4-20250514-v1:0"
32+
anthropic_claude_sonnet_4_5 = "anthropic.claude-sonnet-4-5-20250929-v1:0"
33+
anthropic_claude_haiku_4_5 = "anthropic.claude-haiku-4-5-20251001-v1:0"

0 commit comments

Comments
 (0)