Skip to content

Commit 6690fd1

Browse files
radugheoionmincu
authored and committed
feat: add bedrock chat models
1 parent 42603d3 commit 6690fd1

13 files changed

Lines changed: 1691 additions & 1 deletion

File tree

.gitignore

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -177,3 +177,8 @@ cython_debug/
177177
**/.uipath
178178
**/**.nupkg
179179
**/__uipath/
180+
.claude/settings.local.json
181+
182+
/.vscode/launch.json
183+
184+
playground.py

pyproject.toml

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ dependencies = [
99
"llama-index>=0.14.8",
1010
"llama-index-embeddings-azure-openai>=0.4.1",
1111
"llama-index-llms-azure-openai>=0.4.2",
12+
"llama-index-llms-google-genai>=0.8.0",
1213
"openinference-instrumentation-llama-index>=4.3.9",
1314
"uipath>=2.2.26, <2.3.0",
1415
]
@@ -24,6 +25,19 @@ maintainers = [
2425
{ name = "Cristian Pufu", email = "cristian.pufu@uipath.com" }
2526
]
2627

28+
[project.optional-dependencies]
29+
bedrock = [
30+
"llama-index-llms-bedrock>=0.3.0",
31+
"llama-index-llms-bedrock-converse>=0.3.0",
32+
"boto3>=1.28.0",
33+
"aiobotocore>=2.5.0",
34+
]
35+
vertex = [
36+
"llama-index-llms-vertex>=0.4.0",
37+
"google-cloud-aiplatform>=1.38.0",
38+
"httpx>=0.27.0",
39+
]
40+
2741
[project.entry-points."uipath.middlewares"]
2842
register = "uipath_llamaindex.middlewares:register_middleware"
2943

src/uipath_llamaindex/llms/__init__.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,18 @@
22
OpenAIModel,
33
UiPathOpenAI,
44
)
5+
from .supported_models import (
6+
BedrockModels,
7+
GeminiModels,
8+
OpenAIModels,
9+
)
10+
from .vertex import UiPathVertex
511

612
__all__ = [
713
"UiPathOpenAI",
14+
"UiPathVertex",
815
"OpenAIModel",
16+
"OpenAIModels",
17+
"GeminiModels",
18+
"BedrockModels",
919
]
Lines changed: 218 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,218 @@
1+
import logging
2+
import os
3+
from typing import Optional
4+
5+
from uipath.utils import EndpointManager
6+
7+
from .supported_models import BedrockModels
8+
9+
logger = logging.getLogger(__name__)
10+
11+
12+
def _check_bedrock_dependencies() -> None:
13+
"""Check if required dependencies for UiPath Bedrock LLMs are installed."""
14+
import importlib.util
15+
16+
missing_packages = []
17+
18+
if importlib.util.find_spec("llama_index.llms.bedrock") is None:
19+
missing_packages.append("llama-index-llms-bedrock")
20+
21+
if importlib.util.find_spec("llama_index.llms.bedrock_converse") is None:
22+
missing_packages.append("llama-index-llms-bedrock-converse")
23+
24+
if importlib.util.find_spec("boto3") is None:
25+
missing_packages.append("boto3")
26+
27+
if importlib.util.find_spec("aiobotocore") is None:
28+
missing_packages.append("aiobotocore")
29+
30+
if missing_packages:
31+
packages_str = ", ".join(missing_packages)
32+
raise ImportError(
33+
f"The following packages are required to use UiPath Bedrock LLMs: {packages_str}\n"
34+
"Please install them using one of the following methods:\n\n"
35+
" # Using pip:\n"
36+
f" pip install uipath-llamaindex[bedrock]\n\n"
37+
" # Using uv:\n"
38+
f" uv add 'uipath-llamaindex[bedrock]'\n\n"
39+
)
40+
41+
42+
# Fail fast at import time: verify the optional [bedrock] dependencies exist
# before attempting the imports below, so users get an actionable install
# message rather than a bare ModuleNotFoundError.
_check_bedrock_dependencies()

import boto3
from llama_index.llms.bedrock import Bedrock
from llama_index.llms.bedrock_converse import BedrockConverse
47+
48+
49+
class AwsBedrockCompletionsPassthroughClient:
    """Factory for boto3/aiobotocore clients whose Bedrock requests are
    rerouted to the UiPath LLM Gateway instead of AWS.

    The produced clients carry placeholder AWS credentials; authentication is
    done with the UiPath bearer token injected into every outgoing request.
    """

    def __init__(
        self,
        model: str,
        token: str,
        api_flavor: str,
    ):
        self.model = model
        self.token = token
        self.api_flavor = api_flavor
        self._vendor = "awsbedrock"
        # Lazily resolved full gateway URL (see _build_base_url).
        self._url: Optional[str] = None

    @property
    def endpoint(self) -> str:
        # The vendor endpoint template comes from the UiPath SDK and is
        # filled in with the vendor slug and requested model.
        template = EndpointManager.get_vendor_endpoint()
        return template.format(vendor=self._vendor, model=self.model)

    def _build_base_url(self) -> str:
        """Resolve and cache the gateway URL; requires UIPATH_URL to be set."""
        if self._url:
            return self._url
        base_url = os.getenv("UIPATH_URL")
        if not base_url:
            raise ValueError("UIPATH_URL environment variable is required")
        self._url = f"{base_url.rstrip('/')}/{self.endpoint}"
        return self._url

    def get_client(self):
        """Return a sync bedrock-runtime client that targets the gateway."""
        # Region and credentials are placeholders — the request is rewritten
        # by _modify_request and never reaches AWS.
        # NOTE(review): verify=False disables TLS certificate verification on
        # the rewritten request; confirm this is intentional for the gateway.
        client = boto3.client(
            "bedrock-runtime",
            region_name="us-east-1",
            aws_access_key_id="none",
            aws_secret_access_key="none",
            verify=False,
        )
        client.meta.events.register(
            "before-send.bedrock-runtime.*", self._modify_request
        )
        return client

    def get_session(self):
        """Get aiobotocore session for async operations with custom event handlers."""
        from aiobotocore.session import get_session

        session = get_session()
        session.get_component("event_emitter").register(
            "before-send.bedrock-runtime.*", self._modify_request
        )
        return session

    def _modify_request(self, request, **kwargs):
        """Intercept boto3 request and redirect to LLM Gateway"""
        # Streaming operations end in "-stream" (converse-stream,
        # invoke-with-response-stream); everything else is non-streaming.
        is_streaming = request.url.endswith("-stream")
        request.url = self._build_base_url()

        extra_headers = {
            "Authorization": f"Bearer {self.token}",
            "X-UiPath-LlmGateway-ApiFlavor": self.api_flavor,
            "X-UiPath-Streaming-Enabled": "true" if is_streaming else "false",
        }

        # Propagate job/process identity when running inside a UiPath job.
        for env_var, header_name in (
            ("UIPATH_JOB_KEY", "X-UiPath-JobKey"),
            ("UIPATH_PROCESS_KEY", "X-UiPath-ProcessKey"),
        ):
            value = os.getenv(env_var)
            if value:
                extra_headers[header_name] = value

        request.headers.update(extra_headers)
127+
128+
129+
class UiPathChatBedrockConverse(BedrockConverse):
    """BedrockConverse LLM whose traffic is routed through the UiPath LLM
    Gateway.

    Credentials are resolved from the constructor arguments first, then from
    the UIPATH_* environment variables; a missing value raises ValueError.
    """

    def __init__(
        self,
        org_id: Optional[str] = None,
        tenant_id: Optional[str] = None,
        token: Optional[str] = None,
        model: str = BedrockModels.anthropic_claude_haiku_4_5,
        **kwargs,
    ):
        org_id = org_id or os.getenv("UIPATH_ORGANIZATION_ID")
        tenant_id = tenant_id or os.getenv("UIPATH_TENANT_ID")
        token = token or os.getenv("UIPATH_ACCESS_TOKEN")

        # Validate every credential up front so callers get a precise error.
        for param_name, env_var, value in (
            ("org_id", "UIPATH_ORGANIZATION_ID", org_id),
            ("tenant_id", "UIPATH_TENANT_ID", tenant_id),
            ("token", "UIPATH_ACCESS_TOKEN", token),
        ):
            if not value:
                raise ValueError(
                    f"{env_var} environment variable or {param_name} parameter is required"
                )

        # NOTE(review): org_id/tenant_id are validated but never forwarded
        # anywhere; confirm the gateway derives them from the bearer token.
        gateway = AwsBedrockCompletionsPassthroughClient(
            model=model,
            token=token,
            api_flavor="converse",
        )

        # Region/credentials are placeholders — the passthrough client's
        # event hook rewrites every request to the UiPath gateway.
        super().__init__(
            model=model,
            client=gateway.get_client(),
            botocore_session=gateway.get_session(),
            region_name="us-east-1",
            aws_access_key_id="none",
            aws_secret_access_key="none",
            **kwargs,
        )
173+
174+
175+
class UiPathChatBedrock(Bedrock):
    """Bedrock invoke-API LLM whose traffic is routed through the UiPath LLM
    Gateway.

    Credentials are resolved from the constructor arguments first, then from
    the UIPATH_* environment variables; a missing value raises ValueError.
    """

    def __init__(
        self,
        org_id: Optional[str] = None,
        tenant_id: Optional[str] = None,
        token: Optional[str] = None,
        model: str = BedrockModels.anthropic_claude_haiku_4_5,
        context_size: int = 200000,
        **kwargs,
    ):
        org_id = org_id or os.getenv("UIPATH_ORGANIZATION_ID")
        tenant_id = tenant_id or os.getenv("UIPATH_TENANT_ID")
        token = token or os.getenv("UIPATH_ACCESS_TOKEN")

        # Validate every credential up front so callers get a precise error.
        for param_name, env_var, value in (
            ("org_id", "UIPATH_ORGANIZATION_ID", org_id),
            ("tenant_id", "UIPATH_TENANT_ID", tenant_id),
            ("token", "UIPATH_ACCESS_TOKEN", token),
        ):
            if not value:
                raise ValueError(
                    f"{env_var} environment variable or {param_name} parameter is required"
                )

        # NOTE(review): org_id/tenant_id are validated but never forwarded
        # anywhere; confirm the gateway derives them from the bearer token.
        gateway = AwsBedrockCompletionsPassthroughClient(
            model=model,
            token=token,
            api_flavor="invoke",
        )

        # Region/credentials are placeholders — the passthrough client's
        # event hook rewrites every request to the UiPath gateway.
        super().__init__(
            model=model,
            client=gateway.get_client(),
            context_size=context_size,
            aws_access_key_id="none",
            aws_secret_access_key="none",
            region_name="us-east-1",
            **kwargs,
        )
Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
class OpenAIModels:
    """Supported OpenAI model identifiers.

    Attribute names are snake_case forms of the model IDs; values are the
    exact identifier strings to pass as the ``model`` argument. The trailing
    date pins a specific model snapshot.
    """

    # GPT-4o models
    gpt_4o_2024_05_13 = "gpt-4o-2024-05-13"
    gpt_4o_2024_08_06 = "gpt-4o-2024-08-06"
    gpt_4o_2024_11_20 = "gpt-4o-2024-11-20"
    gpt_4o_mini_2024_07_18 = "gpt-4o-mini-2024-07-18"

    # GPT-4.1 models
    gpt_4_1_2025_04_14 = "gpt-4.1-2025-04-14"
    gpt_4_1_mini_2025_04_14 = "gpt-4.1-mini-2025-04-14"
    gpt_4_1_nano_2025_04_14 = "gpt-4.1-nano-2025-04-14"

    # GPT-5 models
    gpt_5_2025_08_07 = "gpt-5-2025-08-07"
    gpt_5_chat_2025_08_07 = "gpt-5-chat-2025-08-07"
    gpt_5_mini_2025_08_07 = "gpt-5-mini-2025-08-07"
    gpt_5_nano_2025_08_07 = "gpt-5-nano-2025-08-07"

    # GPT-5.1 models
    gpt_5_1_2025_11_13 = "gpt-5.1-2025-11-13"
23+
24+
25+
class GeminiModels:
    """Supported Google Gemini model identifiers.

    Values are the exact identifier strings to pass as the ``model`` argument.
    """

    gemini_2_5_pro = "gemini-2.5-pro"
    gemini_2_5_flash = "gemini-2.5-flash"
    gemini_2_0_flash_001 = "gemini-2.0-flash-001"
31+
32+
33+
class BedrockModels:
    """Supported AWS Bedrock model identifiers.

    Values are full Bedrock model IDs (``vendor.model-date-version``) to pass
    as the ``model`` argument of the UiPath Bedrock LLM classes.
    """

    # Claude 3.7 models
    anthropic_claude_3_7_sonnet = "anthropic.claude-3-7-sonnet-20250219-v1:0"

    # Claude 4 models
    anthropic_claude_sonnet_4 = "anthropic.claude-sonnet-4-20250514-v1:0"
    anthropic_claude_sonnet_4_5 = "anthropic.claude-sonnet-4-5-20250929-v1:0"
    anthropic_claude_haiku_4_5 = "anthropic.claude-haiku-4-5-20251001-v1:0"

0 commit comments

Comments
 (0)