Skip to content

Commit cc98864

Browse files
committed
fix(llm): fix vertex streaming
1 parent 30411a6 commit cc98864

3 files changed

Lines changed: 6 additions & 49 deletions

File tree

pyproject.toml

Lines changed: 1 addition & 1 deletion
```diff
@@ -1,6 +1,6 @@
 [project]
 name = "uipath-langchain"
-version = "0.5.7"
+version = "0.5.8"
 description = "Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform"
 readme = { file = "README.md", content-type = "text/markdown" }
 requires-python = ">=3.11"
```

src/uipath_langchain/chat/vertex.py

Lines changed: 4 additions & 47 deletions
```diff
@@ -55,7 +55,10 @@ def _rewrite_vertex_url(original_url: str, gateway_url: str) -> httpx.URL | None
     Returns the gateway URL, or None if no rewrite needed.
     """
     if "generateContent" in original_url or "streamGenerateContent" in original_url:
-        return httpx.URL(gateway_url)
+        url = httpx.URL(gateway_url)
+        if "alt=sse" in original_url:
+            url = url.copy_with(params={"alt": "sse"})
+        return url
     return None
```
6063

6164

```diff
@@ -286,49 +289,3 @@ async def _agenerate(
             messages, stop=stop, run_manager=run_manager, **kwargs
         )
         return self._merge_finish_reason_to_response_metadata(result)
-
-    def _stream(self, messages, stop=None, run_manager=None, **kwargs):
-        """Streaming fallback - calls _generate and yields single response."""
-        from langchain_core.messages import AIMessageChunk
-        from langchain_core.outputs import ChatGenerationChunk
-
-        result = self._generate(messages, stop=stop, run_manager=run_manager, **kwargs)
-
-        if result.generations:
-            message = result.generations[0].message
-            chunk = AIMessageChunk(
-                content=message.content,
-                additional_kwargs=message.additional_kwargs,
-                response_metadata=getattr(message, "response_metadata", {}),
-                id=message.id,
-                tool_calls=getattr(message, "tool_calls", []),
-                tool_call_chunks=getattr(message, "tool_call_chunks", []),
-            )
-            if hasattr(message, "usage_metadata") and message.usage_metadata:
-                chunk.usage_metadata = message.usage_metadata
-
-            yield ChatGenerationChunk(message=chunk)
-
-    async def _astream(self, messages, stop=None, run_manager=None, **kwargs):
-        """Async streaming fallback - calls _agenerate and yields single response."""
-        from langchain_core.messages import AIMessageChunk
-        from langchain_core.outputs import ChatGenerationChunk
-
-        result = await self._agenerate(
-            messages, stop=stop, run_manager=run_manager, **kwargs
-        )
-
-        if result.generations:
-            message = result.generations[0].message
-            chunk = AIMessageChunk(
-                content=message.content,
-                additional_kwargs=message.additional_kwargs,
-                response_metadata=getattr(message, "response_metadata", {}),
-                id=message.id,
-                tool_calls=getattr(message, "tool_calls", []),
-                tool_call_chunks=getattr(message, "tool_call_chunks", []),
-            )
-            if hasattr(message, "usage_metadata") and message.usage_metadata:
-                chunk.usage_metadata = message.usage_metadata
-
-            yield ChatGenerationChunk(message=chunk)
```

uv.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)