Skip to content

Commit ba8315b

Browse files
authored
[JAR-9116] add interrupt support for convo coded agent (#525)
1 parent 9d4af3e commit ba8315b

6 files changed

Lines changed: 128 additions & 15 deletions

File tree

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "uipath-langchain"
3-
version = "0.5.52"
3+
version = "0.5.53"
44
description = "Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform"
55
readme = { file = "README.md", content-type = "text/markdown" }
66
requires-python = ">=3.11"

samples/chat-hitl-agent/graph.py

Lines changed: 9 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,16 @@
11
from langchain_anthropic import ChatAnthropic
22
from langchain_tavily import TavilySearch
33
from langchain.agents import create_agent
4-
from langchain.agents.middleware import HumanInTheLoopMiddleware
4+
from uipath_langchain.chat import hitl_tool
55

66
tavily_tool = TavilySearch(max_results=5)
77

8+
9+
@hitl_tool
def search_web(query: str) -> str:
    """Search the web for information using Tavily."""
    payload = {"query": query}
    return tavily_tool.invoke(payload)
13+
814
system_prompt = """
915
You are a Culinary Research & Recipe Assistant.
1016
@@ -14,22 +20,14 @@
1420
- Recommend dishes based on preferences.
1521
- Explain techniques clearly and safely.
1622
17-
Use TavilySearch whenever external information is useful.
23+
Use search_web whenever external information is useful.
1824
Be concise, helpful, and food-savvy.
1925
"""
2026

2127
# Anthropic chat model driving the agent.
llm = ChatAnthropic(model="claude-3-7-sonnet-latest")

# Build the agent. search_web is wrapped by @hitl_tool, so every call to it
# interrupts the graph and waits for user confirmation before executing —
# no separate HumanInTheLoopMiddleware configuration is needed.
graph = create_agent(
    model=llm,
    tools=[search_web],
    system_prompt=system_prompt,
)

samples/chat-hitl-agent/pyproject.toml

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,7 @@ dependencies = [
99
"langchain-anthropic>=1.2.0",
1010
"tavily-python>=0.7.13",
1111
"langchain-tavily>=0.2.13",
12-
"uipath-langchain>=0.4.15, <0.5.0",
13-
"uipath>=2.5.20, <2.6.0",
12+
"uipath-langchain>=0.5.53, <0.6.0"
1413
]
1514

1615
[dependency-groups]

src/uipath_langchain/chat/__init__.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,10 @@ def __getattr__(name):
2525
from .openai import UiPathChatOpenAI
2626

2727
return UiPathChatOpenAI
28+
if name == "hitl_tool":
29+
from .hitl import hitl_tool
30+
31+
return hitl_tool
2832
if name in ("OpenAIModels", "BedrockModels", "GeminiModels"):
2933
from . import supported_models
3034

@@ -46,4 +50,5 @@ def __getattr__(name):
4650
"GeminiModels",
4751
"LLMProvider",
4852
"APIFlavor",
53+
"hitl_tool",
4954
]

src/uipath_langchain/chat/hitl.py

Lines changed: 111 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,111 @@
1+
import functools
2+
import inspect
3+
from inspect import Parameter
4+
from typing import Annotated, Any, Callable
5+
6+
from langchain_core.tools import BaseTool, InjectedToolCallId
7+
from langchain_core.tools import tool as langchain_tool
8+
from langgraph.types import interrupt
9+
from uipath.core.chat import (
10+
UiPathConversationToolCallConfirmationValue,
11+
)
12+
13+
_CANCELLED_MESSAGE = "Cancelled by user"
14+
15+
16+
def _request_approval(
    tool_args: dict[str, Any],
    tool: BaseTool,
) -> dict[str, Any] | None:
    """Interrupt the graph to request user approval for a tool call.

    Args:
        tool_args: Keyword arguments the model supplied for the tool. The
            injected ``tool_call_id`` entry is popped from this dict in place.
        tool: The LangChain tool whose execution is pending approval.

    Returns:
        The (possibly edited) tool arguments if approved, or ``None`` if the
        user rejected the call.

    Raises:
        KeyError: If ``tool_args`` does not contain ``tool_call_id``.
    """
    # The injected id is transport metadata, not a real tool argument —
    # remove it before showing/forwarding the arguments.
    tool_call_id: str = tool_args.pop("tool_call_id")

    input_schema: dict[str, Any] = {}
    tool_call_schema = getattr(
        tool, "tool_call_schema", None
    )  # doesn't include InjectedToolCallId (tool id from claude/oai/etc.)
    if tool_call_schema is not None:
        input_schema = tool_call_schema.model_json_schema()

    response = interrupt(
        UiPathConversationToolCallConfirmationValue(
            tool_call_id=tool_call_id,
            tool_name=tool.name,
            input_schema=input_schema,
            input_value=tool_args,
        )
    )

    # The resume payload from CAS has shape:
    # {"type": "uipath_cas_tool_call_confirmation",
    #  "value": {"approved": bool, "input": <edited args | None>}}
    if not isinstance(response, dict):
        # Unexpected resume payload: proceed with the original arguments.
        return tool_args

    confirmation = response.get("value", response)
    if not isinstance(confirmation, dict):
        # Guard against a malformed "value" entry (the original code would
        # raise AttributeError on .get here); treat it like an approval.
        return tool_args
    if not confirmation.get("approved", True):
        return None

    # Use an explicit None check (not `or`): an edited empty-dict input for a
    # zero-argument tool is falsy but still a valid, deliberate edit.
    edited_input = confirmation.get("input")
    return edited_input if edited_input is not None else tool_args
53+
54+
55+
def hitl_tool(
    func: Callable[..., Any] | None = None,
    *,
    name: str | None = None,
    description: str | None = None,
    args_schema: type | None = None,
    return_direct: bool = False,
) -> BaseTool | Callable[..., BaseTool]:
    """Decorator that turns a function into a LangChain tool requiring
    human approval before each execution.

    Usable both bare (``@hitl_tool``) and with keyword arguments
    (``@hitl_tool(name=..., description=...)``). The wrapped tool interrupts
    the graph via ``_request_approval`` on every call; if the user rejects,
    the tool returns ``_CANCELLED_MESSAGE`` instead of running.

    Args:
        func: The function to wrap when used as a bare decorator.
        name: Optional explicit tool name passed to ``langchain_core.tools.tool``.
        description: Optional tool description.
        args_schema: Optional explicit argument schema type.
        return_direct: Passed through to ``langchain_core.tools.tool``.

    Returns:
        A ``BaseTool`` (bare usage) or a decorator producing one.
    """

    def decorator(fn: Callable[..., Any]) -> BaseTool:
        # One-element list used as a mutable cell: `wrapper` needs a reference
        # to the BaseTool, which only exists after langchain_tool() runs below.
        _created_tool: list[BaseTool] = []

        # wrap the tool/function
        @functools.wraps(fn)
        def wrapper(**tool_args: Any) -> Any:
            # Ask the user before executing; None means the call was rejected.
            approved_args = _request_approval(tool_args, _created_tool[0])
            if approved_args is None:
                return _CANCELLED_MESSAGE
            return fn(**approved_args)

        # rewrite the signature: e.g. (query: str) -> (query: str, *, tool_call_id: str)
        # so LangChain injects the model's tool-call id without exposing it to the LLM.
        original_sig = inspect.signature(fn)
        params = list[Parameter](original_sig.parameters.values()) + [
            inspect.Parameter(
                "tool_call_id",
                inspect.Parameter.KEYWORD_ONLY,
                annotation=Annotated[str, InjectedToolCallId],
            ),
        ]
        wrapper.__signature__ = original_sig.replace(parameters=params)  # type: ignore[attr-defined]
        # Keep __annotations__ in sync with the rewritten signature; schema
        # inference reads annotations as well as the signature.
        wrapper.__annotations__ = {
            **fn.__annotations__,
            "tool_call_id": Annotated[str, InjectedToolCallId],
        }

        # Create the LangChain tool
        if name is not None:
            result: BaseTool = langchain_tool(
                name,
                description=description,
                args_schema=args_schema,
                return_direct=return_direct,
            )(wrapper)
        else:
            result = langchain_tool(
                wrapper,
                description=description,
                args_schema=args_schema,
                return_direct=return_direct,
            )

        # Backfill the cell so wrapper can reach the finished tool at call time.
        _created_tool.append(result)
        return result

    # Bare-decorator usage: @hitl_tool with no parentheses.
    if func is not None:
        return decorator(func)
    return decorator

uv.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)