Skip to content

Commit 945fa74

Browse files
committed
fix: io inference for agent workflows
1 parent b0fe1e3 commit 945fa74

4 files changed

Lines changed: 61 additions & 18 deletions

File tree

pyproject.toml

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "uipath-llamaindex"
3-
version = "0.0.34"
3+
version = "0.0.35"
44
description = "UiPath LlamaIndex SDK"
55
readme = { file = "README.md", content-type = "text/markdown" }
66
requires-python = ">=3.10"
@@ -9,7 +9,7 @@ dependencies = [
99
"llama-index-embeddings-azure-openai>=0.3.8",
1010
"llama-index-llms-azure-openai>=0.3.2",
1111
"openinference-instrumentation-llama-index>=4.3.0",
12-
"uipath>=2.1.35, <2.2.0",
12+
"uipath>=2.1.36, <2.2.0",
1313
]
1414
classifiers = [
1515
"Development Status :: 3 - Alpha",

src/uipath_llamaindex/_cli/_runtime/_runtime.py

Lines changed: 14 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -4,6 +4,7 @@
44
import pickle
55
from typing import Optional, cast
66

7+
from llama_index.core.agent.workflow.workflow_events import AgentOutput
78
from llama_index.core.workflow import (
89
Context,
910
HumanResponseEvent,
@@ -102,7 +103,19 @@ async def execute(self) -> Optional[UiPathRuntimeResult]:
102103
UiPathErrorCategory.USER,
103104
) from e
104105
try:
105-
serialized_output = self._serialize_object(output)
106+
if isinstance(output, AgentOutput):
107+
structured_response = getattr(
108+
output, "structured_response", None
109+
)
110+
if structured_response is not None:
111+
serialized_output = self._serialize_object(
112+
structured_response
113+
)
114+
else:
115+
serialized_output = self._serialize_object(output)
116+
else:
117+
serialized_output = self._serialize_object(output)
118+
106119
# create simple kvp from string
107120
if type(serialized_output) is str:
108121
serialized_output = {"result": serialized_output}

src/uipath_llamaindex/_cli/cli_init.py

Lines changed: 40 additions & 10 deletions
Original file line number · Diff line number · Diff line change
@@ -2,8 +2,9 @@
22
import json
33
import os
44
import uuid
5-
from typing import Any, Callable, Dict, overload
5+
from typing import Any, Callable, Dict, Optional, Type, overload
66

7+
from llama_index.core.agent.workflow import BaseWorkflowAgent
78
from llama_index.core.workflow import (
89
HumanResponseEvent,
910
InputRequiredEvent,
@@ -15,6 +16,7 @@
1516
get_steps_from_class,
1617
get_steps_from_instance,
1718
)
19+
from pydantic import BaseModel
1820
from uipath._cli._utils._console import ConsoleLogger
1921
from uipath._cli._utils._parse_ast import generate_bindings_json # type: ignore
2022
from uipath._cli.middlewares import MiddlewareResult
@@ -71,18 +73,46 @@ def generate_schema_from_workflow(workflow: Workflow) -> Dict[str, Any]:
7173

7274
# Generate input schema from StartEvent using Pydantic's schema method
7375
try:
74-
input_schema = start_event_class.model_json_schema()
75-
# Resolve references and handle nullable types
76-
input_schema = resolve_refs(input_schema)
77-
schema["input"]["properties"] = process_nullable_types(
78-
input_schema.get("properties", {})
79-
)
80-
schema["input"]["required"] = input_schema.get("required", [])
76+
if isinstance(workflow, BaseWorkflowAgent):
77+
# For workflow agents, define a simple schema with just user_msg
78+
schema["input"] = {
79+
"type": "object",
80+
"properties": {
81+
"user_msg": {
82+
"type": "string",
83+
"title": "User Message",
84+
"description": "The user's question or request",
85+
}
86+
},
87+
"required": ["user_msg"],
88+
}
89+
else:
90+
input_schema = start_event_class.model_json_schema()
91+
# Resolve references and handle nullable types
92+
input_schema = resolve_refs(input_schema)
93+
schema["input"]["properties"] = process_nullable_types(
94+
input_schema.get("properties", {})
95+
)
96+
schema["input"]["required"] = input_schema.get("required", [])
8197
except (AttributeError, Exception):
8298
pass
8399

84-
# For output schema, check if it's the base StopEvent or a custom subclass
85-
if stop_event_class is StopEvent:
100+
# Handle output schema - check if it's a workflow agent with output_cls first
101+
if isinstance(workflow, BaseWorkflowAgent):
102+
output_cls: Optional[Type[BaseModel]] = getattr(workflow, "output_cls", None)
103+
if output_cls is not None:
104+
try:
105+
output_schema = output_cls.model_json_schema()
106+
# Resolve references and handle nullable types
107+
output_schema = resolve_refs(output_schema)
108+
schema["output"]["properties"] = process_nullable_types(
109+
output_schema.get("properties", {})
110+
)
111+
schema["output"]["required"] = output_schema.get("required", [])
112+
except (AttributeError, Exception):
113+
pass
114+
# Check if it's the base StopEvent or a custom subclass
115+
elif stop_event_class is StopEvent:
86116
# base StopEvent
87117
schema["output"] = {
88118
"type": "object",

uv.lock

Lines changed: 5 additions & 5 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)