diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py index 07b41e3d8da1..7cc892c2e719 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py @@ -3,25 +3,30 @@ # --------------------------------------------------------- __path__ = __import__("pkgutil").extend_path(__path__, __name__) -from typing import TYPE_CHECKING, Optional, Any +from typing import TYPE_CHECKING, Any, Optional -from azure.ai.agentserver.agentframework.agent_framework import AgentFrameworkCBAgent -from azure.ai.agentserver.agentframework.tool_client import ToolClient from azure.ai.agentserver.agentframework._version import VERSION +from azure.ai.agentserver.agentframework._agent_framework import AgentFrameworkCBAgent +from azure.ai.agentserver.agentframework._foundry_tools import FoundryToolsChatMiddleware from azure.ai.agentserver.core.application._package_metadata import PackageMetadata, set_current_app if TYPE_CHECKING: # pragma: no cover from azure.core.credentials_async import AsyncTokenCredential -def from_agent_framework(agent, - credentials: Optional["AsyncTokenCredential"] = None, - **kwargs: Any) -> "AgentFrameworkCBAgent": +def from_agent_framework( + agent, + credentials: Optional["AsyncTokenCredential"] = None, + **kwargs: Any, +) -> "AgentFrameworkCBAgent": return AgentFrameworkCBAgent(agent, credentials=credentials, **kwargs) -__all__ = ["from_agent_framework", "ToolClient"] +__all__ = [ + "from_agent_framework", + "FoundryToolsChatMiddleware", +] __version__ = VERSION set_current_app(PackageMetadata.from_dist("azure-ai-agentserver-agentframework")) \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_agent_framework.py similarity index 63% rename from sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py rename to sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_agent_framework.py index b55c4aec3960..9eb649a38f19 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_agent_framework.py @@ -6,7 +6,6 @@ import os from typing import TYPE_CHECKING, Any, AsyncGenerator, Awaitable, Optional, Protocol, Union, List -import inspect from agent_framework import AgentProtocol, AIFunction from agent_framework.azure import AzureAIClient # pylint: disable=no-name-in-module @@ -21,6 +20,7 @@ Response as OpenAIResponse, ResponseStreamEvent, ) +from azure.ai.agentserver.core.models.projects import ResponseErrorEvent, ResponseFailedEvent from .models.agent_framework_input_converters import AgentFrameworkInputConverter from .models.agent_framework_output_non_streaming_converter import ( @@ -28,7 +28,6 @@ ) from .models.agent_framework_output_streaming_converter import AgentFrameworkOutputStreamingConverter from .models.constants import Constants -from .tool_client import ToolClient if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -39,12 
+38,12 @@ class AgentFactory(Protocol): """Protocol for agent factory functions. - An agent factory is a callable that takes a ToolClient and returns + An agent factory is a callable that takes a list of tools and returns an AgentProtocol, either synchronously or asynchronously. """ def __call__(self, tools: List[AIFunction]) -> Union[AgentProtocol, Awaitable[AgentProtocol]]: - """Create an AgentProtocol using the provided ToolClient. + """Create an AgentProtocol using the provided tools. :param tools: The list of AIFunction tools available to the agent. :type tools: List[AIFunction] @@ -71,7 +70,7 @@ class AgentFrameworkCBAgent(FoundryCBAgent): - Supports both streaming and non-streaming responses based on the `stream` flag. """ - def __init__(self, agent: Union[AgentProtocol, AgentFactory], + def __init__(self, agent: AgentProtocol, credentials: "Optional[AsyncTokenCredential]" = None, **kwargs: Any): """Initialize the AgentFrameworkCBAgent with an AgentProtocol or a factory function. @@ -83,14 +82,7 @@ def __init__(self, agent: Union[AgentProtocol, AgentFactory], :type credentials: Optional[AsyncTokenCredential] """ super().__init__(credentials=credentials, **kwargs) # pylint: disable=unexpected-keyword-arg - self._agent_or_factory: Union[AgentProtocol, AgentFactory] = agent - self._resolved_agent: "Optional[AgentProtocol]" = None - # If agent is already instantiated, use it directly - if isinstance(agent, AgentProtocol): - self._resolved_agent = agent - logger.info(f"Initialized AgentFrameworkCBAgent with agent: {type(agent).__name__}") - else: - logger.info("Initialized AgentFrameworkCBAgent with agent factory") + self._agent: AgentProtocol = agent @property def agent(self) -> "Optional[AgentProtocol]": @@ -99,7 +91,7 @@ def agent(self) -> "Optional[AgentProtocol]": :return: The resolved AgentProtocol if available, None otherwise. :rtype: Optional[AgentProtocol] """ - return self._resolved_agent + return self._agent def _resolve_stream_timeout(self, request_body: CreateResponse) -> float: """Resolve idle timeout for streaming updates. @@ -121,51 +113,6 @@ def _resolve_stream_timeout(self, request_body: CreateResponse) -> float: env_val = os.getenv(Constants.AGENTS_ADAPTER_STREAM_TIMEOUT_S) return float(env_val) if env_val is not None else float(Constants.DEFAULT_STREAM_TIMEOUT_S) - async def _resolve_agent(self, context: AgentRunContext): - """Resolve the agent if it's a factory function (for single-use/first-time resolution). - Creates a ToolClient and calls the factory function with it. - This is used for the initial resolution. - - :param context: The agent run context containing tools and user information. 
- :type context: AgentRunContext - """ - if callable(self._agent_or_factory): - logger.debug("Resolving agent from factory function") - - # Create ToolClient with credentials - tool_client = self.get_tool_client(tools=context.get_tools(), user_info=context.get_user_info()) # pylint: disable=no-member - tool_client_wrapper = ToolClient(tool_client) - tools = await tool_client_wrapper.list_tools() - - result = self._agent_or_factory(tools) - if inspect.iscoroutine(result): - self._resolved_agent = await result - else: - self._resolved_agent = result - - logger.debug("Agent resolved successfully") - else: - # Should not reach here, but just in case - self._resolved_agent = self._agent_or_factory - - async def _resolve_agent_for_request(self, context: AgentRunContext): - - logger.debug("Resolving fresh agent from factory function for request") - - # Create ToolClient with credentials - tool_client = self.get_tool_client(tools=context.get_tools(), user_info=context.get_user_info()) # pylint: disable=no-member - tool_client_wrapper = ToolClient(tool_client) - tools = await tool_client_wrapper.list_tools() - - result = self._agent_or_factory(tools) - if inspect.iscoroutine(result): - agent = await result - else: - agent = result - - logger.debug("Fresh agent resolved successfully for request") - return agent, tool_client_wrapper - def init_tracing(self): try: exporter = os.environ.get(AdapterConstants.OTEL_EXPORTER_ENDPOINT) @@ -209,18 +156,7 @@ async def agent_run( # pylint: disable=too-many-statements OpenAIResponse, AsyncGenerator[ResponseStreamEvent, Any], ]: - # Resolve agent - always resolve if it's a factory function to get fresh agent each time - # For factories, get a new agent instance per request to avoid concurrency issues - tool_client = None try: - if callable(self._agent_or_factory): - agent, tool_client = await self._resolve_agent_for_request(context) - elif self._resolved_agent is None: - await self._resolve_agent(context) - agent = self._resolved_agent - else: - agent = self._resolved_agent - logger.info(f"Starting agent_run with stream={context.stream}") request_input = context.request.get("input") @@ -236,27 +172,56 @@ async def agent_run( # pylint: disable=too-many-statements async def stream_updates(): try: update_count = 0 - updates = agent.run_stream(message) - async for event in streaming_converter.convert(updates): - update_count += 1 - yield event - - logger.info("Streaming completed with %d updates", update_count) + try: + updates = self.agent.run_stream(message) + async for event in streaming_converter.convert(updates): + update_count += 1 + yield event + + logger.info("Streaming completed with %d updates", update_count) + except OAuthConsentRequiredError as e: + logger.info("OAuth consent required during streaming updates") + if update_count == 0: + async for event in self.respond_with_oauth_consent_astream(context, e): + yield event + else: + # If we've already emitted events, we cannot safely restart a new + # OAuth-consent stream (it would reset sequence numbers). 
+ yield ResponseErrorEvent( + sequence_number=streaming_converter.next_sequence(), + code="server_error", + message=f"OAuth consent required: {e.consent_url}", + param="agent_run", + ) + yield ResponseFailedEvent( + sequence_number=streaming_converter.next_sequence(), + response=streaming_converter._build_response(status="failed"), # pylint: disable=protected-access + ) + except Exception as e: # pylint: disable=broad-exception-caught + logger.error("Unhandled exception during streaming updates: %s", e, exc_info=True) + + # Emit well-formed error events instead of terminating the stream. + yield ResponseErrorEvent( + sequence_number=streaming_converter.next_sequence(), + code="server_error", + message=str(e), + param="agent_run", + ) + yield ResponseFailedEvent( + sequence_number=streaming_converter.next_sequence(), + response=streaming_converter._build_response(status="failed"), # pylint: disable=protected-access + ) finally: - # Close tool_client if it was created for this request - if tool_client is not None: - try: - await tool_client.close() - logger.debug("Closed tool_client after streaming completed") - except Exception as ex: # pylint: disable=broad-exception-caught - logger.warning(f"Error closing tool_client in stream: {ex}") + # No request-scoped resources to clean up here today. + # Keep this block as a hook for future request-scoped cleanup. + pass return stream_updates() # Non-streaming path logger.info("Running agent in non-streaming mode") non_streaming_converter = AgentFrameworkOutputNonStreamingConverter(context) - result = await agent.run(message) + result = await self.agent.run(message) logger.debug(f"Agent run completed, result type: {type(result)}") transformed_result = non_streaming_converter.transform_output_for_response(result) logger.info("Agent run and transformation completed successfully") @@ -272,10 +237,4 @@ async def oauth_consent_stream(error=e): return oauth_consent_stream() return await self.respond_with_oauth_consent(context, e) finally: - # Close tool_client if it was created for this request (non-streaming only, streaming handles in generator) - if not context.stream and tool_client is not None: - try: - await tool_client.close() - logger.debug("Closed tool_client after request processing") - except Exception as ex: # pylint: disable=broad-exception-caught - logger.warning(f"Error closing tool_client: {ex}") + pass diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_foundry_tools.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_foundry_tools.py new file mode 100644 index 000000000000..875c1de24e8c --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_foundry_tools.py @@ -0,0 +1,150 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from __future__ import annotations + +import inspect +from typing import Any, Awaitable, Callable, Dict, List, Optional, Sequence + +from agent_framework import AIFunction, ChatContext, ChatOptions, ChatMiddleware +from pydantic import Field, create_model + +from azure.ai.agentserver.core import AgentServerContext +from azure.ai.agentserver.core.logger import get_logger +from azure.ai.agentserver.core.tools import FoundryToolLike, ResolvedFoundryTool + +logger = get_logger() + + +def _attach_signature_from_pydantic_model(func, input_model) -> None: + params = [] + annotations: Dict[str, Any] = {} + + for name, field in input_model.model_fields.items(): + ann = field.annotation or Any + annotations[name] = ann + + default = inspect._empty if field.is_required() else field.default + params.append( + inspect.Parameter( + name=name, + kind=inspect.Parameter.KEYWORD_ONLY, + default=default, + annotation=ann, + ) + ) + + func.__signature__ = inspect.Signature(parameters=params, return_annotation=Any) + func.__annotations__ = {**annotations, "return": Any} + +class FoundryToolClient: + + def __init__( + self, + tools: Sequence[FoundryToolLike], + ) -> None: + self._allowed_tools: List[FoundryToolLike] = list(tools) + + async def list_tools(self) -> List[AIFunction]: + server_context = AgentServerContext.get() + foundry_tool_catalog = server_context.tools.catalog + resolved_tools = await foundry_tool_catalog.list(self._allowed_tools) + return [self._to_aifunction(tool) for tool in resolved_tools] + + def _to_aifunction(self, foundry_tool: "ResolvedFoundryTool") -> AIFunction: + """Convert an FoundryTool to an Agent Framework AI Function + + :param foundry_tool: The FoundryTool to convert. + :type foundry_tool: ~azure.ai.agentserver.core.client.tools.aio.FoundryTool + :return: An AI Function Tool. + :rtype: AIFunction + """ + # Get the input schema from the tool descriptor + input_schema = foundry_tool.input_schema or {} + + # Create a Pydantic model from the input schema + properties = input_schema.properties or {} + required_fields = set(input_schema.required or []) + + # Build field definitions for the Pydantic model + field_definitions: Dict[str, Any] = {} + for field_name, field_info in properties.items(): + field_type = self._json_schema_type_to_python(field_info.type or "string") + field_description = field_info.description or "" + is_required = field_name in required_fields + + if is_required: + field_definitions[field_name] = (field_type, Field(description=field_description)) + else: + field_definitions[field_name] = (Optional[field_type], + Field(default=None, description=field_description)) + + # Create the Pydantic model dynamically + input_model = create_model( + f"{foundry_tool.name}_input", + **field_definitions + ) + + # Create a wrapper function that calls the Azure tool + async def tool_func(**kwargs: Any) -> Any: + """Dynamically generated function to invoke the Azure AI tool. + + :return: The result from the tool invocation. 
+ :rtype: Any + """ + server_context = AgentServerContext.get() + logger.debug("Invoking tool: %s with input: %s", foundry_tool.name, kwargs) + return await server_context.tools.invoke(foundry_tool, kwargs) + _attach_signature_from_pydantic_model(tool_func, input_model) + + # Create and return the AIFunction + return AIFunction( + name=foundry_tool.name, + description=foundry_tool.description or "No description available", + func=tool_func, + input_model=input_model + ) + + def _json_schema_type_to_python(self, json_type: str) -> type: + """Convert JSON schema type to Python type. + + :param json_type: The JSON schema type string. + :type json_type: str + :return: The corresponding Python type. + :rtype: type + """ + type_map = { + "string": str, + "number": float, + "integer": int, + "boolean": bool, + "array": list, + "object": dict, + } + return type_map.get(json_type, str) + + +class FoundryToolsChatMiddleware(ChatMiddleware): + """Chat middleware to inject Foundry tools into ChatOptions on each call.""" + + def __init__( + self, + tools: Sequence[FoundryToolLike]) -> None: + self._foundry_tool_client = FoundryToolClient(tools=tools) + + async def process( + self, + context: ChatContext, + next: Callable[[ChatContext], Awaitable[None]], + ) -> None: + tools = await self._foundry_tool_client.list_tools() + base_chat_options = context.chat_options + if not base_chat_options: + logger.debug("No existing ChatOptions found, creating new one with Foundry tools.") + base_chat_options = ChatOptions(tools=tools) + context.chat_options = base_chat_options + else: + logger.debug("Adding Foundry tools to existing ChatOptions.") + base_tools = base_chat_options.tools or [] + context.chat_options.tools = base_tools + tools + await next(context) diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/tool_client.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/tool_client.py deleted file mode 100644 index 4db103577cbf..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/tool_client.py +++ /dev/null @@ -1,183 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- -# mypy: disable-error-code="assignment" -"""Tool client for integrating AzureAIToolClient with Agent Framework.""" - -from typing import TYPE_CHECKING, Any, Dict, List, Optional -from agent_framework import AIFunction -from pydantic import Field, create_model -from azure.ai.agentserver.core.logger import get_logger -if TYPE_CHECKING: - from azure.ai.agentserver.core.tools import FoundryToolClient, ResolvedFoundryTool - -logger = get_logger() - -# pylint: disable=client-accepts-api-version-keyword,missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs -class ToolClient: - """Client that integrates AzureAIToolClient with Agent Framework. - - This class provides methods to list tools from AzureAIToolClient and invoke them - in a format compatible with Agent Framework agents. - - :param tool_client: The AzureAIToolClient instance to use for tool operations. - :type tool_client: ~azure.ai.agentserver.core.client.tools.aio.AzureAIToolClient - - .. admonition:: Example: - - .. 
code-block:: python - - from azure.ai.agentserver.core.client.tools.aio import AzureAIToolClient - from azure.ai.agentserver.agentframework import ToolClient - from azure.identity.aio import DefaultAzureCredential - - async with DefaultAzureCredential() as credential: - tool_client = AzureAIToolClient( - endpoint="https://", - credential=credential - ) - - client = ToolClient(tool_client) - - # List tools as Agent Framework tool definitions - tools = await client.list_tools() - - # Invoke a tool directly - result = await client.invoke_tool( - tool_name="my_tool", - tool_input={"param": "value"} - ) - - :meta private: - """ - - def __init__(self, tool_client: "FoundryToolClient") -> None: - """Initialize the ToolClient. - - :param tool_client: The AzureAIToolClient instance to use for tool operations. - :type tool_client: ~azure.ai.agentserver.core.client.tools.aio.AzureAIToolClient - """ - self._tool_client = tool_client - self._aifunction_cache: List[AIFunction] = None - - async def list_tools(self) -> List[AIFunction]: - """List all available tools as Agent Framework tool definitions. - - Retrieves tools from AzureAIToolClient and returns them in a format - compatible with Agent Framework. - - :return: List of tool definitions. - :rtype: List[AIFunction] - :raises ~azure.core.exceptions.HttpResponseError: - Raised for HTTP communication failures. - - .. admonition:: Example: - - .. code-block:: python - - client = ToolClient(tool_client) - tools = await client.list_tools() - """ - # Get tools from AzureAIToolClient - if self._aifunction_cache is not None: - return self._aifunction_cache - - azure_tools = await self._tool_client.list_tools() - self._aifunction_cache = [] - - # Convert to Agent Framework tool definitions - for azure_tool in azure_tools: - ai_function_tool = self._convert_to_agent_framework_tool(azure_tool) - self._aifunction_cache.append(ai_function_tool) - - return self._aifunction_cache - - def _convert_to_agent_framework_tool(self, azure_tool: "ResolvedFoundryTool") -> AIFunction: - """Convert an AzureAITool to an Agent Framework AI Function - - :param azure_tool: The AzureAITool to convert. - :type azure_tool: ~azure.ai.agentserver.core.client.tools.aio.FoundryTool - :return: An AI Function Tool. - :rtype: AIFunction - """ - # Get the input schema from the tool descriptor - input_schema = azure_tool.input_schema or {} - - # Create a Pydantic model from the input schema - properties = input_schema.get("properties") or {} - required_fields = set(input_schema.get("required") or []) - - # Build field definitions for the Pydantic model - field_definitions: Dict[str, Any] = {} - for field_name, field_info in properties.items(): - field_type = self._json_schema_type_to_python(field_info.get("type", "string")) - field_description = field_info.get("description", "") - is_required = field_name in required_fields - - if is_required: - field_definitions[field_name] = (field_type, Field(description=field_description)) - else: - field_definitions[field_name] = (Optional[field_type], - Field(default=None, description=field_description)) - - # Create the Pydantic model dynamically - input_model = create_model( - f"{azure_tool.name}_input", - **field_definitions - ) - - # Create a wrapper function that calls the Azure tool - async def tool_func(**kwargs: Any) -> Any: - """Dynamically generated function to invoke the Azure AI tool. - - :return: The result from the tool invocation. 
- :rtype: Any - """ - logger.debug("Invoking tool: %s with input: %s", azure_tool.name, kwargs) - return await azure_tool.ainvoke(kwargs) - - # Create and return the AIFunction - return AIFunction( - name=azure_tool.name, - description=azure_tool.description or "No description available", - func=tool_func, - input_model=input_model - ) - - def _json_schema_type_to_python(self, json_type: str) -> type: - """Convert JSON schema type to Python type. - - :param json_type: The JSON schema type string. - :type json_type: str - :return: The corresponding Python type. - :rtype: type - """ - type_map = { - "string": str, - "number": float, - "integer": int, - "boolean": bool, - "array": list, - "object": dict, - } - return type_map.get(json_type, str) - - async def close(self) -> None: - """Close the tool client and release resources.""" - await self._tool_client.close() - - async def __aenter__(self) -> "ToolClient": - """Async context manager entry. - - :return: The ToolClient instance. - :rtype: ToolClient - """ - return self - - async def __aexit__(self, *exc_details: Any) -> None: - """Async context manager exit. - - :param exc_details: Exception details if an exception occurred. - :type exc_details: Any - """ - await self.close() diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/README.md b/sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/README.md new file mode 100644 index 000000000000..956fc634eb11 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/README.md @@ -0,0 +1,81 @@ +# Chat Client With Foundry Tools + +This sample demonstrates how to attach `FoundryToolsChatMiddleware` to an Agent Framework chat client so that: + +- Foundry tools configured in your Azure AI Project are converted into Agent Framework `AIFunction` tools. +- The tools are injected automatically for each agent run. + +## What this sample does + +The script creates an Agent Framework agent using: + +- `AzureOpenAIChatClient` for model inference +- `FoundryToolsChatMiddleware` to resolve and inject Foundry tools +- `from_agent_framework(agent).run()` to start an AgentServer-compatible HTTP server + +## Prerequisites + +- Python 3.10+ +- An Azure AI Project endpoint +- A tool connection configured in that project (e.g. an MCP connection) +- Azure credentials available to `DefaultAzureCredential` + +## Setup + +1. Install dependencies: + +```bash +pip install -r requirements.txt +``` + +2. Update `.env` in this folder with your values. At minimum you need: + +```dotenv +AZURE_OPENAI_ENDPOINT=https://.openai.azure.com/ +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= +OPENAI_API_VERSION= + +AZURE_AI_PROJECT_ENDPOINT=https://.services.ai.azure.com/api/projects/ +AZURE_AI_PROJECT_TOOL_CONNECTION_ID= +``` + +Notes: + +- This sample uses `DefaultAzureCredential()`. Make sure you are signed in (e.g. `az login`) or otherwise configured. + +## Run + +```bash +python chat_client_with_foundry_tool.py +``` + +This starts a local Uvicorn server (it will keep running and wait for requests). If it looks "stuck" at startup, it may just be waiting for requests. 
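+
+As a quick smoke test once the server is running, you can POST a request to it from another
+terminal. The port and route below are assumptions (the address printed in the startup logs is
+authoritative); the `input` field matches the payload shape the adapter reads:
+
+```python
+# Hypothetical smoke test -- adjust host/port/route to match the server's startup logs.
+import json
+import urllib.request
+
+req = urllib.request.Request(
+    "http://localhost:8088/responses",  # assumed default address and route
+    data=json.dumps({"input": "What tools do you have available?"}).encode("utf-8"),
+    headers={"Content-Type": "application/json"},
+)
+with urllib.request.urlopen(req) as resp:
+    print(json.loads(resp.read().decode("utf-8")))
+```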
+## Key code
+
+The core pattern used by this sample:
+
+```python
+agent = AzureOpenAIChatClient(
+    credential=DefaultAzureCredential(),
+    middleware=FoundryToolsChatMiddleware(
+        tools=[{"type": "mcp", "project_connection_id": tool_connection_id}],
+    ),
+).create_agent(
+    name="FoundryToolAgent",
+    instructions="You are a helpful assistant with access to various tools.",
+)
+
+from_agent_framework(agent).run()
+```
+
+## Troubleshooting
+
+- **No tools found**: verify `AZURE_AI_PROJECT_TOOL_CONNECTION_ID` points at an existing tool connection in your project.
+- **Auth failures**: confirm `DefaultAzureCredential` can acquire a token (try `az login`).
+- **Import errors / `agent_framework` circular imports**: run the sample from this folder (not from inside the package module directory) so the external `agent_framework` dependency is imported correctly.
+
+## Learn more
+
+- Azure AI Agent Service: https://learn.microsoft.com/azure/ai-services/agents/
+- Agent Framework: https://github.com/microsoft/agent-framework
diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/chat_client_with_foundry_tool.py b/sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/chat_client_with_foundry_tool.py
new file mode 100644
index 000000000000..cb9c3cd2c9c6
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/chat_client_with_foundry_tool.py
@@ -0,0 +1,34 @@
+# Copyright (c) Microsoft. All rights reserved.
+"""Example showing how to serve an Agent Framework agent with Foundry tools.
+
+This sample demonstrates how to attach FoundryToolsChatMiddleware to an
+AzureOpenAIChatClient agent so that Foundry tools configured in the Azure AI
+project are resolved and injected on each run, and how to expose the agent
+as an HTTP service with from_agent_framework(agent).run().
+""" + +import os +from dotenv import load_dotenv +from agent_framework.azure import AzureOpenAIChatClient + +from azure.ai.agentserver.agentframework import from_agent_framework, FoundryToolsChatMiddleware +from azure.identity import DefaultAzureCredential + +load_dotenv() + +def main(): + tool_connection_id = os.getenv("AZURE_AI_PROJECT_TOOL_CONNECTION_ID") + + agent = AzureOpenAIChatClient( + credential=DefaultAzureCredential(), + middleware=FoundryToolsChatMiddleware( + tools=[{"type": "mcp", "project_connection_id": tool_connection_id}] + )).create_agent( + name="FoundryToolAgent", + instructions="You are a helpful assistant with access to various tools.", + ) + + from_agent_framework(agent).run() + +if __name__ == "__main__": + main() diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/requirements.txt b/sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/requirements.txt similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/requirements.txt rename to sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/requirements.txt diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/README.md b/sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/README.md deleted file mode 100644 index 019e388975ff..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/README.md +++ /dev/null @@ -1,113 +0,0 @@ -# Tool Client Example - -This example demonstrates how to use the `ToolClient` with Agent Framework to dynamically access tools from Azure AI Tool Client. - -## Overview - -The `ToolClient` provides a bridge between Azure AI Tool Client and Agent Framework, allowing agents to access tools configured in your Azure AI project. This example shows how to use a factory function pattern to create agents dynamically with access to tools at runtime. - -## Features - -- **Dynamic Tool Access**: Agents can list and invoke tools from Azure AI Tool Client -- **Factory Pattern**: Create fresh agent instances per request to avoid concurrency issues -- **Tool Integration**: Seamlessly integrate Azure AI tools with Agent Framework agents - -## Prerequisites - -- Python 3.10 or later -- Azure AI project with configured tools -- Azure credentials (DefaultAzureCredential) - -## Setup - -1. Install dependencies: -```bash -pip install -r requirements.txt -``` - -2. Configure environment variables in `.env`: -``` -AZURE_AI_PROJECT_ENDPOINT=https://.services.ai.azure.com/api/projects/ -``` - -3. Ensure your Azure AI project has tools configured (e.g., MCP connections) - -## Running the Example - -```bash -python agent_factory_example.py -``` - -## How It Works - -1. **Factory Function**: The example creates a factory function that: - - Receives a `ToolClient` instance - - Lists available tools from Azure AI Tool Client - - Creates an Agent Framework agent with those tools - - Returns the agent instance - -2. **Dynamic Agent Creation**: The factory is called for each request, ensuring: - - Fresh agent instances per request - - Latest tool configurations - - No concurrency issues - -3. 
**Tool Access**: The agent can use tools like: - - MCP (Model Context Protocol) connections - - Function tools - - Other Azure AI configured tools - -## Key Code Patterns - -### Creating a Factory Function - -```python -async def agent_factory(tool_client: ToolClient): - # List tools from Azure AI - tools = await tool_client.list_tools() - - # Create agent with tools - agent = Agent( - name="MyAgent", - model="gpt-4o", - instructions="You are a helpful assistant.", - tools=tools - ) - return agent -``` - -### Using the Factory - -```python -from azure.ai.agentserver.agentframework import from_agent_framework - -adapter = from_agent_framework( - agent_factory, - credentials=credential, - tools=[{"type": "mcp", "project_connection_id": "my-mcp"}] -) -``` - -## Alternative: Direct Agent Usage - -You can also use a pre-created agent instead of a factory: - -```python -agent = Agent( - name="MyAgent", - model="gpt-4o", - instructions="You are a helpful assistant." -) - -adapter = from_agent_framework(agent, credentials=credential) -``` - -## Troubleshooting - -- **No tools found**: Ensure your Azure AI project has tools configured -- **Authentication errors**: Check your Azure credentials and project endpoint -- **Import errors**: Verify all dependencies are installed - -## Learn More - -- [Azure AI Agent Service Documentation](https://learn.microsoft.com/azure/ai-services/agents/) -- [Agent Framework Documentation](https://github.com/microsoft/agent-framework) diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/agent_factory_example.py b/sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/agent_factory_example.py deleted file mode 100644 index bc4d6bf8806d..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/agent_factory_example.py +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -"""Example showing how to use an agent factory function with ToolClient. - -This sample demonstrates how to pass a factory function to from_agent_framework -that receives a ToolClient and returns an AgentProtocol. This pattern allows -the agent to be created dynamically with access to tools from Azure AI Tool -Client at runtime. -""" - -import asyncio -import os -from typing import List -from dotenv import load_dotenv -from agent_framework import AIFunction -from agent_framework.azure import AzureOpenAIChatClient - -from azure.ai.agentserver.agentframework import from_agent_framework -from azure.identity.aio import DefaultAzureCredential - -load_dotenv() - - -def create_agent_factory(): - """Create a factory function that builds an agent with ToolClient. - - This function returns a factory that takes a ToolClient and returns - an AgentProtocol. The agent is created at runtime for every request, - allowing it to access the latest tool configuration dynamically. - """ - - async def agent_factory(tools: List[AIFunction]) -> AzureOpenAIChatClient: - """Factory function that creates an agent using the provided tools. - - :param tools: The list of AIFunction tools available to the agent. - :type tools: List[AIFunction] - :return: An Agent Framework ChatAgent instance. 
- :rtype: ChatAgent - """ - # List all available tools from the ToolClient - print("Fetching tools from Azure AI Tool Client via factory...") - print(f"Found {len(tools)} tools:") - for tool in tools: - print(f" - tool: {tool.name}, description: {tool.description}") - - if not tools: - print("\nNo tools found!") - print("Make sure your Azure AI project has tools configured.") - raise ValueError("No tools available to create agent") - - # Create the Agent Framework agent with the tools - print("\nCreating Agent Framework agent with tools from factory...") - agent = AzureOpenAIChatClient(credential=DefaultAzureCredential()).create_agent( - name="ToolClientAgent", - instructions="You are a helpful assistant with access to various tools.", - tools=tools, - ) - - print("Agent created successfully!") - return agent - - return agent_factory - - -async def quickstart(): - """Build and return an AgentFrameworkCBAgent using an agent factory function.""" - - # Get configuration from environment - project_endpoint = os.getenv("AZURE_AI_PROJECT_ENDPOINT") - - if not project_endpoint: - raise ValueError( - "AZURE_AI_PROJECT_ENDPOINT environment variable is required. " - "Set it to your Azure AI project endpoint, e.g., " - "https://.services.ai.azure.com/api/projects/" - ) - - # Create Azure credentials - credential = DefaultAzureCredential() - - # Create a factory function that will build the agent at runtime - # The factory will receive a ToolClient when the agent first runs - agent_factory = create_agent_factory() - - tool_connection_id = os.getenv("AZURE_AI_PROJECT_TOOL_CONNECTION_ID") - # Pass the factory function to from_agent_framework instead of a compiled agent - # The agent will be created on every agent run with access to ToolClient - print("Creating Agent Framework adapter with factory function...") - adapter = from_agent_framework( - agent_factory, - credentials=credential, - tools=[{"type": "mcp", "project_connection_id": tool_connection_id}] - ) - - print("Adapter created! 
Agent will be built on every request.") - return adapter - - -async def main(): # pragma: no cover - sample entrypoint - """Main function to run the agent.""" - adapter = await quickstart() - - if adapter: - print("\nStarting agent server...") - print("The agent factory will be called for every request that arrives.") - await adapter.run_async() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py index 319e02da7e98..e15ccd86f9cc 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py @@ -43,6 +43,9 @@ def get_dimensions(): def get_project_endpoint(): + project_endpoint = os.environ.get(Constants.AZURE_AI_PROJECT_ENDPOINT) + if project_endpoint: + return project_endpoint project_resource_id = os.environ.get(Constants.AGENT_PROJECT_RESOURCE_ID) if project_resource_id: last_part = project_resource_id.split("/")[-1] diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py index 2239899225c0..a5f69664cf66 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py @@ -59,8 +59,7 @@ async def dispatch(self, request: Request, call_next): logger.error(f"Invalid JSON payload: {e}") return JSONResponse({"error": f"Invalid JSON payload: {e}"}, status_code=400) try: - agent_tools = self.agent.tools if self.agent else [] - request.state.agent_run_context = AgentRunContext(payload, agent_tools=agent_tools) + request.state.agent_run_context = AgentRunContext(payload) self.set_run_context_to_context_var(request.state.agent_run_context) except Exception as e: logger.error(f"Context build failed: {e}.", exc_info=True) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/_exceptions.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/_exceptions.py index 534cc0c5daa7..b91c1f71c7a3 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/_exceptions.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/_exceptions.py @@ -1,6 +1,8 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- +from __future__ import annotations + from typing import TYPE_CHECKING if TYPE_CHECKING: diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_models.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_models.py index b5d57946fbcc..6736ed442a1a 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_models.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/_models.py @@ -46,9 +46,17 @@ def id(self) -> str: def __str__(self): return self.id + + def __eq__(self, other: object) -> bool: + if not isinstance(other, FoundryTool): + return False + return self.id == other.id + + def __hash__(self) -> int: + return hash(self.id) -@dataclass(frozen=True, kw_only=True) +@dataclass(frozen=True, kw_only=True, eq=False) class FoundryHostedMcpTool(FoundryTool): """Foundry MCP tool definition. @@ -65,7 +73,7 @@ def id(self) -> str: return f"{self.source}:{self.name}" -@dataclass(frozen=True, kw_only=True) +@dataclass(frozen=True, kw_only=True, eq=False) class FoundryConnectedTool(FoundryTool): """Foundry connected tool definition. @@ -76,7 +84,7 @@ class FoundryConnectedTool(FoundryTool): project_connection_id: str @property - def id(self): + def id(self) -> str: return f"{self.source}:{self.protocol}:{self.project_connection_id}" @@ -566,7 +574,7 @@ class InvokeConnectedToolsResult(BaseModel): :ivar Any value: The result value from the tool invocation. """ - value: Any = Field(serialization_alias="toolResult") + value: Any = Field(validation_alias="toolResult") class InvokeFoundryConnectedToolsResponse(BaseModel): @@ -592,7 +600,10 @@ class InvokeFoundryConnectedToolsResponse(BaseModel): Annotated[InvokeConnectedToolsResult, Tag("ResultType")], ], Discriminator( - lambda payload: "ErrorType" if isinstance(payload, dict) and "type" in payload else "ResultType" + lambda payload: "ErrorType" if isinstance(payload, dict) and + # handle other error types in the future + payload.get("type") == "OAuthConsentRequired" + else "ResultType" ), ]) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_base.py index 6123305883c2..5248ab7aa7fa 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_base.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_base.py @@ -4,6 +4,7 @@ from __future__ import annotations from abc import ABC +import json from typing import Any, ClassVar, MutableMapping, Type from azure.core import AsyncPipelineClient @@ -61,3 +62,12 @@ def _handle_response_error(self, response: AsyncHttpResponse) -> None: if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=self._error_map) raise HttpResponseError(response=response) + + def _extract_response_json(self, response: AsyncHttpResponse) -> Any: + try: + payload_text = response.text() + payload_json = json.loads(payload_text) if payload_text else {} + except AttributeError as e: + payload_bytes = response.body() + payload_json = json.loads(payload_bytes.decode("utf-8")) if payload_bytes else {} + return payload_json \ No newline at end of file diff --git 
a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_connected_tools.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_connected_tools.py index b54da32ed4fe..f50abb7ed0cc 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_connected_tools.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_connected_tools.py @@ -109,7 +109,7 @@ def _build_invoke_tool_request( "tenantId": user.tenant_id, } return self._client.post( - self._list_tools_path(agent_name), + self._invoke_tool_path(agent_name), params=self._QUERY_PARAMS, headers=self._HEADERS, content=payload) @@ -146,7 +146,8 @@ async def list_tools(self, request = self._build_list_tools_request(tools, user, agent_name) response = await self._send_request(request) async with response: - tools_response = ListFoundryConnectedToolsResponse.model_validate(response.json()) + json_response = self._extract_response_json(response) + tools_response = ListFoundryConnectedToolsResponse.model_validate(json_response) return self._convert_listed_tools(tools_response, tools) @@ -172,5 +173,7 @@ async def invoke_tool( request = self._build_invoke_tool_request(tool, arguments, user, agent_name) response = await self._send_request(request) async with response: - invoke_response = InvokeFoundryConnectedToolsResponse.model_validate(response.json()) + json_response = self._extract_response_json(response) + invoke_response = InvokeFoundryConnectedToolsResponse.model_validate(json_response) return self._convert_invoke_result(invoke_response) + \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_hosted_mcp_tools.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_hosted_mcp_tools.py index 197b4c645b8b..471e18bf18ee 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_hosted_mcp_tools.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_hosted_mcp_tools.py @@ -6,9 +6,14 @@ from azure.core.rest import HttpRequest -from azure.ai.agentserver.core.tools import FoundryHostedMcpTool, FoundryToolSource, ResolvedFoundryTool, \ - ToolInvocationError -from azure.ai.agentserver.core.tools.client._models import FoundryToolDetails, ListFoundryHostedMcpToolsResponse +from azure.ai.agentserver.core.tools._exceptions import ToolInvocationError +from azure.ai.agentserver.core.tools.client._models import ( + FoundryHostedMcpTool, + FoundryToolSource, + ResolvedFoundryTool, + FoundryToolDetails, + ListFoundryHostedMcpToolsResponse, +) from azure.ai.agentserver.core.tools.client.operations._base import BaseOperations @@ -88,7 +93,7 @@ def _convert_listed_tools( return result def _build_invoke_tool_request(self, tool: ResolvedFoundryTool, arguments: Dict[str, Any]) -> HttpRequest: - if tool.definition.source != FoundryToolSource.FOUNDRY_HOSTED_MCP: + if tool.definition.source != FoundryToolSource.HOSTED_MCP: raise ToolInvocationError(f"Tool {tool.name} is not a Foundry-hosted MCP tool.", tool=tool) definition = cast(FoundryHostedMcpTool, tool.definition) if TYPE_CHECKING else tool.definition @@ -140,7 +145,8 @@ async def list_tools( request = self._build_list_tools_request() response = await 
self._send_request(request) async with response: - tools_response = ListFoundryHostedMcpToolsResponse.model_validate(response.json()) + json_response = self._extract_response_json(response) + tools_response = ListFoundryHostedMcpToolsResponse.model_validate(json_response) return self._convert_listed_tools(tools_response, allowed_tools) async def invoke_tool( @@ -160,4 +166,6 @@ async def invoke_tool( request = self._build_invoke_tool_request(tool, arguments) response = await self._send_request(request) async with response: - return response.json().get("result") + json_response = self._extract_response_json(response) + invoke_response = json_response + return invoke_response diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_catalog.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_catalog.py index 9fcf2da0bb1f..a4244b0ca212 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_catalog.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_catalog.py @@ -63,7 +63,7 @@ async def list(self, tools: List[FoundryToolLike]) -> List[ResolvedFoundryTool]: foundry_tools = [ensure_foundry_tool(tool) for tool in tools] # for tools that are not being listed, create a batch task, convert to per-tool resolving tasks, and cache them - tools_to_fetch = {k: tool for tool in foundry_tools if (k := self._get_key(user, tool) not in self._cache)} + tools_to_fetch = {k: tool for tool in foundry_tools if (k := self._get_key(user, tool)) not in self._cache} if tools_to_fetch: # Awaitable[Mapping[FoundryTool, List[FoundryToolDetails]]] fetched_tools = asyncio.create_task(self._fetch_tools(tools_to_fetch.values(), user))
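For reference, the `_catalog.py` change above is an operator-precedence fix around the walrus expression: the original code bound `k` to the boolean result of `self._get_key(user, tool) not in self._cache` rather than to the key itself. A minimal standalone sketch of the difference, using plain stand-ins instead of the SDK types:

```python
# Standalone illustration of the walrus-precedence fix; `key` and `cache` are stand-ins.
cache = {"a": "cached"}
items = ["a", "b"]

def key(x):
    return x

# Before: the walrus captures the *membership test* result, so the dict is keyed by True/False.
buggy = {k: x for x in items if (k := key(x) not in cache)}
assert buggy == {True: "b"}

# After: parenthesizing the walrus keeps the real key and only then tests membership.
fixed = {k: x for x in items if (k := key(x)) not in cache}
assert fixed == {"b": "b"}
```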