Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion python/packages/azure-ai/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ classifiers = [
]
dependencies = [
"agent-framework-core>=1.0.0b260210",
"azure-ai-projects >= 2.0.0b3",
"azure-ai-agents == 1.2.0b5",
"aiohttp",
]
Expand Down
98 changes: 93 additions & 5 deletions python/packages/core/agent_framework/azure/_responses_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,14 @@
from typing import TYPE_CHECKING, Any, Generic
from urllib.parse import urljoin

from azure.ai.projects.aio import AIProjectClient
from azure.core.credentials import TokenCredential
from openai.lib.azure import AsyncAzureADTokenProvider, AsyncAzureOpenAI
from openai import AsyncOpenAI
from openai.lib.azure import AsyncAzureADTokenProvider
from pydantic import ValidationError

from .._middleware import ChatMiddlewareLayer
from .._telemetry import AGENT_FRAMEWORK_USER_AGENT
from .._tools import FunctionInvocationConfiguration, FunctionInvocationLayer
from ..exceptions import ServiceInitializationError
from ..observability import ChatTelemetryLayer
Expand Down Expand Up @@ -72,7 +75,9 @@ def __init__(
token_endpoint: str | None = None,
credential: TokenCredential | None = None,
default_headers: Mapping[str, str] | None = None,
async_client: AsyncAzureOpenAI | None = None,
async_client: AsyncOpenAI | None = None,
project_client: Any | None = None,
project_endpoint: str | None = None,
env_file_path: str | None = None,
env_file_encoding: str | None = None,
instruction_role: str | None = None,
Expand All @@ -82,6 +87,14 @@ def __init__(
) -> None:
"""Initialize an Azure OpenAI Responses client.

The client can be created in two ways:

1. **Direct Azure OpenAI** (default): Provide endpoint, api_key, or credential
to connect directly to an Azure OpenAI deployment.
2. **Foundry project endpoint**: Provide a ``project_client`` or ``project_endpoint``
(with ``credential``) to create the client via an Azure AI Foundry project.
This requires the ``azure-ai-projects`` package to be installed.

Keyword Args:
api_key: The API key. If provided, will override the value in the env vars or .env file.
Can also be set via environment variable AZURE_OPENAI_API_KEY.
Expand All @@ -105,6 +118,12 @@ def __init__(
default_headers: The default headers mapping of string keys to
string values for HTTP requests.
async_client: An existing client to use.
project_client: An existing ``AIProjectClient`` (from ``azure.ai.projects.aio``) to use.
The OpenAI client will be obtained via ``project_client.get_openai_client()``.
Requires the ``azure-ai-projects`` package.
project_endpoint: The Azure AI Foundry project endpoint URL.
When provided with ``credential``, an ``AIProjectClient`` will be created
and used to obtain the OpenAI client. Requires the ``azure-ai-projects`` package.
env_file_path: Use the environment settings file as a fallback to using env vars.
env_file_encoding: The encoding of the environment settings file, defaults to 'utf-8'.
instruction_role: The role to use for 'instruction' messages, for example, summarization
Expand Down Expand Up @@ -132,6 +151,27 @@ def __init__(
# Or loading from a .env file
client = AzureOpenAIResponsesClient(env_file_path="path/to/.env")

# Using a Foundry project endpoint
from azure.identity import DefaultAzureCredential

client = AzureOpenAIResponsesClient(
project_endpoint="https://your-project.services.ai.azure.com",
deployment_name="gpt-4o",
credential=DefaultAzureCredential(),
)

# Or using an existing AIProjectClient
from azure.ai.projects.aio import AIProjectClient

project_client = AIProjectClient(
endpoint="https://your-project.services.ai.azure.com",
credential=DefaultAzureCredential(),
)
client = AzureOpenAIResponsesClient(
project_client=project_client,
deployment_name="gpt-4o",
)

# Using custom ChatOptions with type safety:
from typing import TypedDict
from agent_framework.azure import AzureOpenAIResponsesOptions
Expand All @@ -146,6 +186,15 @@ class MyOptions(AzureOpenAIResponsesOptions, total=False):
"""
if model_id := kwargs.pop("model_id", None) and not deployment_name:
deployment_name = str(model_id)

# Project client path: create OpenAI client from an Azure AI Foundry project
if async_client is None and (project_client is not None or project_endpoint is not None):
async_client = self._create_client_from_project(
project_client=project_client,
project_endpoint=project_endpoint,
credential=credential,
)

try:
azure_openai_settings = AzureOpenAISettings(
# pydantic settings will see if there is a value, if not, will try the env var or .env file
Expand Down Expand Up @@ -195,9 +244,48 @@ class MyOptions(AzureOpenAIResponsesOptions, total=False):
function_invocation_configuration=function_invocation_configuration,
)

@staticmethod
def _create_client_from_project(
    *,
    project_client: AIProjectClient | None,
    project_endpoint: str | None,
    credential: TokenCredential | None,
) -> AsyncOpenAI:
    """Create an AsyncOpenAI client from an Azure AI Foundry project.

    Either an existing ``project_client`` or a ``project_endpoint`` plus
    ``credential`` must be supplied; an existing client takes precedence
    and the other arguments are then ignored.

    Keyword Args:
        project_client: An existing AIProjectClient to use.
        project_endpoint: The Azure AI Foundry project endpoint URL.
        credential: Azure credential for authentication.

    Returns:
        An AsyncOpenAI client obtained via ``AIProjectClient.get_openai_client()``.

    Raises:
        ServiceInitializationError: If required parameters are missing or
            the azure-ai-projects package is not installed.
    """
    # An explicitly supplied project client always wins; no validation of
    # endpoint/credential is needed in that case.
    if project_client is not None:
        return project_client.get_openai_client()

    if not project_endpoint:
        raise ServiceInitializationError(
            "Azure AI project endpoint is required when project_client is not provided."
        )
    if not credential:
        raise ServiceInitializationError(
            "Azure credential is required when using project_endpoint without a project_client."
        )
    project_client = AIProjectClient(
        endpoint=project_endpoint,
        credential=credential,  # type: ignore[arg-type]
        user_agent=AGENT_FRAMEWORK_USER_AGENT,
    )
    return project_client.get_openai_client()

@override
def _check_model_presence(self, run_options: dict[str, Any]) -> None:
if not run_options.get("model"):
def _check_model_presence(self, options: dict[str, Any]) -> None:
if not options.get("model"):
if not self.model_id:
raise ValueError("deployment_name must be a non-empty string")
run_options["model"] = self.model_id
options["model"] = self.model_id
3 changes: 2 additions & 1 deletion python/packages/core/agent_framework/azure/_shared.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from typing import Any, ClassVar, Final

from azure.core.credentials import TokenCredential
from openai import AsyncOpenAI
from openai.lib.azure import AsyncAzureOpenAI
from pydantic import SecretStr, model_validator

Expand Down Expand Up @@ -162,7 +163,7 @@ def __init__(
token_endpoint: str | None = None,
credential: TokenCredential | None = None,
default_headers: Mapping[str, str] | None = None,
client: AsyncAzureOpenAI | None = None,
client: AsyncOpenAI | None = None,
instruction_role: str | None = None,
**kwargs: Any,
) -> None:
Expand Down
19 changes: 13 additions & 6 deletions python/packages/core/agent_framework/openai/_responses_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -901,6 +901,7 @@ def _prepare_message_for_openai(
"""Prepare a chat message for the OpenAI Responses API format."""
all_messages: list[dict[str, Any]] = []
args: dict[str, Any] = {
"type": "message",
"role": message.role,
}
for content in message.contents:
Expand All @@ -911,16 +912,22 @@ def _prepare_message_for_openai(
case "function_result":
new_args: dict[str, Any] = {}
new_args.update(self._prepare_content_for_openai(message.role, content, call_id_to_id)) # type: ignore[arg-type]
all_messages.append(new_args)
if new_args:
all_messages.append(new_args)
case "function_call":
function_call = self._prepare_content_for_openai(message.role, content, call_id_to_id) # type: ignore[arg-type]
all_messages.append(function_call) # type: ignore
if function_call:
all_messages.append(function_call) # type: ignore
case "function_approval_response" | "function_approval_request":
all_messages.append(self._prepare_content_for_openai(message.role, content, call_id_to_id)) # type: ignore
prepared = self._prepare_content_for_openai(Role(message.role), content, call_id_to_id)
if prepared:
all_messages.append(prepared) # type: ignore
case _:
if "content" not in args:
args["content"] = []
args["content"].append(self._prepare_content_for_openai(message.role, content, call_id_to_id)) # type: ignore
prepared_content = self._prepare_content_for_openai(message.role, content, call_id_to_id) # type: ignore
if prepared_content:
if "content" not in args:
args["content"] = []
args["content"].append(prepared_content) # type: ignore
if "content" in args or "tool_calls" in args:
all_messages.append(args)
return all_messages
Expand Down
1 change: 1 addition & 0 deletions python/packages/core/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ dependencies = [
# connectors and functions
"openai>=1.99.0",
"azure-identity>=1,<2",
"azure-ai-projects >= 2.0.0b3",
"mcp[ws]>=1.24.0,<2",
"packaging>=24.1",
]
Expand Down
114 changes: 114 additions & 0 deletions python/packages/core/tests/azure/test_azure_responses_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import json
import os
from typing import Annotated, Any
from unittest.mock import MagicMock

import pytest
from azure.identity import AzureCliCredential
Expand Down Expand Up @@ -115,6 +116,119 @@ def test_init_with_empty_model_id(azure_openai_unit_test_env: dict[str, str]) ->
)


def test_init_with_project_client(azure_openai_unit_test_env: dict[str, str]) -> None:
    """Test initialization with an existing AIProjectClient."""
    from unittest.mock import patch

    from openai import AsyncOpenAI

    # Stub the OpenAI client that the project client would hand back.
    openai_stub = MagicMock(spec=AsyncOpenAI)
    openai_stub.default_headers = {}

    project_stub = MagicMock()
    project_stub.get_openai_client.return_value = openai_stub

    patch_target = (
        "agent_framework.azure._responses_client.AzureOpenAIResponsesClient._create_client_from_project"
    )
    with patch(patch_target, return_value=openai_stub):
        client = AzureOpenAIResponsesClient(
            project_client=project_stub,
            deployment_name="gpt-4o",
        )

    assert client.model_id == "gpt-4o"
    assert client.client is openai_stub
    assert isinstance(client, SupportsChatGetResponse)


def test_init_with_project_endpoint(azure_openai_unit_test_env: dict[str, str]) -> None:
    """Test initialization with a project endpoint and credential."""
    from unittest.mock import patch

    from openai import AsyncOpenAI

    openai_stub = MagicMock(spec=AsyncOpenAI)
    openai_stub.default_headers = {}

    patch_target = (
        "agent_framework.azure._responses_client.AzureOpenAIResponsesClient._create_client_from_project"
    )
    with patch(patch_target, return_value=openai_stub):
        client = AzureOpenAIResponsesClient(
            project_endpoint="https://test-project.services.ai.azure.com",
            deployment_name="gpt-4o",
            credential=AzureCliCredential(),
        )

    assert client.model_id == "gpt-4o"
    assert client.client is openai_stub
    assert isinstance(client, SupportsChatGetResponse)


def test_create_client_from_project_with_project_client() -> None:
    """Test _create_client_from_project with an existing project client."""
    from openai import AsyncOpenAI

    openai_stub = MagicMock(spec=AsyncOpenAI)
    project_stub = MagicMock()
    project_stub.get_openai_client.return_value = openai_stub

    returned = AzureOpenAIResponsesClient._create_client_from_project(
        project_client=project_stub,
        project_endpoint=None,
        credential=None,
    )

    assert returned is openai_stub
    project_stub.get_openai_client.assert_called_once()


def test_create_client_from_project_with_endpoint() -> None:
    """Test _create_client_from_project with a project endpoint."""
    from unittest.mock import patch

    from openai import AsyncOpenAI

    openai_stub = MagicMock(spec=AsyncOpenAI)
    credential_stub = MagicMock()

    with patch("agent_framework.azure._responses_client.AIProjectClient") as project_client_cls:
        # The class is patched, so instantiation yields this mock instance.
        project_client_cls.return_value.get_openai_client.return_value = openai_stub

        returned = AzureOpenAIResponsesClient._create_client_from_project(
            project_client=None,
            project_endpoint="https://test-project.services.ai.azure.com",
            credential=credential_stub,
        )

    assert returned is openai_stub
    project_client_cls.assert_called_once()
    project_client_cls.return_value.get_openai_client.assert_called_once()


def test_create_client_from_project_missing_endpoint() -> None:
    """Test _create_client_from_project raises error when endpoint is missing."""
    credential_stub = MagicMock()
    with pytest.raises(ServiceInitializationError, match="project endpoint is required"):
        AzureOpenAIResponsesClient._create_client_from_project(
            project_client=None,
            project_endpoint=None,
            credential=credential_stub,
        )


def test_create_client_from_project_missing_credential() -> None:
    """Test _create_client_from_project raises error when credential is missing."""
    endpoint = "https://test-project.services.ai.azure.com"
    with pytest.raises(ServiceInitializationError, match="credential is required"):
        AzureOpenAIResponsesClient._create_client_from_project(
            project_client=None,
            project_endpoint=endpoint,
            credential=None,
        )


def test_serialize(azure_openai_unit_test_env: dict[str, str]) -> None:
default_headers = {"X-Unit-Test": "test-guid"}

Expand Down
16 changes: 4 additions & 12 deletions python/packages/core/tests/openai/test_openai_responses_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -798,12 +798,8 @@ def test_chat_message_with_error_content() -> None:

result = client._prepare_message_for_openai(message, call_id_to_id)

# Message should be prepared with empty content list since ErrorContent returns {}
assert len(result) == 1
prepared_message = result[0]
assert prepared_message["role"] == "assistant"
# Content should be a list with empty dict since ErrorContent returns {}
assert prepared_message.get("content") == [{}]
# Message should be empty since ErrorContent is filtered out
assert len(result) == 0


def test_chat_message_with_usage_content() -> None:
Expand All @@ -823,12 +819,8 @@ def test_chat_message_with_usage_content() -> None:

result = client._prepare_message_for_openai(message, call_id_to_id)

# Message should be prepared with empty content list since UsageContent returns {}
assert len(result) == 1
prepared_message = result[0]
assert prepared_message["role"] == "assistant"
# Content should be a list with empty dict since UsageContent returns {}
assert prepared_message.get("content") == [{}]
# Message should be empty since UsageContent is filtered out
assert len(result) == 0


def test_hosted_file_content_preparation() -> None:
Expand Down
3 changes: 2 additions & 1 deletion python/pyrightconfig.samples.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
"**/autogen-migration/**",
"**/semantic-kernel-migration/**",
"**/demos/**",
"**/agent_with_foundry_tracing.py"
"**/agent_with_foundry_tracing.py",
"**/azure_responses_client_with_foundry.py"
],
"typeCheckingMode": "off",
"reportMissingImports": "error",
Expand Down
1 change: 1 addition & 0 deletions python/samples/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,7 @@ This directory contains samples demonstrating the capabilities of Microsoft Agen
| [`getting_started/agents/azure_openai/azure_responses_client_image_analysis.py`](./getting_started/agents/azure_openai/azure_responses_client_image_analysis.py) | Azure OpenAI Responses Client with Image Analysis Example |
| [`getting_started/agents/azure_openai/azure_responses_client_with_code_interpreter.py`](./getting_started/agents/azure_openai/azure_responses_client_with_code_interpreter.py) | Azure OpenAI Responses Client with Code Interpreter Example |
| [`getting_started/agents/azure_openai/azure_responses_client_with_explicit_settings.py`](./getting_started/agents/azure_openai/azure_responses_client_with_explicit_settings.py) | Azure OpenAI Responses Client with Explicit Settings Example |
| [`getting_started/agents/azure_openai/azure_responses_client_with_foundry.py`](./getting_started/agents/azure_openai/azure_responses_client_with_foundry.py) | Azure OpenAI Responses Client with Foundry Project Example |
| [`getting_started/agents/azure_openai/azure_responses_client_with_function_tools.py`](./getting_started/agents/azure_openai/azure_responses_client_with_function_tools.py) | Azure OpenAI Responses Client with Function Tools Example |
| [`getting_started/agents/azure_openai/azure_responses_client_with_hosted_mcp.py`](./getting_started/agents/azure_openai/azure_responses_client_with_hosted_mcp.py) | Azure OpenAI Responses Client with Hosted Model Context Protocol (MCP) Example |
| [`getting_started/agents/azure_openai/azure_responses_client_with_local_mcp.py`](./getting_started/agents/azure_openai/azure_responses_client_with_local_mcp.py) | Azure OpenAI Responses Client with local Model Context Protocol (MCP) Example |
Expand Down
Loading
Loading