|
1 | 1 | # Copyright (c) Microsoft. All rights reserved.
2 | 2 |
|
3 | | -import asyncio
4 | | -import os
| 3 | +"""Host your agent with Azure Functions.
5 | 4 |
|
6 | | -from agent_framework.azure import AzureOpenAIResponsesClient
7 | | -from azure.identity import AzureCliCredential
8 | | -
9 | | -"""
10 | | -Host Your Agent — Minimal A2A hosting stub
11 | | -
|
12 | | -This sample shows the pattern for exposing an agent via the Agent-to-Agent
13 | | -(A2A) protocol. It creates the agent and demonstrates how to wrap it with
14 | | -the A2A hosting layer.
| 5 | +This sample shows the Python hosting pattern used in the docs:
| 6 | +- Create an agent with `AzureOpenAIChatClient`
| 7 | +- Register it with `AgentFunctionApp`
| 8 | +- Run with Azure Functions Core Tools (`func start`)
15 | 9 |
|
16 | 10 | Prerequisites:
17 | | - pip install agent-framework[a2a] --pre
| 11 | + pip install agent-framework-azurefunctions --pre
18 | 12 |
|
19 | 13 | Environment variables:
20 | | - AZURE_AI_PROJECT_ENDPOINT — Your Azure AI Foundry project endpoint
21 | | - AZURE_OPENAI_RESPONSES_DEPLOYMENT_NAME — Model deployment name (e.g. gpt-4o)
22 | | -
|
23 | | -To run a full A2A server, see samples/04-hosting/a2a/ for a complete example.
| 14 | + AZURE_OPENAI_ENDPOINT - Your Azure OpenAI endpoint
| 15 | + AZURE_OPENAI_CHAT_DEPLOYMENT_NAME - Chat model deployment name (e.g. gpt-4o)
24 | 16 | """ |
25 | 17 |
|
| 18 | +from typing import Any |
| 19 | + |
| 20 | +from agent_framework.azure import AgentFunctionApp, AzureOpenAIChatClient |
| 21 | +from azure.identity import AzureCliCredential |
26 | 22 |
|
27 | | -async def main() -> None: |
28 | | - # <create_agent> |
29 | | - credential = AzureCliCredential() |
30 | | - client = AzureOpenAIResponsesClient( |
31 | | - project_endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"], |
32 | | - deployment_name=os.environ["AZURE_OPENAI_RESPONSES_DEPLOYMENT_NAME"], |
33 | | - credential=credential, |
34 | | - ) |
35 | 23 |
|
36 | | - agent = client.as_agent( |
| 24 | +# <create_agent> |
| 25 | +def _create_agent() -> Any: |
| 26 | + """Create a hosted agent backed by Azure OpenAI.""" |
| 27 | + return AzureOpenAIChatClient(credential=AzureCliCredential()).as_agent( |
37 | 28 | name="HostedAgent", |
38 | | - instructions="You are a helpful assistant exposed via A2A.", |
| 29 | + instructions="You are a helpful assistant hosted in Azure Functions.", |
39 | 30 | ) |
40 | | - # </create_agent> |
41 | 31 |
|
42 | | - # <host_agent> |
43 | | - # The A2A hosting integration wraps your agent behind an HTTP endpoint. |
44 | | - # Import is gated so this sample can run without the a2a extra installed. |
45 | | - try: |
46 | | - from agent_framework.a2a import A2AAgent # noqa: F401 |
47 | 32 |
|
48 | | - print("A2A support is available.") |
49 | | - print("See samples/04-hosting/a2a/ for a runnable A2A server example.") |
50 | | - except ImportError: |
51 | | - print("Install a2a extras: pip install agent-framework[a2a] --pre") |
| 33 | +# </create_agent> |
52 | 34 |
|
53 | | - # Quick smoke-test: run the agent locally to verify it works |
54 | | - result = await agent.run("Hello! What can you do?") |
55 | | - print(f"Agent: {result}") |
56 | | - # </host_agent> |
| 35 | +# <host_agent> |
| 36 | +app = AgentFunctionApp(agents=[_create_agent()], enable_health_check=True, max_poll_retries=50) |
| 37 | +# </host_agent> |
57 | 38 |
|
58 | 39 |
|
59 | 40 | if __name__ == "__main__": |
60 | | - asyncio.run(main()) |
| 41 | + print("Start the Functions host with: func start") |
| 42 | + print("Then call: POST /api/agents/HostedAgent/run") |
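
Once the Functions host is running (`func start`), a quick local smoke test might look like the sketch below. The `/api/agents/HostedAgent/run` route comes from the sample's final print statement, and 7071 is the Functions host's default local port; the JSON payload shape and the way the response is read are illustrative assumptions, not the documented `AgentFunctionApp` request contract.

```python
# Hypothetical smoke test for the hosted agent; not part of the sample file.
# Assumes the default local Functions port (7071) and the route printed by the sample.
# The {"input": ...} body is a placeholder guess - check the agent-framework-azurefunctions
# docs for the actual request and response schema.
import json
import urllib.request

url = "http://localhost:7071/api/agents/HostedAgent/run"
payload = json.dumps({"input": "Hello! What can you do?"}).encode("utf-8")
request = urllib.request.Request(
    url,
    data=payload,
    headers={"Content-Type": "application/json"},
    method="POST",
)

with urllib.request.urlopen(request) as response:
    print(response.read().decode("utf-8"))
```

Swap `urllib.request` for `httpx` or plain `curl` as preferred; the only sample-specific pieces are the agent name in the route and the port the Functions host reports on startup.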