-
Notifications
You must be signed in to change notification settings - Fork 104
Expand file tree
/
Copy pathagentframework_learn.py
More file actions
67 lines (59 loc) · 2.25 KB
/
agentframework_learn.py
File metadata and controls
67 lines (59 loc) · 2.25 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
import asyncio
import logging
import os
from agent_framework import ChatAgent, MCPStreamableHTTPTool
from agent_framework.azure import AzureOpenAIChatClient
from agent_framework.openai import OpenAIChatClient
from azure.identity import DefaultAzureCredential
from dotenv import load_dotenv
from rich import print
from rich.logging import RichHandler
# ---------------------------------------------------------------------------
# Logging: rich-formatted root handler at WARNING; this module's logger at INFO.
# ---------------------------------------------------------------------------
logging.basicConfig(
    level=logging.WARNING,
    format="%(message)s",
    datefmt="[%X]",
    handlers=[RichHandler()],
)
logger = logging.getLogger("agentframework_learn")
logger.setLevel(logging.INFO)

# Pull configuration from a local .env file, overriding the process environment.
load_dotenv(override=True)

# Pick the chat backend from API_HOST (defaults to GitHub Models).
API_HOST = os.getenv("API_HOST", "github")

if API_HOST == "azure":
    # Azure OpenAI via Entra ID (keyless) auth; deployment details from the env.
    azure_settings = dict(
        credential=DefaultAzureCredential(),
        deployment_name=os.environ.get("AZURE_OPENAI_CHAT_DEPLOYMENT"),
        endpoint=os.environ.get("AZURE_OPENAI_ENDPOINT"),
        api_version=os.environ.get("AZURE_OPENAI_VERSION"),
    )
    client = AzureOpenAIChatClient(**azure_settings)
elif API_HOST == "github":
    # GitHub Models: OpenAI-compatible endpoint authenticated with a GitHub token.
    client = OpenAIChatClient(
        base_url="https://models.github.ai/inference",
        api_key=os.environ["GITHUB_TOKEN"],
        model_id=os.getenv("GITHUB_MODEL", "openai/gpt-4o"),
    )
elif API_HOST == "ollama":
    # Local Ollama server speaking the OpenAI protocol; no real API key needed.
    client = OpenAIChatClient(
        base_url=os.environ.get("OLLAMA_ENDPOINT", "http://localhost:11434/v1"),
        api_key="none",
        model_id=os.environ.get("OLLAMA_MODEL", "llama3.1:latest"),
    )
else:
    # Fallback: openai.com with a standard API key.
    client = OpenAIChatClient(
        api_key=os.environ.get("OPENAI_API_KEY"),
        model_id=os.environ.get("OPENAI_MODEL", "gpt-4o"),
    )
async def http_mcp_example():
    """Answer a Microsoft-docs question with an agent backed by the Learn MCP server.

    Opens a streamable-HTTP connection to the Microsoft Learn MCP endpoint,
    wraps the module-level chat client in a ChatAgent, and prints the agent's
    answer to a fixed documentation query.
    """
    learn_mcp = MCPStreamableHTTPTool(
        name="Microsoft Learn MCP",
        url="https://learn.microsoft.com/api/mcp",
    )
    # Nested context managers: the MCP connection and the agent are both
    # closed automatically, even if the run raises.
    async with learn_mcp as docs_tool:
        async with ChatAgent(
            chat_client=client,
            name="DocsAgent",
            instructions="You help with Microsoft documentation questions.",
        ) as docs_agent:
            response = await docs_agent.run(
                "How to create an Azure storage account using az cli?",
                tools=docs_tool,
            )
            print(response)
# Script entry point: run the example coroutine in a fresh event loop.
if __name__ == "__main__":
    asyncio.run(http_mcp_example())