Abid Ali Awan committed on
Commit
43d984b
·
1 Parent(s): 030e4b6

feat: Migrate agent to streamable HTTP, streamline app with environment variable configuration and a unified chat/file upload UI, and add dev utility files.

Browse files
Files changed (5) hide show
  1. .gitignore +2 -0
  2. agent.py +6 -5
  3. app.py +50 -72
  4. requirements.txt +4 -4
  5. test_warning.py +8 -0
.gitignore ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ .venv
2
+ __pycache__
agent.py CHANGED
@@ -1,7 +1,7 @@
1
  import os
2
  import asyncio
3
  from typing import Any, Optional
4
- from mcp.client.sse import sse_client
5
  from mcp.client.session import ClientSession
6
  from agents import Agent, Runner
7
  import openai
@@ -9,6 +9,7 @@ import openai
9
  # Define the MCP Server URL
10
  MCP_SERVER_URL = "https://mcp-1st-birthday-auto-deployer.hf.space/gradio_api/mcp/"
11
 
 
12
  def __init__(self, provider: str, api_key: str, mcp_server_url: str, base_url: Optional[str] = None):
13
  self.provider = provider
14
  self.api_key = api_key
@@ -20,10 +21,10 @@ MCP_SERVER_URL = "https://mcp-1st-birthday-auto-deployer.hf.space/gradio_api/mcp
20
 
21
  async def initialize(self):
22
  """Connects to the MCP server and initializes the OpenAI Agent."""
23
- # 1. Connect to MCP Server via SSE
24
- self.sse_context = sse_client(self.mcp_server_url)
25
  self.transport = await self.sse_context.__aenter__()
26
- self.session = ClientSession(self.transport, self.transport)
27
  await self.session.__aenter__()
28
  await self.session.initialize()
29
 
@@ -60,7 +61,7 @@ MCP_SERVER_URL = "https://mcp-1st-birthday-auto-deployer.hf.space/gradio_api/mcp
60
  name="MLOps Agent",
61
  instructions="You are an expert MLOps agent. You have access to tools for analyzing data, training models, and deploying them. Use them to help the user.",
62
  tools=agent_tools,
63
- model="gpt-4o", # Default, can be overridden if needed
64
  )
65
 
66
  async def chat(self, user_message: str):
 
1
  import os
2
  import asyncio
3
  from typing import Any, Optional
4
+ from mcp.client.streamable_http import streamablehttp_client
5
  from mcp.client.session import ClientSession
6
  from agents import Agent, Runner
7
  import openai
 
9
  # Define the MCP Server URL
10
  MCP_SERVER_URL = "https://mcp-1st-birthday-auto-deployer.hf.space/gradio_api/mcp/"
11
 
12
+ class MCPAgent:
13
  def __init__(self, provider: str, api_key: str, mcp_server_url: str, base_url: Optional[str] = None):
14
  self.provider = provider
15
  self.api_key = api_key
 
21
 
22
  async def initialize(self):
23
  """Connects to the MCP server and initializes the OpenAI Agent."""
24
+ # 1. Connect to MCP Server via Streamable HTTP
25
+ self.sse_context = streamablehttp_client(self.mcp_server_url)
26
  self.transport = await self.sse_context.__aenter__()
27
+ self.session = ClientSession(self.transport[0], self.transport[1])
28
  await self.session.__aenter__()
29
  await self.session.initialize()
30
 
 
61
  name="MLOps Agent",
62
  instructions="You are an expert MLOps agent. You have access to tools for analyzing data, training models, and deploying them. Use them to help the user.",
63
  tools=agent_tools,
64
+ model="gpt-5-mini-2025-08-07", # Default, can be overridden if needed
65
  )
66
 
67
  async def chat(self, user_message: str):
app.py CHANGED
@@ -2,101 +2,79 @@ import gradio as gr
2
  import asyncio
3
  from agent import MCPAgent
4
  import os
 
5
 
6
- # Global variable to store agent instances per session (simplified for demo)
7
- # In a real production app, you'd want more robust session management.
 
 
 
 
 
 
8
  agents = {}
9
 
10
- async def get_or_create_agent(session_id, provider, api_key, mcp_server_url, base_url):
11
  if session_id not in agents:
12
- agent = MCPAgent(provider, api_key, mcp_server_url, base_url)
 
 
 
13
  await agent.initialize()
14
  agents[session_id] = agent
15
  return agents[session_id]
16
 
17
- async def chat_fn(message, history, provider, api_key, mcp_server_url, base_url, request: gr.Request):
18
  if not message:
19
  return history
20
 
21
- if not api_key:
22
- raise gr.Error("Please enter an API Key.")
23
-
24
- session_id = request.session_hash
25
- try:
26
- agent = await get_or_create_agent(session_id, provider, api_key, mcp_server_url, base_url)
27
- response = await agent.chat(message)
28
- history.append((message, str(response)))
29
  return history, ""
30
- except Exception as e:
31
- raise gr.Error(f"Error: {str(e)}")
32
 
33
- async def one_shot_fn(file_obj, provider, api_key, mcp_server_url, base_url, request: gr.Request):
34
- if not api_key:
35
- raise gr.Error("Please enter an API Key.")
36
-
37
  session_id = request.session_hash
38
  try:
39
- agent = await get_or_create_agent(session_id, provider, api_key, mcp_server_url, base_url)
 
 
 
 
 
 
 
 
 
 
 
 
 
40
 
41
- # Construct the prompt
42
- prompt = "Please perform the full MLOps pipeline: Analyze the data, train a model, deploy it, test the API, and provide a final report."
43
- if file_obj:
44
- # In a real scenario, we would upload this file using the web_auto_deploy tool
45
- # For now, we pass the path if the agent can handle local paths, or we need to upload it.
46
- # The prompt implies the agent uses tools.
47
- prompt += f" Use this file: {file_obj.name}"
48
 
49
- response = await agent.chat(prompt)
50
- return str(response)
 
 
 
51
  except Exception as e:
52
- return f"Error: {str(e)}"
 
 
53
 
54
  with gr.Blocks(title="MCP MLOps Agent") as demo:
55
  gr.Markdown("# 🤖 MCP MLOps Agent")
56
  gr.Markdown("Powered by OpenAI Agents SDK & MCP 1st Birthday Hackathon Tools")
57
 
58
- with gr.Sidebar():
59
- provider = gr.Dropdown(
60
- choices=["OpenAI", "OpenAI Compatible"],
61
- value="OpenAI",
62
- label="Provider"
63
- )
64
- api_key = gr.Textbox(
65
- label="API Key",
66
- type="password",
67
- placeholder="sk-..."
68
- )
69
- mcp_server_url = gr.Textbox(
70
- label="MCP Server URL",
71
- value="https://mcp-1st-birthday-auto-deployer.hf.space/gradio_api/mcp/",
72
- placeholder="https://..."
73
- )
74
- base_url = gr.Textbox(
75
- label="Base URL (for Compatible Providers)",
76
- placeholder="https://api.nebius.ai/v1",
77
- visible=False
78
- )
79
-
80
- def update_base_url(p):
81
- return gr.update(visible=(p == "OpenAI Compatible"))
82
-
83
- provider.change(update_base_url, inputs=provider, outputs=base_url)
84
-
85
- with gr.Tabs():
86
- with gr.Tab("💬 Chat"):
87
- chatbot = gr.Chatbot(height=600)
88
- msg = gr.Textbox(label="Message")
89
- clear = gr.ClearButton([msg, chatbot])
90
-
91
- msg.submit(chat_fn, [msg, chatbot, provider, api_key, mcp_server_url, base_url], [chatbot, msg])
92
-
93
- with gr.Tab("🚀 One-Shot Workflow"):
94
- gr.Markdown("Upload a dataset and let the agent handle the rest: Analyze -> Train -> Deploy -> Test -> Report")
95
- file_input = gr.File(label="Upload Dataset (CSV)")
96
- run_btn = gr.Button("Run Full Pipeline", variant="primary")
97
- output_md = gr.Markdown(label="Agent Report")
98
-
99
- run_btn.click(one_shot_fn, [file_input, provider, api_key, mcp_server_url, base_url], [output_md])
100
 
101
  if __name__ == "__main__":
102
  demo.queue().launch()
 
2
  import asyncio
3
  from agent import MCPAgent
4
  import os
5
+ from dotenv import load_dotenv
6
 
7
+ load_dotenv()
8
+
9
+ # Configuration from Environment Variables
10
+ OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
11
+ MCP_SERVER_URL = os.getenv("MCP_SERVER_URL", "https://mcp-1st-birthday-auto-deployer.hf.space/gradio_api/mcp/")
12
+ PROVIDER = "OpenAI" # Default to OpenAI
13
+
14
+ # Global variable to store agent instances per session
15
  agents = {}
16
 
17
+ async def get_or_create_agent(session_id):
18
  if session_id not in agents:
19
+ if not OPENAI_API_KEY:
20
+ raise gr.Error("OPENAI_API_KEY environment variable is not set.")
21
+
22
+ agent = MCPAgent(PROVIDER, OPENAI_API_KEY, MCP_SERVER_URL)
23
  await agent.initialize()
24
  agents[session_id] = agent
25
  return agents[session_id]
26
 
27
+ async def chat_fn(message, history, file_obj, request: gr.Request):
28
  if not message:
29
  return history
30
 
31
+ if not file_obj:
32
+ # Enforce file upload
33
+ history.append({"role": "user", "content": message})
34
+ history.append({"role": "assistant", "content": "Please upload a CSV file first."})
 
 
 
 
35
  return history, ""
 
 
36
 
 
 
 
 
37
  session_id = request.session_hash
38
  try:
39
+ agent = await get_or_create_agent(session_id)
40
+
41
+ # Pass file path context if needed, or just assume agent knows tools
42
+ # For now, we append the file path to the message if it's the first interaction or just let the agent handle it via tools.
43
+ # The user said "ask user to load the CSV file first and then let it type the question".
44
+ # We can prepend the file info to the message invisibly or just rely on the agent having access to the file via a tool if we uploaded it.
45
+ # But wait, the agent needs to know WHERE the file is.
46
+ # The previous `one_shot_fn` implied using `upload_file_to_gradio` tool.
47
+ # Since we are in a chat loop, we might need to tell the agent about the file once.
48
+
49
+ # Let's just pass the message as is, but maybe prepend context about the file if it's new?
50
+ # Or better: The agent has tools. One tool is `Auto_Deployer_web_analyze`.
51
+ # It takes a file path.
52
+ # We should probably tell the agent: "I have uploaded a file at {file_obj.name}. {message}"
53
 
54
+ full_message = f"I have uploaded a file at {file_obj.name}. {message}"
 
 
 
 
 
 
55
 
56
+ response = await agent.chat(full_message)
57
+
58
+ history.append({"role": "user", "content": message})
59
+ history.append({"role": "assistant", "content": str(response)})
60
+ return history, ""
61
  except Exception as e:
62
+ history.append({"role": "user", "content": message})
63
+ history.append({"role": "assistant", "content": f"Error: {str(e)}"})
64
+ return history, ""
65
 
66
  with gr.Blocks(title="MCP MLOps Agent") as demo:
67
  gr.Markdown("# 🤖 MCP MLOps Agent")
68
  gr.Markdown("Powered by OpenAI Agents SDK & MCP 1st Birthday Hackathon Tools")
69
 
70
+ with gr.Row():
71
+ file_input = gr.File(label="Upload Dataset (CSV)")
72
+
73
+ chatbot = gr.Chatbot(height=600, type="messages", allow_tags=False)
74
+ msg = gr.Textbox(label="Message", placeholder="Type your message here...")
75
+ clear = gr.ClearButton([msg, chatbot, file_input])
76
+
77
+ msg.submit(chat_fn, [msg, chatbot, file_input], [chatbot, msg])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
78
 
79
  if __name__ == "__main__":
80
  demo.queue().launch()
requirements.txt CHANGED
@@ -1,4 +1,4 @@
1
- gradio
2
- openai
3
- mcp
4
- agents-sdk
 
1
+ gradio==5.50.0
2
+ openai==2.8.1
3
+ mcp==1.10.1
4
+ openai-agents==0.1.0
test_warning.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+
3
+ with gr.Blocks() as demo:
4
+ chatbot = gr.Chatbot(type="messages", allow_tags=False)
5
+
6
+ if __name__ == "__main__":
7
+ print("Running demo...")
8
+ # demo.launch()