KartikJoshiUK committed on
Commit
314cc33
·
1 Parent(s): 8fc98d5

Initial UI

Files changed (1)
  1. app.py +288 -66
app.py CHANGED
@@ -1,70 +1,292 @@
 import gradio as gr
-from huggingface_hub import InferenceClient
-
-
-def respond(
-    message,
-    history: list[dict[str, str]],
-    system_message,
-    max_tokens,
-    temperature,
-    top_p,
-    hf_token: gr.OAuthToken,
-):
-    """
-    For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
-    """
-    client = InferenceClient(token=hf_token.token, model="openai/gpt-oss-20b")
-
-    messages = [{"role": "system", "content": system_message}]
-
-    messages.extend(history)
-
-    messages.append({"role": "user", "content": message})
-
-    response = ""
-
-    for message in client.chat_completion(
-        messages,
-        max_tokens=max_tokens,
-        stream=True,
-        temperature=temperature,
-        top_p=top_p,
-    ):
-        choices = message.choices
-        token = ""
-        if len(choices) and choices[0].delta.content:
-            token = choices[0].delta.content
-
-        response += token
-        yield response
-
-
-"""
-For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
-"""
-chatbot = gr.ChatInterface(
-    respond,
-    type="messages",
-    additional_inputs=[
-        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
-        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-        gr.Slider(
-            minimum=0.1,
-            maximum=1.0,
-            value=0.95,
-            step=0.05,
-            label="Top-p (nucleus sampling)",
-        ),
-    ],
-)
-
-with gr.Blocks() as demo:
-    with gr.Sidebar():
-        gr.LoginButton()
-    chatbot.render()
 
+import requests
+import time
+import json
+import re
+from urllib.parse import quote
 
+API_URL = "http://154.201.126.27:3000"
+client = requests.Session()
+
+session_ok = False
+tools_ok = False
+initialized = False
+
+pending_requests = []  # stores pending tool requests for UI
+pending_auth_token = ""  # stores auth token for pending approvals
+
+# Demo mode - pre-fills fields for quick testing
+DEMO_MODE = True
+
+DEMO_SYSTEM_PROMPT = """You are a helpful AI assistant for a banking and task management API.
+You can check balances, view transactions, transfer money, and manage tasks.
+Be friendly, concise, and always confirm before taking actions."""
+
+DEMO_ENV_VARS = """{
+  "API_BASE_URL": "http://localhost:8000"
+}"""
+
+
+def start_session():
+    global session_ok
+    try:
+        r = client.get(f"{API_URL}/session")
+        if r.status_code == 200:
+            session_ok = True
+            return "✅ **Session started!** You can now upload your Postman collection."
+        return f"❌ **Failed to start session:** {r.text}\n\n*Make sure the backend server is running on {API_URL}*"
+    except Exception as e:
+        return f"❌ **Connection Error:** Could not connect to backend server.\n\n**Fix:** Run `cd demo/server && npm start` first.\n\n**Details:** {str(e)}"
+
+
+def upload_tools(file):
+    global tools_ok
+    if not session_ok:
+        return "⚠️ **Please start the session first** by clicking the 'Start Session' button above."
+    try:
+        # Open the collection in a context manager so the file handle is closed after upload
+        with open(file, "rb") as f:
+            r = client.post(f"{API_URL}/tools", files={"api": f})
+        if r.status_code == 200:
+            tools_ok = True
+            try:
+                data = r.json()
+                tool_count = data.get("toolCount", "multiple")
+                return f"✅ **Success!** Generated **{tool_count} tools** from your API collection.\n\n➡️ Now you can initialize the agent below."
+            except Exception:
+                return "✅ **Tools uploaded & generated successfully!**\n\n➡️ Now you can initialize the agent below."
+        return f"❌ **Upload failed:** {r.text}"
+    except Exception as e:
+        return f"❌ **Error:** {str(e)}"
+
+
+def initialize(system_prompt, env_json):
+    global initialized
+    if not (session_ok and tools_ok):
+        return "⚠️ **Please upload your Postman collection first!**"
+
+    try:
+        parsed_env = json.loads(env_json) if env_json.strip() else {}
+    except Exception as e:
+        return f"❌ **Invalid JSON in Environment Variables:**\n```\n{str(e)}\n```\n\nExpected format: `{{\"KEY\": \"value\"}}`"
+
+    body = {
+        "systemIntructions": system_prompt,
+        "envVariables": parsed_env
+    }
+    try:
+        r = client.post(f"{API_URL}/initialize", json=body)
+        if r.status_code == 200:
+            initialized = True
+            return "✅ **Agent initialized successfully!**\n\n🎉 You can now start chatting below!"
+        return f"❌ **Initialization failed:** {r.text}"
+    except Exception as e:
+        return f"❌ **Error:** {str(e)}"
+
+
+def detect_chart_url(text):
+    quickchart_pattern = r'(https://quickchart\.io/chart\?[^\s\)]+)'
+    match = re.search(quickchart_pattern, text)
+    return match.group(1) if match else None
+
+
+def format_response_with_chart(message):
+    chart_url = detect_chart_url(message)
+    if chart_url:
+        return message + f"\n\n![Chart Visualization]({chart_url})", chart_url
+    return message, None
+
+
+def call_query(message, auth_token):
+    headers = {"Authorization": f"Bearer {auth_token}"} if auth_token else {}
+    params = {"query": message}
+
+    try:
+        # FIXED: endpoint must be /query
+        r = client.get(f"{API_URL}/query", params=params, headers=headers, timeout=30)
+        body = r.json()
+        return {"message": body.get("message", ""), "pending": body.get("data", [])}
+    except requests.exceptions.Timeout:
+        return {"message": "⏱️ Request timed out. The API might be slow or unresponsive.", "pending": []}
+    except Exception as e:
+        return {"message": f"❌ Error: {str(e)}", "pending": []}
+
+
+def chat_send(message, history, auth_token):
+    global pending_requests, pending_auth_token
+
+    if not initialized:
+        history.append((None, "⚠️ **Please initialize the agent first!**\n\nGo to **STEP 1** above and complete the setup."))
+        # chat_send is a generator, so emit outputs with yield instead of returning them
+        yield history, "", gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
+        return
+
+    if not message.strip():
+        yield history, "", gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
+        return
+
+    # FIXED: Add user message as tuple
+    history.append((message, None))
+
+    # Show loading
+    history[-1] = (message, "🔄 *Thinking and calling APIs...*")
+    yield history, "", gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
+
+    result = call_query(message, auth_token)
+
+    # remove loading message safely
+    history[-1] = (message, None)
+
+    if result["pending"]:
+        pending_requests = result["pending"]
+        pending_auth_token = auth_token
+
+        approvals_msg = "### ⚠️ APPROVAL REQUIRED\n\nThe following action(s) need your permission:\n\n"
+        for i, p in enumerate(result["pending"], 1):
+            approvals_msg += f"**{i}. Tool:** `{p['name']}`\n"
+            if 'args' in p:
+                approvals_msg += f"**Arguments:**\n```json\n{json.dumps(p['args'], indent=2)}\n```\n\n"
+        approvals_msg += "**👇 Use the buttons below to approve or reject:**"
+
+        # FIXED: approval message
+        history[-1] = (message, approvals_msg)
+
+        yield history, "", gr.update(visible=True), gr.update(visible=True), gr.update(visible=True)
+    else:
+        formatted_message, chart_url = format_response_with_chart(result["message"])
+        # FIXED: assistant reply
+        history[-1] = (message, formatted_message)
+
+        yield history, "", gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
+
+
+def send_approval(approved, history):
+    global pending_requests, pending_auth_token
+
+    if not pending_requests:
+        return "⚠️ No pending approvals", history, gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
+
+    headers = {"Authorization": f"Bearer {pending_auth_token}"} if pending_auth_token else {}
+    approval_data = [{"toolCallId": p["id"], "approved": approved} for p in pending_requests]
+
+    try:
+        r = client.post(f"{API_URL}/approval", json=approval_data, headers=headers)
+        body = r.json()
+        message = body.get("message", "Done")
+        pending_requests = []
+
+        # FIXED: approval result message
+        history.append((None, message))
+
+        return message, history, gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
+    except Exception as e:
+        err = f"❌ Error: {str(e)}"
+        history.append((None, err))
+        return err, history, gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
+
+
+def reset_chat():
+    global session_ok, tools_ok, initialized, pending_requests, pending_auth_token
+    try:
+        client.delete(API_URL)
+    except Exception:
+        pass
+    session_ok = tools_ok = initialized = False
+    pending_requests = []
+    pending_auth_token = ""
+    return [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
+
+
+# --- UI (unchanged below) ----------------------------------------------------
+with gr.Blocks(title="FluidTools - AI-Powered API Agent") as demo:
+
+    gr.HTML("""
+    <div style="text-align: center; padding: 30px; background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); color: white; border-radius: 10px; margin-bottom: 30px;">
+        <h1 style="margin: 0; font-size: 2.5em;">🤖 FluidTools</h1>
+        <p style="margin: 10px 0 0 0; font-size: 1.2em;">Turn any Postman API collection into an AI-powered chatbot</p>
+    </div>
+    """)
+
+    gr.Markdown("""
+    ### 🎯 What This Demo Does:
+    1. **Upload** your Postman collection (JSON export from Postman)
+    2. **Initialize** the AI agent with your API tools
+    3. **Chat** with your APIs using natural language - no coding required!
+
+    ---
+    """)
+
+    with gr.Accordion("🚀 STEP 1 — Initialize System", open=True):
+        step1_status = gr.Markdown("""
+        **Current Status:** 🔴 Not started
+
+        **Instructions:**
+        1. Click **"Start Session"** to begin
+        2. Upload your Postman collection JSON file
+        3. (Optional) Customize the system prompt
+        4. (Optional) Add API keys/environment variables
+        5. Click **"Initialize Agent"** to start
+        """)
+
+        with gr.Row():
+            start_btn = gr.Button("🧪 Start Session", variant="primary", size="lg")
+
+        gr.Markdown("---")
+        gr.Markdown("**📁 Upload Your Postman Collection**")
+        gr.Markdown("*Export your collection from Postman as JSON (Collection v2.1)*")
+
+        tool_file = gr.File(file_types=[".json"], label="Postman Collection File", file_count="single")
+
+        gr.Markdown("---")
+        gr.Markdown("**⚙️ Configure Agent (Optional)**")
+
+        sys_prompt = gr.Textbox(
+            label="System Prompt - Describe how the AI should behave",
+            value=DEMO_SYSTEM_PROMPT if DEMO_MODE else "",
+            lines=4
+        )
+
+        env_vars = gr.Textbox(
+            label="Environment Variables (JSON format)",
+            value=DEMO_ENV_VARS if DEMO_MODE else "",
+            lines=4
+        )
+
+        with gr.Row():
+            init_btn = gr.Button("🚀 Initialize Agent", variant="primary", interactive=False, size="lg")
+
+    gr.Markdown("---")
+    with gr.Accordion("💬 STEP 2 — Chat with Your API", open=True):
+
+        auth_box = gr.Textbox(label="🔐 Authentication Token (optional)", type="password")
+        chat = gr.Chatbot(height=500, avatar_images=(None, "🤖"))
+
+        msg = gr.Textbox(label="Your Message", lines=2, max_lines=5)
+
+        with gr.Row():
+            send = gr.Button("📤 Send", variant="primary")
+            reset = gr.Button("🔄 Reset Conversation")
+
+        approval_section = gr.Markdown("### 🔐 Pending Approvals", visible=False)
+        with gr.Row():
+            approve_btn = gr.Button("✅ Approve All", visible=False)
+            reject_btn = gr.Button("❌ Reject All", visible=False)
+
+        approval_result = gr.Textbox(label="Approval Result", visible=False, interactive=False)
+
+    def update_buttons():
+        return gr.update(interactive=session_ok), gr.update(interactive=(session_ok and tools_ok))
+
+    start_btn.click(start_session, None, step1_status).then(update_buttons, None, [tool_file, init_btn])
+    tool_file.upload(upload_tools, tool_file, step1_status).then(update_buttons, None, [tool_file, init_btn])
+    init_btn.click(initialize, [sys_prompt, env_vars], step1_status)
+
+    send.click(chat_send, [msg, chat, auth_box], [chat, msg, approval_section, approve_btn, reject_btn])
+    msg.submit(chat_send, [msg, chat, auth_box], [chat, msg, approval_section, approve_btn, reject_btn])
+
+    approve_btn.click(send_approval, [gr.State(True), chat],
+                      [approval_result, chat, approval_section, approve_btn, reject_btn])
+
+    reject_btn.click(send_approval, [gr.State(False), chat],
+                     [approval_result, chat, approval_section, approve_btn, reject_btn])
+
+    reset.click(reset_chat, None, [chat, approval_section, approve_btn, reject_btn])
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch(server_port=7860, share=False, show_error=True)
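
For quick smoke-testing outside the Gradio UI, the same backend flow can be exercised directly with `requests`. This is a minimal sketch that simply mirrors the calls app.py makes above (GET /session, POST /tools with a multipart field named "api", POST /initialize, GET /query, POST /approval, DELETE /); the base URL and the `collection.json` path are placeholder assumptions, and the JSON key names are copied from the code rather than from any backend documentation.

```python
import requests

BASE = "http://localhost:3000"  # assumption: the demo/server backend running locally
s = requests.Session()

# 1. Start a session
print(s.get(f"{BASE}/session").status_code)

# 2. Upload a Postman collection (v2.1 JSON export); field name "api" as in app.py
with open("collection.json", "rb") as f:  # placeholder path
    print(s.post(f"{BASE}/tools", files={"api": f}).json())

# 3. Initialize the agent (key names mirror app.py, including "systemIntructions")
init_body = {"systemIntructions": "You are a helpful assistant.", "envVariables": {}}
print(s.post(f"{BASE}/initialize", json=init_body).status_code)

# 4. Ask a question; pending tool calls come back under "data"
r = s.get(f"{BASE}/query", params={"query": "What is my balance?"}).json()
print(r.get("message"), r.get("data"))

# 5. Approve (or reject) any pending tool calls
pending = r.get("data") or []
if pending:
    approvals = [{"toolCallId": p["id"], "approved": True} for p in pending]
    print(s.post(f"{BASE}/approval", json=approvals).json())

# 6. Reset the backend session
s.delete(BASE)
```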