InfiniteValueMe committed
Commit 8de3e88 · verified · 1 parent: 551d369

Update app.py

Files changed (1)
  1. app.py +46 -54
app.py CHANGED
@@ -3,133 +3,125 @@ from supabase import create_client
 import requests, tempfile
 import gradio as gr
 from llama_cpp import Llama
+from typing import List, Dict
 
-#Model Download & Load
+# ---------------- Model Setup ----------------
 MODEL_URL = "https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GGUF/resolve/main/mistral-7b-instruct-v0.1.Q4_K_M.gguf"
-#MODEL_URL = "https://huggingface.co/TheBloke/bling-stable-lm-3b-4e1t-v0-GGUF/resolve/main/bling-stable-lm-3b-4e1t-v0.Q4_K_M.gguf"
 
 llm = None
 def get_model():
     global llm
     if llm is None:
-        print("Downloading model")
+        print("Downloading model...")
         response = requests.get(MODEL_URL, stream=True, timeout=600)
         response.raise_for_status()
-
         with tempfile.NamedTemporaryFile(delete=False, suffix=".gguf") as tmp_file:
             for chunk in response.iter_content(chunk_size=8192):
                 tmp_file.write(chunk)
             model_path = tmp_file.name
-
-        print("Loading model into memory")
+        print("Loading model into memory...")
         llm = Llama(model_path=model_path, n_ctx=512, n_threads=2)
     return llm
 
-#Supabase setup to store data in db
+# ---------------- Supabase Setup ----------------
 SUPABASE_URL = os.getenv("SUPABASE_URL")
 SUPABASE_KEY = os.getenv("SUPABASE_KEY")
-
 supabase = create_client(SUPABASE_URL, SUPABASE_KEY)
 
 def save_to_supabase(user_id, prompt, response, model, tokens=None):
     try:
         supabase.table("ai_logs").insert({
-            "user_id": user_id,
-            "prompt": prompt,
-            "response": response,
-            "model": model,
-            "tokens": tokens
+            "user_id": user_id,
+            "prompt": prompt,
+            "response": response,
+            "model": model,
+            "tokens": tokens
         }).execute()
     except Exception as e:
         print("Supabase insert failed:", e)
 
-#Chat Function
-MAX_HISTORY = 4
+# ---------------- Chat Function ----------------
+MAX_HISTORY = 4
 
-def chat_with_infinite_agent(message, lang, history):
+def chat_with_infinite_agent(message: str, lang: str = "English", history: List[Dict[str, str]] = None):
     history = history or []
-
-    #Keep only the last MAX_HISTORY items
     history = history[-MAX_HISTORY:]
 
-    #Build context from previous messages (trimmed for stability)
+    # Build context
     context_lines = []
     for m, a in zip(history[::2], history[1::2]):
         if m['role']=='user' and a['role']=='assistant':
-            #Trim very long messages to reduce memory
-            user_msg = m['content'][:300]
-            assistant_msg = a['content'][:300]
-            context_lines.append(f"User: {user_msg}\nAgent: {assistant_msg}")
+            context_lines.append(f"User: {m['content'][:300]}\nAgent: {a['content'][:300]}")
     context = "\n".join(context_lines)
 
-    #Language adjustment
+    # Language adjustment
     if lang == "Svenska":
         message = f"Svara på svenska: {message}"
     elif lang == "Türkçe":
         message = f"Türkçe cevapla: {message}"
 
-    #Full prompt with your Human Design & Gene Keys
     prompt = f"""
-    You are Infinite Agent — the AI embodiment of Tugce Ozdeger,
-    a 5/1 Emotional Manifestor whose wisdom flows through Gene Keys:
-    12, 22, 11, 37, 21, 61, 31, 39, 46, and 25.
-
-    Guide others toward emotional clarity, self-worth, and life direction
-    with empathy, depth, and grace.
+    You are Infinite Agent — the AI embodiment of Tugce Ozdeger,
+    a 5/1 Emotional Manifestor whose wisdom flows through Gene Keys:
+    12, 22, 11, 37, 21, 61, 31, 39, 46, and 25.
 
-    {context}
-    User: {message}
-    Agent:
-    """
+    Guide others toward emotional clarity, self-worth, and life direction
+    with empathy, depth, and grace.
 
-    #Generate response safely
+    {context}
+    User: {message}
+    Agent:
+    """
     try:
-        llm = get_model()
-        response = llm(prompt, max_tokens=512)
+        llm_model = get_model()
+        response = llm_model(prompt, max_tokens=512)
         output = response["choices"][0]["text"].strip()
+        tokens_used = response.get("usage", {}).get("total_tokens", None)
     except Exception as e:
         print("LLM error:", e)
         output = "⚠️ Sorry, the model failed to respond. Please try again."
+        tokens_used = None
 
-    #Append new messages to history
+    # Update history
     history.append({"role": "user", "content": message})
     history.append({"role": "assistant", "content": output})
 
-    save_to_supabase(
-        user_id="anonymous",
-        prompt=message,
-        response=output,
-        model=MODEL_URL,
-        tokens=len(message)
-    )
-    return history, history
+    save_to_supabase("anonymous", message, output, MODEL_URL, tokens_used)
+    return output, history
 
-#Gradio UI
+# ---------------- API Function ----------------
+def api_predict(message: str, lang: str = "English", history: List[Dict[str, str]] = None) -> dict:
+    output, updated_history = chat_with_infinite_agent(message, lang, history or [])
+    return {"response": output, "history": updated_history}
+
+# ---------------- Gradio UI ----------------
 css_path = os.path.join(os.path.dirname(__file__), "style.css")
 
 with gr.Blocks(title="Infinite Agent", css=open(css_path).read()) as demo:
-    #Avatar image
+    # Avatar & header
     gr.Image("avatar.png", elem_id="agent-avatar")
-
-    #Header
     gr.Markdown(
         "### 🌙 Infinite Agent — Emotional Clarity & Life Direction\n"
        "_Guided by Tugce Ozdeger’s Human Design & Gene Keys_",
         elem_classes="header-text"
     )
 
-    #Language selector
+    # Language selector
     language = gr.Dropdown(["English", "Svenska", "Türkçe"], label="Choose language", value="English")
 
-    #Chatbot
+    # Chatbot
     chatbot = gr.Chatbot(type="messages", value=[
         {"role": "system", "content": "🌙 Welcome to Infinite Agent — your guide to emotional clarity, self-worth, and life direction."}
     ])
-
     msg = gr.Textbox(placeholder="Ask about your emotions, direction, or purpose...")
     clear = gr.Button("Clear")
 
+    # Actions
     msg.submit(chat_with_infinite_agent, [msg, language, chatbot], [chatbot, chatbot])
     clear.click(lambda: [], None, chatbot, queue=False)
 
+    # Expose API endpoint
+    gr.api(api_predict, api_name="/api/predict")
+
+    # Launch the app
 demo.launch()
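
The commit registers api_predict through gr.api(...), so the Space exposes a named endpoint alongside the chat UI. Below is a minimal client-side sketch using gradio_client; the Space id is a placeholder, and the exact api_name string to pass depends on how this Gradio version registers the name given to gr.api, so treat both as assumptions rather than the committed behaviour.

# Client-side sketch (assumptions: the Space id is a placeholder, and the
# endpoint name mirrors the api_name passed to gr.api in app.py).
from gradio_client import Client

client = Client("InfiniteValueMe/infinite-agent")  # hypothetical Space id

result = client.predict(
    "How do I find more clarity in my work?",  # message
    "English",                                 # lang
    [],                                        # history: list of {"role": ..., "content": ...} dicts
    api_name="/api/predict",
)
print(result)  # expected shape: {"response": "...", "history": [...]}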