Eric Houzelle committed
Commit af0dad7 · 1 Parent(s): 5b16d71

Add application files

.history/app_20250731152748.py ADDED
File without changes
.history/app_20250731152751.py ADDED
@@ -0,0 +1,25 @@
+ import gradio as gr
+ from transformers import AutoTokenizer, AutoModelForCausalLM
+ import torch
+
+ # Load your model and tokenizer from the Hub
+ model_name = "eric-houzelle/mermaidGPT"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForCausalLM.from_pretrained(model_name)
+
+ def generate_mermaid(prompt):
+     inputs = tokenizer(prompt, return_tensors="pt")
+     with torch.no_grad():
+         outputs = model.generate(**inputs, max_new_tokens=100)
+     result = tokenizer.decode(outputs[0], skip_special_tokens=True)
+     return result
+
+ iface = gr.Interface(
+     fn=generate_mermaid,
+     inputs=gr.Textbox(lines=2, placeholder="Describe your diagram in English..."),
+     outputs=gr.Code(language="markdown"),
+     title="🧠 mermaidGPT — Mermaid Generator",
+     description="Enter a short English instruction and get a Mermaid.js diagram code."
+ )
+
+ iface.launch()
.history/app_20250731171037.py ADDED
@@ -0,0 +1,25 @@
+ import gradio as gr
+ from transformers import AutoTokenizer, AutoModelForCausalLM
+ import torch
+
+ # Load your model and tokenizer from the Hub
+ model_name = "Houzeric/text-to-mermaid"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForCausalLM.from_pretrained(model_name)
+
+ def generate_mermaid(prompt):
+     inputs = tokenizer(prompt, return_tensors="pt")
+     with torch.no_grad():
+         outputs = model.generate(**inputs, max_new_tokens=100)
+     result = tokenizer.decode(outputs[0], skip_special_tokens=True)
+     return result
+
+ iface = gr.Interface(
+     fn=generate_mermaid,
+     inputs=gr.Textbox(lines=2, placeholder="Describe your diagram in English..."),
+     outputs=gr.Code(language="markdown"),
+     title="🧠 mermaidGPT — Mermaid Generator",
+     description="Enter a short English instruction and get a Mermaid.js diagram code."
+ )
+
+ iface.launch()
.history/app_20250731171040.py ADDED
@@ -0,0 +1,24 @@
+ import gradio as gr
+ from transformers import AutoTokenizer, AutoModelForCausalLM
+ import torch
+
+ model_name = "Houzeric/text-to-mermaid"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForCausalLM.from_pretrained(model_name)
+
+ def generate_mermaid(prompt):
+     inputs = tokenizer(prompt, return_tensors="pt")
+     with torch.no_grad():
+         outputs = model.generate(**inputs, max_new_tokens=100)
+     result = tokenizer.decode(outputs[0], skip_special_tokens=True)
+     return result
+
+ iface = gr.Interface(
+     fn=generate_mermaid,
+     inputs=gr.Textbox(lines=2, placeholder="Describe your diagram in English..."),
+     outputs=gr.Code(language="markdown"),
+     title="🧠 mermaidGPT — Mermaid Generator",
+     description="Enter a short English instruction and get a Mermaid.js diagram code."
+ )
+
+ iface.launch()
.history/requirements_20250731152804.txt ADDED
File without changes
.history/requirements_20250731152806.txt ADDED
@@ -0,0 +1,3 @@
+ gradio
+ transformers
+ torch
app.py ADDED
@@ -0,0 +1,24 @@
+ import gradio as gr
+ from transformers import AutoTokenizer, AutoModelForCausalLM
+ import torch
+
+ model_name = "Houzeric/text-to-mermaid"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForCausalLM.from_pretrained(model_name)
+
+ def generate_mermaid(prompt):
+     inputs = tokenizer(prompt, return_tensors="pt")
+     with torch.no_grad():
+         outputs = model.generate(**inputs, max_new_tokens=100)
+     result = tokenizer.decode(outputs[0], skip_special_tokens=True)
+     return result
+
+ iface = gr.Interface(
+     fn=generate_mermaid,
+     inputs=gr.Textbox(lines=2, placeholder="Describe your diagram in English..."),
+     outputs=gr.Code(language="markdown"),
+     title="🧠 mermaidGPT — Mermaid Generator",
+     description="Enter a short English instruction and get a Mermaid.js diagram code."
+ )
+
+ iface.launch()
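
Note: app.py wraps generate_mermaid in a single gr.Interface endpoint. The sketch below is a minimal illustration of how that endpoint could be queried programmatically once the app is running locally; the gradio_client dependency, the local URL, the default /predict endpoint name, and the example prompt are assumptions and are not part of this commit.

# Illustrative sketch only: gradio_client, the local URL, the /predict
# endpoint name, and the prompt below are assumptions, not part of this commit.
from gradio_client import Client

client = Client("http://127.0.0.1:7860")  # default local address used by iface.launch()
mermaid_code = client.predict(
    "Draw a flowchart of a user logging into a website",  # hypothetical prompt
    api_name="/predict",  # default endpoint name for a single gr.Interface
)
print(mermaid_code)
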
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ gradio
+ transformers
+ torch