import gradio as gr
import requests
import os

# Hugging Face Inference API endpoint and token
API_URL = "https://api-inference.huggingface.co/models/gpt2"
API_TOKEN = os.getenv("HUGGINGFACE_API_TOKEN")  # Set the HUGGINGFACE_API_TOKEN environment variable to your Hugging Face API token


# Chatbot function using the Hugging Face Inference API
def ielts_chat(user_input, part):
    prompt = (
        "You are an IELTS tutor. Answer the following question in detail "
        "as if you were helping a student prepare for the IELTS Speaking test.\n\n"
        f"Part: {part}\nQuestion: {user_input}\n\nAnswer:"
    )
    response = requests.post(
        API_URL,
        headers={"Authorization": f"Bearer {API_TOKEN}"},
        json={"inputs": prompt},
    )
    if response.status_code == 200:
        generated = response.json()[0]["generated_text"]
        # GPT-2 typically echoes the prompt in its output, so return only the completion.
        if generated.startswith(prompt):
            generated = generated[len(prompt):]
        return generated.strip()
    else:
        return f"Error {response.status_code}: {response.text}"


# Gradio interface
iface = gr.Interface(
    fn=ielts_chat,
    inputs=[
        gr.Textbox(label="Ask a question related to IELTS Speaking"),
        gr.Radio(["Part 1", "Part 2", "Part 3"], label="Choose IELTS Part"),
    ],
    outputs="text",
    title="IELTS Speaking Practice Bot",
    description=(
        "Ask a question related to IELTS Speaking, and the bot will respond as if it "
        "were an IELTS tutor using AI-generated responses from Hugging Face's GPT-2 model."
    ),
)

iface.launch()
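
# Optional: a minimal sketch for smoke-testing ielts_chat directly, without launching the Gradio UI.
# Assumes HUGGINGFACE_API_TOKEN is set in the environment; the sample question below is only illustrative.
# Uncomment to try (and comment out iface.launch() above so the script doesn't block first):
# print(ielts_chat("Describe your hometown.", "Part 1"))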