# ASL alphabet classification API — FastAPI service wrapping a Hugging Face
# SigLIP image-classification model.
| from fastapi import FastAPI, File, UploadFile | |
| from transformers import AutoImageProcessor, SiglipForImageClassification | |
| from PIL import Image | |
| import torch | |
| import io | |
# Hugging Face model id for the ASL alphabet classifier.
MODEL_NAME = "prithivMLmods/Alphabet-Sign-Language-Detection"

# Default to "unavailable"; a successful load below overwrites these, so the
# endpoints can degrade gracefully instead of crashing at import time.
model = None
processor = None
model_loaded = False

try:
    print(f"Cargando modelo '{MODEL_NAME}'...")
    processor = AutoImageProcessor.from_pretrained(MODEL_NAME)
    model = SiglipForImageClassification.from_pretrained(MODEL_NAME)
    print(f"Modelo '{MODEL_NAME}' cargado exitosamente")
    model_loaded = True
except Exception as err:
    # Broad catch is deliberate: any download/load failure leaves the app
    # running in its "model unavailable" mode rather than killing startup.
    print(f"Error al cargar el modelo: {err}")
    print("Usando modelo de ejemplo. Para uso real, necesitas un modelo específico de ASL.")
    # Reset both, since the processor may have loaded before the model failed.
    model = None
    processor = None
    model_loaded = False

app = FastAPI(title="API de ASL con modelo de HF")
async def translate_sign(file: UploadFile = File(...)):
    """Classify an uploaded image as an ASL alphabet letter.

    Reads the upload into memory, runs the SigLIP classifier, and returns
    a dict with the predicted label and its confidence as a percentage
    (rounded to 2 decimals). If the model failed to load at startup, or the
    image cannot be processed, an error dict is returned instead.

    NOTE(review): no @app.post("/predict/") decorator is visible here even
    though read_root advertises a /predict/ endpoint — it may have been lost
    in extraction; confirm the route registration.
    """
    if not model or not processor:
        return {
            "error": "Modelo no disponible.",
            "message": "El modelo específico de ASL no pudo cargarse. Verifica que el modelo existe en Hugging Face.",
            "model_attempted": MODEL_NAME
        }
    try:
        image_bytes = await file.read()
        # FIX: force 3-channel RGB. PIL decodes PNG/palette/grayscale uploads
        # as RGBA/P/L, which HF image processors typically reject or mishandle.
        image = Image.open(io.BytesIO(image_bytes)).convert("RGB")
        inputs = processor(images=image, return_tensors="pt")
        with torch.no_grad():  # inference only — skip autograd bookkeeping
            outputs = model(**inputs)
        logits = outputs.logits
        probs = torch.nn.functional.softmax(logits, dim=1)
        predicted_class_idx = logits.argmax(-1).item()
        confidence = probs[0][predicted_class_idx].item()
        predicted_label = model.config.id2label[predicted_class_idx]
        return {
            "prediction": predicted_label,
            "confidence": round(confidence * 100, 2)
        }
    except Exception as e:
        # Best-effort error reporting back to the caller (bad image, etc.).
        return {"error": f"Error al procesar la imagen: {str(e)}"}
def read_root():
    """Landing payload pointing callers at the /predict/ endpoint.

    NOTE(review): no @app.get("/") decorator is visible in this extract —
    confirm the route is registered.
    """
    welcome = "API ok. Usa el endpoint /predict/ para predecir."
    return {"message": welcome}
def get_status():
    """Report whether the classifier was loaded at startup.

    Reads the module-level flags set during the import-time load attempt.
    NOTE(review): no @app.get(...) decorator is visible in this extract —
    confirm the route is registered.
    """
    if model_loaded:
        status_message = "Modelo cargado correctamente"
    else:
        status_message = "Modelo no disponible"
    return {
        "model_loaded": model_loaded,
        "model_name": MODEL_NAME,
        "message": status_message,
    }