# somkerasapi / main.py
# Author: Alex Vega
# Last commit: 9863f44 ("up")
import io
import pickle
import numpy as np
import tensorflow as tf
from fastapi import FastAPI, File, UploadFile, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from PIL import Image
from minisom import MiniSom
# FastAPI application exposing the hybrid CNN+SOM sign-classification model.
app = FastAPI(title="API de Clasificación de Señas con Modelo Híbrido (CNN+SOM)")

# Open CORS policy: any origin/method/header may call the API (convenient for
# browser clients during development; tighten before production).
_cors_settings = {
    "allow_origins": ["*"],
    "allow_credentials": True,
    "allow_methods": ["*"],
    "allow_headers": ["*"],
}
app.add_middleware(CORSMiddleware, **_cors_settings)
# --- Model loading at import time --------------------------------------------
# All four globals stay None if anything below fails; the /predict/ endpoint
# checks for that and responds with a 500 instead of crashing the server.
feature_extractor = None
som = None
label_map = None
class_names = None
try:
    # CNN that maps an input image to a feature vector.
    feature_extractor = tf.keras.models.load_model('feature_extractor.keras')
    print("✅ Extractor de características (feature_extractor.keras) cargado.")
    # Pickled SOM together with the neuron->class mapping built at training time.
    with open('som_model.pkl', 'rb') as f:
        som_data = pickle.load(f)
    som = som_data['som']
    label_map = som_data['label_map']
    class_names = som_data['class_names']
    print("✅ Modelo SOM (som_model.pkl) y etiquetas cargados.")
except Exception as e:
    # Best-effort startup: log the failure and leave every model unset.
    print(f"❌ Error fatal al cargar los modelos: {e}")
    feature_extractor = None
    som = None
    label_map = None
    class_names = None

# Side length (pixels) images are resized to before feature extraction.
IMG_SIZE = 224
def process_image(image_bytes: bytes) -> np.ndarray:
    """Decode raw upload bytes into a model-ready image batch.

    Returns a float32 array of shape (1, IMG_SIZE, IMG_SIZE, 3) with pixel
    values scaled to [0, 1].

    Raises:
        HTTPException: 400 if the bytes cannot be decoded/resized as an image.
    """
    try:
        image = Image.open(io.BytesIO(image_bytes)).convert("RGB")
        image = image.resize((IMG_SIZE, IMG_SIZE))
        # float32 matches the Keras default input dtype and halves memory vs.
        # the float64 that a plain `np.array(image) / 255.0` would produce.
        image_array = np.asarray(image, dtype=np.float32) / 255.0
        # Add the leading batch dimension expected by `Model.predict`.
        return np.expand_dims(image_array, axis=0)
    except Exception as e:
        # Any decode failure is treated as a client error at this boundary.
        raise HTTPException(status_code=400, detail=f"Fallo en el procesamiento de imagen: {e}")
@app.post("/predict/")
async def translate_sign(file: UploadFile = File(...)):
    """Classify an uploaded sign image via the hybrid CNN+SOM pipeline.

    Pipeline: CNN feature extraction -> SOM best-matching unit -> class label.
    Returns {"prediction": <label>, "confidence": <float>}.
    """
    # Explicit `is None` checks instead of `all([...])` truthiness: an empty
    # `label_map` dict (or any falsy-but-loaded object) would otherwise make
    # the endpoint wrongly report that the models failed to load.
    if any(m is None for m in (feature_extractor, som, label_map, class_names)):
        raise HTTPException(status_code=500, detail="Los modelos no están cargados. El servidor no puede procesar la petición.")
    image_bytes = await file.read()
    processed_image = process_image(image_bytes)
    # verbose=0 keeps Keras from printing a progress bar on every request.
    feature_vector = feature_extractor.predict(processed_image, verbose=0)
    # SOM winner: grid coordinates of the best-matching unit for this vector.
    winner_neuron = som.winner(feature_vector[0])  # [0] drops the batch dimension
    predicted_class_index = label_map.get(winner_neuron)
    if predicted_class_index is None:
        # The winning neuron was never assigned a label during training.
        return {"prediction": "Desconocido", "confidence": 0.0}
    predicted_label = class_names[predicted_class_index]
    # NOTE(review): confidence is a hard-coded placeholder, not a real score —
    # consider deriving it from the SOM quantization error.
    return {"prediction": predicted_label, "confidence": 0.9}
@app.get("/")
def read_root():
    """Root health-check endpoint: confirms the service is running."""
    status_message = "API para modelo híbrido CNN+SOM funcionando."
    return {"message": status_message}