from IPython import get_ipython from IPython.display import display from fastapi import FastAPI, HTTPException from fastapi.middleware.cors import CORSMiddleware from pydantic import BaseModel import numpy as np import pandas as pd import joblib import keras from tensorflow.keras.preprocessing import image from tensorflow.keras.models import load_model from sklearn.preprocessing import StandardScaler import io from PIL import Image from pyngrok import ngrok import nest_asyncio import uvicorn import cv2 import base64 # Fix async issues in Colab nest_asyncio.apply() app = FastAPI(title="AI-Powered Multi-Disease Diagnostic System") # Add CORS middleware to allow request from all origins app.add_middleware( CORSMiddleware, allow_origins=["*"], # Allows all origins allow_credentials=True, allow_methods=["*"], allow_headers=["*"], ) ################################# # DIABETES MODEL AND ENDPOINT ################################# diabetes_model_path = "/content/drive/MyDrive/AI Powered Multi Disease Diagnostic System/Diabetes/SVM.joblib" diabetes_dataset_path = "/content/drive/MyDrive/AI Powered Multi Disease Diagnostic System/diabetes.csv" diabetes_model = joblib.load(diabetes_model_path) diabetes_df = pd.read_csv(diabetes_dataset_path) diabetes_scaler = StandardScaler() diabetes_scaler.fit(diabetes_df.drop(columns="Outcome", axis=1)) class DiabetesInput(BaseModel): pregnancies: int glucose: float blood_pressure: float skin_thickness: float insulin: float bmi: float diabetes_pedigree: float age: int @app.post("/diabetes/predict") def predict_diabetes(data: DiabetesInput): input_data = np.array([[data.pregnancies, data.glucose, data.blood_pressure, data.skin_thickness, data.insulin, data.bmi, data.diabetes_pedigree, data.age]]) input_scaled = diabetes_scaler.transform(input_data) prediction = diabetes_model.predict(input_scaled)[0] return {"prediction": "Diabetic" if prediction == 1 else "Non-Diabetic"} ################################# # HEART DISEASE MODEL AND ENDPOINT ################################# heart_model_path = "/content/drive/MyDrive/AI Powered Multi Disease Diagnostic System/Heart/saved_models/Support Vector Machine_model.pkl" heart_dataset_path = "/content/drive/MyDrive/AI Powered Multi Disease Diagnostic System/heart.csv" heart_model = joblib.load(heart_model_path) heart_df = pd.read_csv(heart_dataset_path) heart_scaler = StandardScaler() heart_scaler.fit(heart_df.drop(columns="target", axis=1)) class HeartDiseaseInput(BaseModel): age: int sex: int cp: int trestbps: float chol: float fbs: int restecg: int thalach: float exang: int oldpeak: float slope: int ca: int thal: int @app.post("/heart_disease/predict") def predict_heart_disease(data: HeartDiseaseInput): input_data = np.array([[data.age, data.sex, data.cp, data.trestbps, data.chol, data.fbs, data.restecg, data.thalach, data.exang, data.oldpeak, data.slope, data.ca, data.thal]]) input_scaled = heart_scaler.transform(input_data) prediction = heart_model.predict(input_scaled)[0] return {"prediction": "Heart Disease Detected" if prediction == 1 else "No Heart Disease"} ################################# # PARKINSON'S DISEASE MODEL AND ENDPOINT ################################# parkinsons_model_path = "/content/drive/MyDrive/AI Powered Multi Disease Diagnostic System/Parkinsons/New_Updated_Models/SVM_model.joblib" parkinsons_scaler_path = "/content/drive/MyDrive/AI Powered Multi Disease Diagnostic System/Parkinsons/New_Updated_Models/scaler.joblib" parkinsons_model = joblib.load(parkinsons_model_path) parkinsons_scaler = joblib.load(parkinsons_scaler_path) class ParkinsonsInput(BaseModel): MDVP_Fo: float MDVP_Fhi: float MDVP_Flo: float MDVP_Jitter_percent: float MDVP_Jitter_Abs: float MDVP_RAP: float MDVP_PPQ: float Jitter_DDP: float MDVP_Shimmer: float MDVP_Shimmer_dB: float Shimmer_APQ3: float Shimmer_APQ5: float MDVP_APQ: float Shimmer_DDA: float NHR: float HNR: float RPDE: float DFA: float spread1: float spread2: float D2: float PPE: float @app.post("/parkinsons/predict") def predict_parkinsons(data: ParkinsonsInput): input_data = np.array([[data.MDVP_Fo, data.MDVP_Fhi, data.MDVP_Flo, data.MDVP_Jitter_percent, data.MDVP_Jitter_Abs, data.MDVP_RAP, data.MDVP_PPQ, data.Jitter_DDP, data.MDVP_Shimmer, data.MDVP_Shimmer_dB, data.Shimmer_APQ3, data.Shimmer_APQ5, data.MDVP_APQ, data.Shimmer_DDA, data.NHR, data.HNR, data.RPDE, data.DFA, data.spread1, data.spread2, data.D2, data.PPE]]) input_scaled = parkinsons_scaler.transform(input_data) prediction = parkinsons_model.predict(input_scaled)[0] probability = parkinsons_model.predict_proba(input_scaled)[:, 1][0] return { "prediction": "Parkinson's Detected" if prediction == 1 else "No Parkinson's", "probability": probability } ################################# # BREAST CANCER DETECTION MODEL AND ENDPOINT ################################# breast_cancer_model_path = "/content/drive/MyDrive/AI Powered Multi Disease Diagnostic System/Breast Cancer Detection/Breast Cancer Detection DenseNet121/Breast_Cancer_Detection_DenseNet121.h5" breast_cancer_model = keras.models.load_model(breast_cancer_model_path) breast_cancer_labels = {0: 'benign', 1: 'malignant', 2: 'normal'} ################################# # TUBERCULOSIS DETECTION MODEL AND ENDPOINT ################################# tb_model_path = "/content/drive/MyDrive/AI Powered Multi Disease Diagnostic System/Tuberculosis Classification/Tuberculosis Classification DenseNet/Tuberculosis_detection_DenseNet169.h5" tb_model = load_model(tb_model_path) tb_labels = {0: "Normal", 1: "Tuberculosis"} ################################# # BRAIN TUMOR DETECTION MODEL AND ENDPOINT ################################# brain_tumor_model_path = "/content/drive/MyDrive/AI Powered Multi Disease Diagnostic System/Brain Tumor Detection/Brain Tumor VGG16/Brain_tumor_VGG16.h5" brain_tumor_model = load_model(brain_tumor_model_path, compile=False) brain_tumor_labels = ['Glioma Tumor', 'Meningioma Tumor', 'No Tumor', 'Pituitary Tumor'] ################################# # UTILITY: BASE64 IMAGE CONVERSION ################################# class Base64ImageRequest(BaseModel): base64_image: str def base64_to_array(base64_string, target_size=(256, 256)): """ Convert a Base64 string to a NumPy image array with a given target size. """ try: image_bytes = base64.b64decode(base64_string) img = Image.open(io.BytesIO(image_bytes)).convert("RGB") img = img.resize(target_size) img_array = image.img_to_array(img) img_array = np.expand_dims(img_array, axis=0) / 255.0 return img_array except Exception as e: raise HTTPException(status_code=400, detail=f"Invalid image data: {str(e)}") ################################# # BREAST CANCER PREDICTION ENDPOINT (256x256 input) ################################# @app.post("/breast_cancer/predict") async def predict_cancer(request: Base64ImageRequest): try: img_array = base64_to_array(request.base64_image, target_size=(256, 256)) prediction = breast_cancer_model.predict(img_array) predicted_label_index = np.argmax(prediction) predicted_label = breast_cancer_labels.get(predicted_label_index, "Unknown") confidence = np.max(prediction) * 100 return { "predicted_class": predicted_label, "confidence": f"{confidence:.2f}%", } except Exception as e: raise HTTPException(status_code=500, detail=str(e)) ################################# # TUBERCULOSIS PREDICTION ENDPOINT (224x224 input) ################################# @app.post("/tuberculosis/predict") async def predict_tuberculosis(request: Base64ImageRequest): try: img_array = base64_to_array(request.base64_image, target_size=(224, 224)) y_pred = tb_model.predict(img_array) predicted_label_index = np.argmax(y_pred, axis=1)[0] predicted_label = tb_labels.get(predicted_label_index, "Unknown") confidence = np.max(y_pred) * 100 return { "prediction": predicted_label, "confidence": f"{confidence:.2f}%", } except Exception as e: raise HTTPException(status_code=500, detail=str(e)) ################################# # BRAIN TUMOR PREDICTION ENDPOINT (224x224 input) ################################# @app.post("/brain_tumor/predict") async def predict_brain_tumor(request: Base64ImageRequest): try: img_array = base64_to_array(request.base64_image, target_size=(224, 224)) predictions = brain_tumor_model.predict(img_array) predicted_class = np.argmax(predictions) confidence = np.max(predictions) * 100 return { "prediction": brain_tumor_labels[predicted_class], "confidence": f"{confidence:.2f}%", } except Exception as e: raise HTTPException(status_code=500, detail=str(e)) ################################# # NGROK & SERVER SETUP ################################# ngrok.set_auth_token("28WOZmFsePzDohg4URD5MU7tgBN_3onpraLYjUsXNXoB4iiro") ngrok_tunnel = ngrok.connect(8000) print(f"FastAPI running at {ngrok_tunnel.public_url}") uvicorn.run(app, host="0.0.0.0", port=8000)