import os

# Hugging Face / torch cache env vars MUST be set before any library that
# reads them is imported: transformers and sentence-transformers resolve
# HF_HOME / TRANSFORMERS_CACHE / SENTENCE_TRANSFORMERS_HOME at import time.
# The `app.*` router modules below pull those libraries in, so in the
# original ordering (imports first, env vars after) these settings were
# silently ignored and the default cache paths were used instead.
os.makedirs("/app/.cache/huggingface/transformers", exist_ok=True)
os.makedirs("/app/.cache/sentence_transformers", exist_ok=True)
os.makedirs("/app/.cache/torch", exist_ok=True)
os.environ["HF_HOME"] = "/app/.cache/huggingface"
os.environ["TRANSFORMERS_CACHE"] = "/app/.cache/huggingface/transformers"
os.environ["SENTENCE_TRANSFORMERS_HOME"] = "/app/.cache/sentence_transformers"
os.environ["TORCH_HOME"] = "/app/.cache/torch"

from fastapi import FastAPI
import uvicorn

from app.lawbot import router as lawgpt_router
from app.predict_pipeline import router as bail_reckoner_router
from app.fir_pdf_gen import router as fir_router

app = FastAPI()

# Mount each service under a distinct prefix so their routes cannot collide.
app.include_router(lawgpt_router, prefix="/lawgpt", tags=["LawGPT"])
app.include_router(bail_reckoner_router, prefix="/bail-reckoner", tags=["Bail Reckoner"])
app.include_router(fir_router, prefix="/generate-fir", tags=["Generate FIR"])
@app.get("/")
async def root():
    """Landing/health endpoint: reports the gateway's mounted route prefixes."""
    available_routes = [
        "/lawgpt",
        "/bail-reckoner",
        "/generate-fir",
        "/process-fir-description",
    ]
    return {"message": "API Gateway is running", "routes": available_routes}
if __name__ == "__main__":
    # PORT defaults to 7860 (the Hugging Face Spaces convention) when unset.
    port = int(os.getenv("PORT", 7860))
    # Bind to all interfaces (0.0.0.0) so the server is reachable from
    # outside the container. The previous comment claimed "localhost only",
    # which contradicted the actual 0.0.0.0 bind. HOST may override the
    # default without a code change; the default behavior is unchanged.
    host = os.getenv("HOST", "0.0.0.0")
    uvicorn.run("main:app", host=host, port=port)