sushil3125 committed
Commit 693e699 · 1 Parent(s): 3bbc5a2

Add application file

Files changed (3)
  1. Dockerfile +16 -0
  2. app.py +26 -0
  3. requirements.txt +3 -0
Dockerfile ADDED
@@ -0,0 +1,16 @@
+ # Read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
+ # you will also find guides on how best to write your Dockerfile
+
+ FROM python:3.9
+
+ RUN useradd -m -u 1000 user
+ USER user
+ ENV PATH="/home/user/.local/bin:$PATH"
+
+ WORKDIR /app
+
+ COPY --chown=user ./requirements.txt requirements.txt
+ RUN pip install --no-cache-dir --upgrade -r requirements.txt
+
+ COPY --chown=user . /app
+ CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
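The CMD line serves the FastAPI app with uvicorn on port 7860, the port Hugging Face Docker Spaces expect the container to listen on. For running the same app locally without Docker, a minimal sketch is shown below; the local_run.py file name and the __main__ guard are assumptions for illustration, not part of this commit.

# local_run.py -- hypothetical local entry point, not part of this commit.
# Starts the same FastAPI app that the Dockerfile's CMD launches,
# binding to 0.0.0.0:7860 to mirror the container configuration.
import uvicorn

if __name__ == "__main__":
    uvicorn.run("app:app", host="0.0.0.0", port=7860)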
app.py ADDED
@@ -0,0 +1,26 @@
+ from fastapi import FastAPI
+ from pydantic import BaseModel
+ from sentence_transformers import SentenceTransformer
+ from typing import List
+
+ # Load the pre-trained sentence transformer models
+ bge_small_model = SentenceTransformer('BAAI/bge-small-en-v1.5', device="cpu")
+ all_mp_net_model = SentenceTransformer('sentence-transformers/all-mpnet-base-v2', device="cpu")
+
+ # Initialize FastAPI app
+ app = FastAPI()
+
+ # Request body model
+ class TextInput(BaseModel):
+     text: List[str]  # List of sentences or text data
+     model_name: str
+
+ # Route to calculate embeddings
+ @app.post("/get-embedding/")
+ async def get_embedding(input: TextInput):
+     # Generate embeddings using the selected sentence transformer model
+     if input.model_name == "BM":
+         embeddings = all_mp_net_model.encode(input.text)
+     else:
+         embeddings = bge_small_model.encode(input.text)
+     return {"embeddings": embeddings.tolist()}
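The /get-embedding/ route accepts a JSON body with a list of strings and a model_name selector: "BM" routes to all-mpnet-base-v2, and any other value falls back to bge-small-en-v1.5. A minimal client sketch follows, assuming the requests library and a placeholder base URL (neither is part of this commit).

# client_example.py -- hypothetical client, not part of this commit.
# Sends a batch of sentences to the /get-embedding/ endpoint and prints
# the shape of the returned embeddings.
import requests

BASE_URL = "http://localhost:7860"  # placeholder; replace with the deployed Space URL

payload = {
    "text": ["Hello world", "Sentence embeddings via FastAPI"],
    "model_name": "BM",  # "BM" selects all-mpnet-base-v2; any other value uses bge-small-en-v1.5
}

response = requests.post(f"{BASE_URL}/get-embedding/", json=payload)
response.raise_for_status()
embeddings = response.json()["embeddings"]
print(len(embeddings), "embeddings of dimension", len(embeddings[0]))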
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ fastapi
+ uvicorn[standard]
+ sentence_transformers