Rom89823974978 committed
Commit 5536134 · 1 parent: 86fd3c3

Cleaned imports

Files changed (2)
  1. backend/main.py +2 -2
  2. backend/rag.py +33 -60
backend/main.py CHANGED
@@ -15,7 +15,7 @@ import torch
 import zipfile
 from fastapi import Depends, FastAPI, HTTPException, Request
 from fastapi.middleware.cors import CORSMiddleware
-from pydantic import BaseModel, BaseSettings, PrivateAttr
+from pydantic import BaseModel, PrivateAttr
 from pydantic_settings import BaseSettings as SettingsBase
 from sentence_transformers import CrossEncoder
 from starlette.concurrency import run_in_threadpool
@@ -227,7 +227,7 @@ class RAGRequest(BaseModel):
 class RAGResponse(BaseModel):
     answer: str
     source_ids: List[str]
-
+
 # ---------------------------------------------------------------------------- #
 #                                 RAG Endpoint                                  #
 # ---------------------------------------------------------------------------- #
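
Both main.py hunks are small: the first drops BaseSettings from the pydantic import, since Pydantic v2 moved that class into the separate pydantic-settings package (which this file already imports as SettingsBase on the next line), and the lone -/+ blank pair in the second hunk is most likely a trailing-whitespace fix. A minimal sketch of the settings pattern the corrected imports support, assuming hypothetical field names that are not taken from this repo:

from pydantic import BaseModel
from pydantic_settings import BaseSettings as SettingsBase


class Settings(SettingsBase):
    # Hypothetical fields; pydantic-settings fills them from environment
    # variables (MODEL_NAME, LOG_LEVEL) or falls back to these defaults.
    model_name: str = "google/flan-t5-base"
    log_level: str = "INFO"


class RAGRequest(BaseModel):
    query: str


settings = Settings()  # reads the environment at instantiation
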
backend/rag.py CHANGED
@@ -1,81 +1,54 @@
-import os
+# Standard library
 import logging
-from typing import Any, Dict, List, Optional, Tuple, AsyncGenerator
+import os
+import shutil
+import tempfile
+import traceback
+import zipfile
 from contextlib import asynccontextmanager
+from functools import lru_cache
+from typing import Any, AsyncGenerator, Dict, List, Optional, Tuple

-import gcsfs
+# Third-party
 import aiofiles
+import faiss
+import gcsfs
 import polars as pl
-from pydantic_settings import BaseSettings
-from fastapi import FastAPI, HTTPException
+import pickle
+import torch
+from tqdm import tqdm
+
+from fastapi import FastAPI, HTTPException, Request, Depends
 from fastapi.middleware.cors import CORSMiddleware
+from pydantic import BaseModel, PrivateAttr
+from pydantic_settings import BaseSettings
+from sentence_transformers import CrossEncoder
 from starlette.concurrency import run_in_threadpool
-from pydantic import BaseModel
+from transformers import (
+    AutoModelForCausalLM,
+    AutoModelForSeq2SeqLM,
+    AutoTokenizer,
+    T5ForConditionalGeneration,
+    T5Tokenizer,
+    pipeline,
+)

-from langchain.schema import BaseRetriever, Document
-from langchain.text_splitter import RecursiveCharacterTextSplitter
-from langchain_community.vectorstores import FAISS
-from langchain.retrievers.document_compressors import DocumentCompressorPipeline
-from langchain_community.document_transformers import EmbeddingsRedundantFilter
-from langchain.memory import ConversationBufferWindowMemory
-from langchain.chains import ConversationalRetrievalChain
-from langchain.prompts import PromptTemplate
-from langchain_huggingface import HuggingFacePipeline, HuggingFaceEmbeddings
-
-from transformers import AutoTokenizer, pipeline
-from transformers import AutoTokenizer, AutoModelForCausalLM
-#from optimum.onnxruntime import ORTModelForCausalLM, ORTOptimizer
-from sentence_transformers import CrossEncoder
 from whoosh import index
-from whoosh.fields import Schema, TEXT, ID
 from whoosh.analysis import StemmingAnalyzer
+from whoosh.fields import ID, Schema, TEXT
 from whoosh.qparser import MultifieldParser
-from tqdm import tqdm
-import faiss

-from functools import lru_cache
-from fastapi import FastAPI, Request, HTTPException, Depends
-from fastapi.middleware.cors import CORSMiddleware
-import traceback
-from starlette.concurrency import run_in_threadpool
-from pydantic import BaseModel
-from pydantic_settings import BaseSettings
-from contextlib import asynccontextmanager
-from typing import Any, Dict, List, Optional, AsyncGenerator, Tuple
-
-import os
-import logging
-import aiofiles
-import polars as pl
-import zipfile
-import gcsfs
-
-from langchain.schema import Document,BaseRetriever
+# LangChain
+from langchain.schema import BaseRetriever, Document
 from langchain.text_splitter import RecursiveCharacterTextSplitter
-from langchain_community.vectorstores import FAISS
-from langchain.retrievers.document_compressors import DocumentCompressorPipeline
-from langchain_community.document_transformers import EmbeddingsRedundantFilter
 from langchain.memory import ConversationBufferWindowMemory
 from langchain.chains import ConversationalRetrievalChain
 from langchain.prompts import PromptTemplate
+from langchain.retrievers.document_compressors import DocumentCompressorPipeline
+from langchain_community.document_transformers import EmbeddingsRedundantFilter
+from langchain_community.vectorstores import FAISS as LCFAISS
 from langchain_huggingface import HuggingFacePipeline, HuggingFaceEmbeddings

-from transformers import AutoTokenizer, pipeline, AutoModelForCausalLM, AutoModelForSeq2SeqLM, T5Tokenizer,T5ForConditionalGeneration
-from sentence_transformers import CrossEncoder
-
-from whoosh import index
-from whoosh.fields import Schema, TEXT, ID
-from whoosh.analysis import StemmingAnalyzer
-from whoosh.qparser import MultifieldParser
-import pickle
-from pydantic import PrivateAttr
-from tqdm import tqdm
-import faiss
-import torch
-import tempfile
-import shutil
-
-from functools import lru_cache

 # === Logging ===
 logging.basicConfig(level=logging.INFO)
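
The rag.py rewrite is mostly deduplication: the old file carried two near-identical copies of the same import block, and the new one collapses them into PEP 8-style groups (standard library, third-party, LangChain). The one functional rename is the vector store, which now arrives as LCFAISS to keep it visually distinct from the raw import faiss bindings in the third-party group; any later references to plain FAISS in the module would need the same rename, though this hunk shows only the imports. A minimal sketch of how the two names coexist, with an illustrative embedding model and corpus:

import faiss  # raw FAISS bindings
from langchain_community.vectorstores import FAISS as LCFAISS
from langchain_huggingface import HuggingFaceEmbeddings

# Illustrative model; not necessarily the one this repo uses.
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")

# The LangChain wrapper builds and owns an underlying raw faiss index.
store = LCFAISS.from_texts(["hello world", "goodbye world"], embeddings)
print(isinstance(store.index, faiss.Index))  # True: the raw object underneath
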