Yashvj123 committed on
Commit 49a4a02 · verified · 1 Parent(s): b09574f

Update app.py

Files changed (1): app.py +12 -3
app.py CHANGED

@@ -10,7 +10,7 @@ import easyocr
 # from langchain_community.document_loaders import UnstructuredImageLoader
 from langchain.prompts import PromptTemplate
 from langchain.chains import LLMChain
-from langchain_huggingface import HuggingFaceEndpoint
+from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
 
 # Set Hugging Face API keys
 os.environ["HUGGINGFACEHUB_API_KEY"] = os.getenv("HF")

@@ -83,13 +83,23 @@ if uploaded_file:
     """
     prompt = PromptTemplate(input_variables=["prescription_text"], template=template)
 
-    model = HuggingFaceEndpoint(
+    llm_model = HuggingFaceEndpoint(
         repo_id="mistralai/Mistral-7B-Instruct-v0.3",
         provider="novita",
         temperature=0.6,
         max_new_tokens=300,
         task="conversational"
     )
+
+    model = ChatHuggingFace(
+        llm=llm_model,
+        repo_id="mistralai/Mistral-7B-Instruct-v0.3",
+        provider="novita",
+        temperature=0.6,
+        max_new_tokens=300,
+        task="conversational"
+    )
+
     chain = LLMChain(llm=model, prompt=prompt)
 
 

@@ -117,7 +127,6 @@ if uploaded_file:
         os.remove(dilated_path)
 
 else:
-    st.image("https://cdn.dribbble.com/users/285475/screenshots/14711920/media/bd46dc2873f7099e4ef9fd53e6f7f1df.png", width=600)
     st.markdown("<center><i>Upload a prescription image to begin analysis.</i></center>", unsafe_allow_html=True)
 
 
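For reference, below is a minimal, self-contained sketch of how the new HuggingFaceEndpoint plus ChatHuggingFace wiring from this commit would be driven outside the Streamlit app. It assumes the langchain and langchain-huggingface packages are installed and a valid Hugging Face token sits in the HF environment variable; the template string and the sample prescription text are placeholders rather than the app's actual prompt, and the provider/task arguments from the commit are left out to keep the example to the documented minimal constructor calls.

import os

from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace

# Mirrors app.py: the Hub token is read from the HF environment variable.
os.environ["HUGGINGFACEHUB_API_KEY"] = os.getenv("HF", "")

# Placeholder template; the real app interpolates the OCR'd prescription
# text into a much longer instruction block.
template = "Explain this prescription in simple terms:\n\n{prescription_text}"
prompt = PromptTemplate(input_variables=["prescription_text"], template=template)

# Base text-generation endpoint, using the sampling parameters from the commit.
llm_model = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-Instruct-v0.3",
    temperature=0.6,
    max_new_tokens=300,
)

# Chat wrapper introduced by this commit; it formats messages for the
# instruct model before calling the underlying endpoint.
model = ChatHuggingFace(llm=llm_model)

chain = LLMChain(llm=model, prompt=prompt)
result = chain.invoke({"prescription_text": "Tab. Paracetamol 500 mg, twice daily for 3 days"})
print(result["text"])

Presumably this wrapping is what the task="conversational" setting in the commit is meant to pair with: ChatHuggingFace handles the chat-message formatting that a bare HuggingFaceEndpoint does not, which is why the endpoint is now built first and then passed in via llm=.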