ccibeekeoc42 committed · Commit b2a43b9 · verified · 1 Parent(s): 23976f4

Update app.py

Files changed (1):
  app.py +1 -1
app.py CHANGED
@@ -1,4 +1,5 @@
  import os
+ import openai
  import torch
  from transformers import pipeline
 
@@ -46,7 +47,6 @@ client = OpenAI(
 
    # return "".join(full_response)
 
- import openai
 
  def generate_llm_response(text, model_id="ccibeekeoc42/Llama3.1-8b-base-SFT-2024-11-09"):
      """Generates LLM response for given text with streaming support, handling GPU cold-start errors."""