magdap116 committed on
Commit 496dd0b · verified · 1 Parent(s): 6321507

Update app.py


local transformers model

Files changed (1): app.py (+12 -3)
app.py CHANGED
@@ -6,7 +6,8 @@ import pandas as pd
 from smolagents import DuckDuckGoSearchTool,HfApiModel,load_tool, CodeAgent,PythonInterpreterTool,VisitWebpageTool,Tool
 import hashlib
 import json
-from transformers import AutoTokenizer, AutoModelForCausalLM
+from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline, TransformersEngine
+
 import torch
 from tooling import ModelMathTool

@@ -55,6 +56,10 @@ def cache_answer(question: str, answer: str):
 # --- Model Setup ---
 MODEL_NAME = 'microsoft/DialoGPT-small'# 'EleutherAI/gpt-neo-2.7B'#'distilbert/distilgpt2'#'deepseek-ai/DeepSeek-R1-Distill-Qwen-7B'#'mistralai/Mistral-7B-Instruct-v0.2'

+
+
+
+
 def load_model():
     """Download and load the model and tokenizer."""
     try:
@@ -67,7 +72,11 @@ def load_model():


         print(f"Model {MODEL_NAME} loaded successfully.")
-        return model, tokenizer
+
+        transformers_engine = TransformersEngine(pipeline("text-generation", model=model, tokenizer=tokenizer))
+
+
+        return transformers_engine
     except Exception as e:
         print(f"Error loading model: {e}")
         raise
@@ -81,7 +90,7 @@ class BasicAgent:
     def __init__(self):
         print("BasicAgent initialized.")
         self.agent = CodeAgent(
-            model=model,
+            llm_engine=llm_engine,
             tools=[web_search,python_interpreter,visit_webpage_tool,model_math_tool],
             max_steps=1,
             verbosity_level=1,
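
For context, a minimal sketch of what the new load_model() path amounts to, using only standard transformers APIs (AutoTokenizer, AutoModelForCausalLM, pipeline). The TransformersEngine wrapper and the llm_engine= argument to CodeAgent are taken from the diff above as-is and depend on the installed transformers/smolagents versions exposing them; they are not reproduced here.

    # Sketch of the local-model loading flow implied by this commit (assumptions:
    # MODEL_NAME and the try/except structure mirror the hunks above).
    from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

    MODEL_NAME = 'microsoft/DialoGPT-small'

    def load_model():
        """Download the model and tokenizer locally and build a text-generation pipeline."""
        try:
            tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
            model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
            print(f"Model {MODEL_NAME} loaded successfully.")
            # The diff then wraps this pipeline in TransformersEngine and hands the
            # result to CodeAgent via llm_engine=..., instead of returning (model, tokenizer).
            return pipeline("text-generation", model=model, tokenizer=tokenizer)
        except Exception as e:
            print(f"Error loading model: {e}")
            raise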