Burnside committed on
Commit 252c302 · verified · 1 Parent(s): 45b4da1

decreased max_tokens to 1024

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -39,7 +39,7 @@ final_answer = FinalAnswerTool()
 # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'

 model = HfApiModel(
-max_tokens=4096,
+max_tokens=1024,
 temperature=0.5,
 #model_id='Qwen/Qwen2.5-Coder-32B-Instruct',# it is possible that this model may be overloaded, fall back on 14B or even 7B or 3B locally
 model_id='Qwen/Qwen2.5-Coder-3B-Instruct',# it is possible that this model may be overloaded, fall back on 14B or even 7B or 3B locally
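For reference, a minimal sketch of the model setup in app.py after this change, assuming the smolagents HfApiModel and FinalAnswerTool classes; the actual import lines are not part of this diff and are shown here only as an assumption.

# Sketch of the post-commit configuration in app.py (imports assumed, not shown in the diff).
from smolagents import FinalAnswerTool, HfApiModel

final_answer = FinalAnswerTool()

# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'

model = HfApiModel(
    max_tokens=1024,  # reduced from 4096 in this commit
    temperature=0.5,
    # model_id='Qwen/Qwen2.5-Coder-32B-Instruct',  # may be overloaded; fall back on 14B, 7B, or 3B locally
    model_id='Qwen/Qwen2.5-Coder-3B-Instruct',
)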