khalednabawi11 committed
Commit 4e5f5cf · verified · 1 Parent(s): 3bbb164

Update app.py

Files changed (1):
  app.py  +9 -5
app.py CHANGED
@@ -257,11 +257,11 @@ app.add_middleware(
 )
 
 generation_config = GenerationConfig(
-    max_new_tokens=150,
-    temperature=0.7,
-    top_k=20,
+    max_new_tokens=200,
+    temperature=0.3,
+    top_k=50,
     do_sample=True,
-    top_p=0.8,
+    top_p=0.9,
 )
 
 # Create generation pipeline
@@ -334,7 +334,11 @@ async def chat_fn(query: Query):
     # Run blocking inference in thread
     loop = asyncio.get_event_loop()
     response = await loop.run_in_executor(executor,
-        lambda: pipe(prompt, max_new_tokens=150, temperature=0.7, do_sample=True, top_p=0.9)[0]['generated_text'])
+        lambda: pipe(prompt, max_new_tokens=200,
+                     temperature=0.3,
+                     top_k=50,
+                     do_sample=True,
+                     top_p=0.9)[0]['generated_text'])
 
     # Parse answer
     answer = response.split("Answer:")[-1].strip() if "Answer:" in response else response.split("الإجابة:")[-1].strip()
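For context, a minimal consolidated sketch of how the updated settings are used, assuming a standard transformers text-generation pipeline; the model id, executor, and function name below are illustrative placeholders, not values taken from the repository:

import asyncio
from concurrent.futures import ThreadPoolExecutor

from transformers import GenerationConfig, pipeline

MODEL_ID = "your-org/your-model"   # placeholder; app.py loads its own model
executor = ThreadPoolExecutor(max_workers=1)

# Sampling settings as of this commit
generation_config = GenerationConfig(
    max_new_tokens=200,
    temperature=0.3,
    top_k=50,
    do_sample=True,
    top_p=0.9,
)

pipe = pipeline("text-generation", model=MODEL_ID)

async def generate(prompt: str) -> str:
    # Run the blocking pipeline call in a worker thread so the event loop stays responsive
    loop = asyncio.get_event_loop()
    response = await loop.run_in_executor(
        executor,
        lambda: pipe(prompt, max_new_tokens=200, temperature=0.3,
                     top_k=50, do_sample=True, top_p=0.9)[0]["generated_text"],
    )
    # Strip everything up to the answer marker (English or Arabic), as app.py does
    if "Answer:" in response:
        return response.split("Answer:")[-1].strip()
    return response.split("الإجابة:")[-1].strip()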