gpaasch committed on
Commit
a9a2808
·
1 Parent(s): a0d3ac7

see a progress counter when submit is hit

Browse files
Files changed (1) hide show
  1. src/app.py +23 -17
src/app.py CHANGED
@@ -555,25 +555,31 @@ with gr.Blocks(theme="default") as demo:
555
  return history, "" # Return tuple to clear input
556
 
557
  try:
558
- # Process the symptoms
559
- diagnosis_query = f"""
560
- Based on these symptoms: '{text}'
561
- Provide relevant ICD-10 codes and diagnostic questions.
562
- Focus on clinical implications.
563
- Limit response to 1000 characters.
564
- """
565
- response = symptom_index.as_query_engine().query(diagnosis_query)
566
 
567
- # Clean up memory
568
- cleanup_memory()
 
 
 
 
 
 
 
 
 
 
569
 
570
  new_history = history + [
571
  {"role": "user", "content": text},
572
- {"role": "assistant", "content": format_response_for_user({
573
- "diagnoses": [],
574
- "confidences": [],
575
- "follow_up": str(response)[:1000]
576
- })}
577
  ]
578
  return new_history, "" # Return empty string to clear input
579
  except Exception as e:
@@ -584,9 +590,9 @@ with gr.Blocks(theme="default") as demo:
584
  submit_btn.click(
585
  fn=process_text_input,
586
  inputs=[text_input, chatbot],
587
- outputs=[chatbot, text_input], # Add text_input to outputs
588
  queue=True
589
- ).then( # Chain cleanup
590
  fn=cleanup_memory,
591
  inputs=None,
592
  outputs=None,
 
555
  return history, "" # Return tuple to clear input
556
 
557
  try:
558
+ # Process the symptoms using the configured LLM
559
+ prompt = f"""Given these symptoms: '{text}'
560
+ Please provide:
561
+ 1. Most likely ICD-10 codes
562
+ 2. Confidence levels for each diagnosis
563
+ 3. Key follow-up questions
564
+
565
+ Format as JSON with diagnoses, confidences, and follow_up fields."""
566
 
567
+ response = llm.complete(prompt)
568
+
569
+ try:
570
+ # Try to parse as JSON first
571
+ result = json.loads(response.text)
572
+ except json.JSONDecodeError:
573
+ # If not JSON, wrap in our format
574
+ result = {
575
+ "diagnoses": [],
576
+ "confidences": [],
577
+ "follow_up": str(response.text)[:1000] # Limit response length
578
+ }
579
 
580
  new_history = history + [
581
  {"role": "user", "content": text},
582
+ {"role": "assistant", "content": format_response_for_user(result)}
 
 
 
 
583
  ]
584
  return new_history, "" # Return empty string to clear input
585
  except Exception as e:
 
590
  submit_btn.click(
591
  fn=process_text_input,
592
  inputs=[text_input, chatbot],
593
+ outputs=[chatbot, text_input],
594
  queue=True
595
+ ).success( # Changed from .then to .success for better error handling
596
  fn=cleanup_memory,
597
  inputs=None,
598
  outputs=None,