Chryslerx10 committed on
Commit
b4d29e1
·
1 Parent(s): 6f1522f

Update interface.py

Browse files
Files changed (1) hide show
  1. interface.py +8 -7
interface.py CHANGED
@@ -32,7 +32,7 @@ class ChatInterface:
32
  Supports both regular image files and DICOM medical imaging files.
33
  """
34
 
35
- def __init__(self, agent, tools_dict):
36
  """
37
  Initialize the chat interface.
38
 
@@ -48,6 +48,7 @@ class ChatInterface:
48
  # Separate storage for original and display paths
49
  self.original_file_path = None # For LLM (.dcm or other)
50
  self.display_file_path = None # For UI (always viewable format)
 
51
 
52
  def handle_upload(self, file_path: str) -> str:
53
  """
@@ -103,7 +104,7 @@ class ChatInterface:
103
  return history, gr.Textbox(value=message, interactive=False)
104
 
105
  async def process_message(
106
- self, message: str, display_image: Optional[str], chat_history: List[ChatMessage], session_details: dict
107
  ) -> AsyncGenerator[Tuple[List[ChatMessage], Optional[str], str], None]:
108
  """
109
  Process a message and generate responses.
@@ -201,7 +202,7 @@ class ChatInterface:
201
  yield chat_history, self.display_file_path
202
 
203
  finally:
204
- store_chat_history(session_details['username'], session_details['session_id'], chat_history)
205
 
206
  def store_chat_history(username, session_id, chat_history):
207
  """
@@ -413,7 +414,7 @@ def create_demo(agent, tools_dict):
413
  )
414
  bot_msg = chat_msg.then(
415
  interface.process_message,
416
- inputs=[txt, image_display, chatbot, session_details],
417
  outputs=[chatbot, image_display, txt],
418
  )
419
  bot_msg.then(lambda: gr.Textbox(interactive=True), None, [txt])
@@ -424,7 +425,7 @@ def create_demo(agent, tools_dict):
424
  interface.add_message, inputs=[txt, image_display, chatbot], outputs=[chatbot, txt]
425
  ).then(
426
  interface.process_message,
427
- inputs=[txt, image_display, chatbot, session_details],
428
  outputs=[chatbot, image_display, txt],
429
  ).then(lambda: gr.Textbox(interactive=True), None, [txt])
430
 
@@ -434,7 +435,7 @@ def create_demo(agent, tools_dict):
434
  interface.add_message, inputs=[txt, image_display, chatbot], outputs=[chatbot, txt]
435
  ).then(
436
  interface.process_message,
437
- inputs=[txt, image_display, chatbot, session_details],
438
  outputs=[chatbot, image_display, txt],
439
  ).then(lambda: gr.Textbox(interactive=True), None, [txt])
440
 
@@ -444,7 +445,7 @@ def create_demo(agent, tools_dict):
444
  interface.add_message, inputs=[txt, image_display, chatbot], outputs=[chatbot, txt]
445
  ).then(
446
  interface.process_message,
447
- inputs=[txt, image_display, chatbot, session_details],
448
  outputs=[chatbot, image_display, txt],
449
  ).then(lambda: gr.Textbox(interactive=True), None, [txt])
450
 
 
32
  Supports both regular image files and DICOM medical imaging files.
33
  """
34
 
35
+ def __init__(self, agent, tools_dict, session_details):
36
  """
37
  Initialize the chat interface.
38
 
 
48
  # Separate storage for original and display paths
49
  self.original_file_path = None # For LLM (.dcm or other)
50
  self.display_file_path = None # For UI (always viewable format)
51
+ self.session_details = session_details
52
 
53
  def handle_upload(self, file_path: str) -> str:
54
  """
 
104
  return history, gr.Textbox(value=message, interactive=False)
105
 
106
  async def process_message(
107
+ self, message: str, display_image: Optional[str], chat_history: List[ChatMessage]
108
  ) -> AsyncGenerator[Tuple[List[ChatMessage], Optional[str], str], None]:
109
  """
110
  Process a message and generate responses.
 
202
  yield chat_history, self.display_file_path
203
 
204
  finally:
205
+ store_chat_history(self.session_details['username'], self.session_details['session_id'], chat_history)
206
 
207
  def store_chat_history(username, session_id, chat_history):
208
  """
 
414
  )
415
  bot_msg = chat_msg.then(
416
  interface.process_message,
417
+ inputs=[txt, image_display, chatbot],
418
  outputs=[chatbot, image_display, txt],
419
  )
420
  bot_msg.then(lambda: gr.Textbox(interactive=True), None, [txt])
 
425
  interface.add_message, inputs=[txt, image_display, chatbot], outputs=[chatbot, txt]
426
  ).then(
427
  interface.process_message,
428
+ inputs=[txt, image_display, chatbot],
429
  outputs=[chatbot, image_display, txt],
430
  ).then(lambda: gr.Textbox(interactive=True), None, [txt])
431
 
 
435
  interface.add_message, inputs=[txt, image_display, chatbot], outputs=[chatbot, txt]
436
  ).then(
437
  interface.process_message,
438
+ inputs=[txt, image_display, chatbot],
439
  outputs=[chatbot, image_display, txt],
440
  ).then(lambda: gr.Textbox(interactive=True), None, [txt])
441
 
 
445
  interface.add_message, inputs=[txt, image_display, chatbot], outputs=[chatbot, txt]
446
  ).then(
447
  interface.process_message,
448
+ inputs=[txt, image_display, chatbot],
449
  outputs=[chatbot, image_display, txt],
450
  ).then(lambda: gr.Textbox(interactive=True), None, [txt])
451