pberck committed
Commit 6e6e85d
1 Parent(s): 94b3c46

Added context box.

app.py CHANGED
@@ -264,6 +264,9 @@ with gr.Blocks(theme=theme) as demo_blocks:
        lines=1,
        container=False,
    )
+   with gr.Accordion("Retrieved context (latest answer)", open=False):
+       ctx_box = gr.Markdown(label="", show_label=False)
+
    clear = gr.Button(
        "Clear",
        # size="sm",
@@ -297,6 +300,7 @@ with gr.Blocks(theme=theme) as demo_blocks:
        return "", history  # String ends up in textbox, thus empty.

    def newbot(history: list):
+       ctx_text = ""
        last = history[-1]
        now = datetime.now()  # current date and time
        date_time = now.strftime("%Y%m%dT%H%M%S")
@@ -310,7 +314,7 @@ with gr.Blocks(theme=theme) as demo_blocks:
                role="assistant", content="Please ask another question!"
            )
            history.append(his)
-           yield history
+           yield history, gr.update()
            return

        context = get_context(user_message)
@@ -327,12 +331,14 @@ with gr.Blocks(theme=theme) as demo_blocks:
        DBG("CONTEXT")
        context = context[0:ctxkeep]
        if ctxkeep > 0:
-           context_str = "Context:"
-           for x in context:  # note different after reranking
+           context_str = "Context:\n"
+           for i, x in enumerate(context):  # note different after reranking
                DBG(x)
-               context_str += x + "\n"
+               context_str += "### " + str(i) + "\n" + x + "\n"
+           ctx_text = context_str
            prompt = f"Context: {context_str}\nQuestion:{user_message}\n"
        else:
+           ctx_text = "(no retrieved context used)"
            prompt = f"Context: Use the chat history and your own knowledge.\nQuestion:{user_message}\n"
        system_prompt = (
            "You are Samuel von Pufendorf. You are in Lund. The year is 1675. "
@@ -380,23 +386,25 @@ with gr.Blocks(theme=theme) as demo_blocks:
        partial_message = ""
        his = gr.ChatMessage(role="assistant", content="")
        history.append(his)
+       yield history, ctx_text
        for chunk in response:
            # DBG("CHUNK:"+repr(chunk))
            if chunk.choices and chunk.choices[0].delta.content is not None:
                partial_message = partial_message + chunk.choices[0].delta.content
                his = gr.ChatMessage(role="assistant", content=partial_message)
                history[-1] = his
-               yield history  # partial_message
+               yield history, gr.update()  # partial_message
            if chunk.usage:
                usage = dict(chunk.usage)
                DBG("TOKENS:" + str(usage["total_tokens"]))
        DBG(partial_message)
        # format_history(history)
+       # yield history, ctx_text

    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
-       newbot, chatbot, chatbot
+       newbot, chatbot, [chatbot, ctx_box]
    )
-   clear.click(lambda: None, None, chatbot, queue=False)
+   clear.click(lambda: (None, ""), None, [chatbot, ctx_box], queue=False)

    # with gr.Blocks() as demo:
    #     chatbot = gr.Chatbot(placeholder="<strong>Your Personal Yes-Man</strong><br>Ask Me Anything")
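For reference, a minimal sketch of the pattern this commit introduces: a generator event handler that streams chat messages to the Chatbot while also writing the retrieved context into a Markdown box inside an Accordion. This is not the app's exact code; fake_retrieve and respond are hypothetical stand-ins for get_context and newbot, and the exact Chatbot/history types are assumptions (Gradio 4+, type="messages"). The wiring via .then(...), gr.update(), and the [chatbot, ctx_box] output list mirrors the diff above.

import gradio as gr


def fake_retrieve(question: str) -> str:
    # Placeholder for the real vector-store lookup (hypothetical).
    return "### 0\nA retrieved passage relevant to: " + question + "\n"


def respond(history: list):
    user_message = history[-1]["content"]
    ctx_text = fake_retrieve(user_message)
    history.append({"role": "assistant", "content": ""})
    # First yield fills the context box once; later yields leave it unchanged.
    yield history, ctx_text
    partial = ""
    for token in "A streamed answer built from the context.".split():
        partial += token + " "
        history[-1]["content"] = partial
        yield history, gr.update()


with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages")
    with gr.Accordion("Retrieved context (latest answer)", open=False):
        ctx_box = gr.Markdown(label="", show_label=False)
    msg = gr.Textbox(lines=1, container=False)
    clear = gr.Button("Clear")

    def user(message, history):
        history.append({"role": "user", "content": message})
        return "", history  # Empty string clears the textbox.

    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        respond, chatbot, [chatbot, ctx_box]
    )
    clear.click(lambda: (None, ""), None, [chatbot, ctx_box], queue=False)

if __name__ == "__main__":
    demo.launch()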
vector3_db/a1b2bf9f-4f30-46a6-a6c2-b6ca99effce9/data_level0.bin CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:840b1f90717b4f61fc19b4ad97d5b9a56ad2ae6fa9be39ad09b39b0abb82472b
+ oid sha256:b8146ecc3e4c3a36ea9b3edc3778630c452f483990ec942d38e8006f4661e430
size 16760000
vector3_db/a1b2bf9f-4f30-46a6-a6c2-b6ca99effce9/length.bin CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:f157672d82ccbe5cac80b9525b998b2ad2778c1079a4c7536bb6fd3dcfdf12bb
+ oid sha256:ff80c6ec655fc2ca4f622b1c630607bb775837f7bfa8ff648d72e980f7040e22
size 40000
vector3_db/chroma.sqlite3 CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:fe959502ce6ddfbd2031eebfd7a6ce106cf2cd4460ece7c395e6dd7db97cdd98
+ oid sha256:466cd1438ed0a1ccd64dfd2bd430844cc62159b875dc1762d5728e531b396670
size 11452416