lunarflu HF Staff committed on
Commit 4ee9bda · 1 Parent(s): 44aca50

Update falcon.py

Files changed (1)
  1. falcon.py +31 -2
falcon.py CHANGED
@@ -6,6 +6,7 @@ falcon_client = info.falcon_client
 json = info.json
 threadid_conversation = info.threadid_conversation
 falcon_userid_threadid_dictionary = info.falcon_userid_threadid_dictionary
+instructions = info.instructions
 
 async def falcon_command(interaction: discord.Interaction, prompt: str):
     try:
@@ -21,7 +22,7 @@ async def falcon_command(interaction: discord.Interaction, prompt: str):
         chathistory = falcon_client.predict(
             fn_index=5
         ) # []
-        job = falcon_client.submit(prompt, chathistory, info.instructions, 0.8, 0.9, fn_index=1) # This is not blocking, similar to run_in_executor (but better)
+        job = falcon_client.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1) # This is not blocking, similar to run_in_executor (but better)
         while job.done() == False:
             status = job.status()
             #print(status)
@@ -38,4 +39,32 @@ async def falcon_command(interaction: discord.Interaction, prompt: str):
         await thread.send(f"{output_text}")
 
     except Exception as e:
-        print(f"Error: {e}")
+        print(f"Error: {e}")
+
+async def continue_falcon(message):
+    """continues a given conversation based on chathistory"""
+    try:
+        await message.add_reaction('<a:loading:1114111677990981692>')
+        chathistory = threadid_conversation[message.channel.id]
+        prompt = message.content
+        # generation
+        job = falcon_client.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1) # This is not blocking, similar to run_in_executor (but better)
+        while job.done() == False:
+            status = job.status()
+            #print(status)
+        else:
+            file_paths = job.outputs()
+            full_generation = file_paths[-1] # tmp12345678.json
+            with open(full_generation, 'r') as file:
+                data = json.load(file)
+            output_text = data[-1][-1] # we output this as the bot
+
+            threadid_conversation[message.channel.id] = full_generation # overwrite the old file
+            falcon_userid_threadid_dictionary[message.channel.id] = message.author.id
+            print(output_text)
+            await message.reply(f"{output_text}")
+            await message.remove_reaction('<a:loading:1114111677990981692>', client.user)
+
+    except Exception as e:
+        print(f"Error: {e}")
+        await message.reply(f"Error: {e} <@811235357663297546> (continue_falcon error)")
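
For context, the new continue_falcon coroutine only does anything if the bot's on_message event forwards replies in Falcon threads to it. Below is a minimal sketch of that wiring; the client instance, the intents setup, and the import path are assumptions about the surrounding bot code, not part of this commit.

# Hypothetical wiring (assumed, not shown in this commit): dispatch replies in
# Falcon threads to continue_falcon from the bot's on_message event.
import discord
from falcon import continue_falcon, falcon_userid_threadid_dictionary

intents = discord.Intents.default()
intents.message_content = True              # required to read message.content
client = discord.Client(intents=intents)

@client.event
async def on_message(message: discord.Message):
    if message.author == client.user:       # ignore the bot's own replies
        return
    # Only continue a conversation in a thread that falcon_command opened for this user.
    if falcon_userid_threadid_dictionary.get(message.channel.id) == message.author.id:
        await continue_falcon(message)

client.run("DISCORD_BOT_TOKEN")             # placeholder token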