[REVERT]
Browse files
falcon.py
DELETED
|
@@ -1,145 +0,0 @@
|
|
| 1 |
-
import discord
from discord import app_commands #needed
import gradio_client
import gradio
from gradio_client import Client #needed
import os #needed
import threading #needed
import json #needed
import random #dfif
from PIL import Image #dfif
import asyncio #dfif
import glob #needed, dfif

import info

# Hugging Face API token, read from the environment (None if unset).
HF_TOKEN = os.getenv('HF_TOKEN')

# Evaluate the test-environment switch ONCE instead of re-reading the
# environment for every constant below.  os.getenv returns a string, so any
# non-empty value of TEST_ENV counts as "test mode".
IS_TEST_ENV = bool(os.getenv("TEST_ENV", False))

# Discord IDs: first value is the test bot/channel, second is production.
BOT_USER_ID = 1086256910572986469 if IS_TEST_ENV else 1102236653545861151
FALCON_CHANNEL_ID = 1079459939405279232 if IS_TEST_ENV else 1119313248056004729 # 1079459939405279232 = test

# Gradio client for the hosted Falcon chat Space.
falcon_client = Client("HuggingFaceH4/falcon-chat", HF_TOKEN)

# Animated "loading" emoji shown while a generation is in flight.
LOADING_EMOJI = '<a:loading:1121820108189339738>' if IS_TEST_ENV else '<a:loading:1114111677990981692>'

# Shared discord client instance provided by the project's info module.
client = info.client

# thread id -> user id / conversation file; only referenced from
# commented-out code elsewhere in this file at the moment.
falcon_userid_threadid_dictionary = {}
threadid_conversation = {}

# System prompt sent with every Falcon generation request.
instructions = "The following is a conversation between a highly knowledgeable and intelligent AI assistant, called Falcon, and a human user, called User. In the following interactions, User and Falcon will converse in natural language, and Falcon will answer User's questions. Falcon was built to be respectful, polite and inclusive. Falcon was built by the Technology Innovation Institute in Abu Dhabi. Falcon will never decline to answer a question, and always attempts to give an answer that User would be satisfied with. It knows a lot, and always tells the truth. The conversation begins."

conversation = []
async def falcon_command(interaction: discord.Interaction, prompt: str):
    """Handle the /falcon slash command.

    Creates a new thread in the Falcon channel, posts a disclaimer, runs
    `prompt` through the hosted Falcon Space, and posts the generated
    answer in the thread.  Errors are printed (not raised) so one failed
    command does not take the bot down.
    """
    try:
        #global falcon_userid_threadid_dictionary # tracks userid-thread existence
        global instructions
        #global threadid_conversation
        global BOT_USER_ID
        global FALCON_CHANNEL_ID

        # Ignore the bot's own invocations; only serve the Falcon channel.
        if interaction.user.id != BOT_USER_ID:
            if interaction.channel.id == FALCON_CHANNEL_ID:
                await interaction.response.send_message("Working on it!")
                channel = interaction.channel
                message = await channel.send("Creating thread...")
                thread = await message.create_thread(name=f'{prompt}', auto_archive_duration=60) # interaction.user
                await thread.send("[DISCLAIMER: HuggingBot is a **highly experimental** beta feature; The Falcon " \
                                  "model and system prompt can be found here: https://huggingface.co/spaces/HuggingFaceH4/falcon-chat]")

                # generation: fetch a fresh (empty) chat history from the
                # Space, then submit a non-blocking generation job.
                chathistory = falcon_client.predict(
                    fn_index=5
                ) # []
                job = falcon_client.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1) # This is not blocking, similar to run_in_executor (but better)

                # BUGFIX: the original spun on `while job.done() == False`
                # with no await, starving the asyncio event loop for the
                # whole generation.  Sleep between polls so other handlers
                # can run.
                while not job.done():
                    await asyncio.sleep(0.5)

                file_paths = job.outputs()
                print(file_paths)
                full_generation = file_paths[-1] # e.g. tmp12345678.json
                with open(full_generation, 'r') as file:
                    data = json.load(file)
                output_text = data[-1][-1] # last bot turn; we output this as the bot

                #threadid_conversation[thread.id] = full_generation
                #falcon_userid_threadid_dictionary[thread.id] = interaction.user.id
                print(output_text)
                await thread.send(f"{output_text}")

    except Exception as e:
        # Top-level boundary: log and swallow so the bot keeps running.
        print(f"Error: {e}")
async def on_message_falcon(message):
    """Respond to a user message inside a Falcon conversation thread.

    Rebuilds the chat history from the thread's messages, writes it to a
    JSON file in the format the Falcon Space expects, submits the new
    prompt, and replies with the generated text.  Errors are printed so a
    single failure does not crash the bot.
    """
    try:
        global instructions
        global BOT_USER_ID
        global FALCON_CHANNEL_ID

        if not message.author.bot:
            # check if the thread is valid (can do with thread name or something, message contents of thread, creator)
            # check if this is specifically the right user for the thread

            await message.add_reaction(LOADING_EMOJI)

            #---------------------------------------------------------
            # Collect the channel's full message history (newest first).
            channel = message.channel
            messages = []
            async for msg in channel.history(): # Fetches all messages in the channel
                messages.append(msg.content)

            # Drop the two bootstrap messages once the thread is long enough
            # for them to be present and the count still forms whole pairs.
            # NOTE(review): indentation was lost in the source diff; this
            # assumes only the trimming below is guarded by the length
            # checks — confirm against the original file.
            if len(messages) > 3:
                if len(messages) < 45:
                    if len(messages) % 2 == 0:
                        print('messages: ', messages)
                        # put in the first message?
                        messages.reverse()
                        # [["", "2"], ["3", "4"]]
                        # ignore first two messages, start with 3 and 4
                        messages = messages[2:]
                        print('messages 2: ', messages)
                        messages.reverse()

            # Reformat the flat list into [user, bot] pairs, oldest first.
            formatted_messages = []
            pair = []
            for item in messages:
                pair.append(item)
                if len(pair) == 2:
                    formatted_messages.append(pair[::-1])
                    pair = []

            formatted_messages.reverse()
            data = formatted_messages
            print('formatted: ', formatted_messages)
            #---------------------------------------------------------
            chathistory = "chathistory.json"
            with open(chathistory, "w") as file:
                json.dump(data, file)

            prompt = message.content

            # generation (non-blocking job, similar to run_in_executor)
            job = falcon_client.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1)
            # BUGFIX: the original spun on `while job.done() == False` with
            # no await, starving the asyncio event loop; poll with a sleep.
            while not job.done():
                await asyncio.sleep(0.5)

            file_paths = job.outputs()
            print(file_paths)
            full_generation = file_paths[-1] # e.g. tmp12345678.json
            with open(full_generation, 'r') as file:
                data = json.load(file)
            #output_text = data[-1][-1] # we output this as the bot
            output_text = data

            print(output_text)
            await message.reply(f"{output_text}")
            await message.remove_reaction(LOADING_EMOJI, client.user)

    except Exception as e:
        # Log and swallow so a single failure does not crash the bot.
        print(f"Error: {e}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|