somewhat horrifying tweak to bridge mechanisms, AI

This commit is contained in:
osmarks 2024-02-09 16:50:45 +00:00
parent de835dfe08
commit 24760c5baf
4 changed files with 41 additions and 5 deletions

View File

@ -2,7 +2,7 @@
pytio==0.3.1
aiohttp==3.8.3
aiosqlite==0.17.0
nextcord==2.0.0b4
nextcord==2.3.2
numpy==1.23
prometheus-async==19.2.0
prometheus-client==0.15.0
@ -10,4 +10,5 @@ pydot==1.4.2
toml==0.10.2
requests==2.28.1
python-dateutil==2.8.2
irc==20.1.0
irc==20.1.0
parsedatetime

View File

@ -177,5 +177,29 @@ AutoBotRobot is operated by gollark/osmarks.
await ctx.send("\n".join(map(lambda x: f"{x[0]} x{x[1]}", results)))
@commands.command(help="Highly advanced AI Assistant.")
async def ai(self, ctx, *, query=None):
    """Generate an AI reply from recent channel history.

    Builds a chat-style prompt from up to the last 20 messages in the
    invoking channel (fetched newest-first, reversed to chronological
    order), stopping early once the accumulated prompt exceeds the
    configured ``ai.max_len``, then sends the backend's completion.
    ``query`` is unused directly; the invoking message's content is
    picked up from the history scan instead.
    """
    prompt = []
    total_len = 0  # running size — avoids re-summing the whole prompt list each iteration
    async for message in ctx.channel.history(limit=20):
        display_name = message.author.display_name
        if message.author == self.bot.user:
            # Present the bot's own messages under its configured persona name.
            display_name = util.config["ai"]["own_name"]
        content = message.content
        if content.startswith(ctx.prefix + "ai"):
            # Strip the invoking command prefix so the model sees only the query.
            content = content.removeprefix(ctx.prefix + "ai").lstrip()
        if not content and message.embeds:
            content = message.embeds[0].title
        elif not content and message.attachments:
            content = "[attachments]"
        if not content:
            continue
        entry = f"{display_name}: {content}\n\n"
        prompt.append(entry)
        total_len += len(entry)
        if total_len > util.config["ai"]["max_len"]:
            break
    prompt.reverse()
    # Prime the model to continue speaking as the bot itself.
    prompt.append(util.config["ai"]["own_name"] + ": ")
    generation = await util.generate(self.session, "".join(prompt))
    if generation: await ctx.send(generation)
def setup(bot):
    """Extension entry point: attach the GeneralCommands cog to *bot*."""
    cog = GeneralCommands(bot)
    bot.add_cog(cog)

View File

@ -104,7 +104,10 @@ class Telephone(commands.Cog):
else:
text = f"<{msg.author.name}> {text}"
await channel.send(text[:2000], allowed_mentions=discord.AllowedMentions(everyone=False, roles=False, users=False))
await send_raw(render_formatting(channel, msg.message)[:2000])
content = render_formatting(channel, msg.message)[:2000]
if channel_id in util.config["bridge_show_src"] and msg.source[0] == "discord":
content = f"<#{msg.source[1]}> " + content
await send_raw(content)
if attachments_text: await send_raw(attachments_text)
else:
logging.warning("Channel %d not found", channel_id)
@ -181,7 +184,6 @@ When you want to end a call, use hangup.
await ctx.send(f"Successfully deleted.")
pass
async def find_recent(self, chs, query):
one_week = timedelta(seconds=60*60*24*7)
one_week_ago = datetime.now() - one_week
@ -393,7 +395,7 @@ When you want to end a call, use hangup.
@telephone.command(brief="Dump links out of current channel.")
async def graph(self, ctx):
graph = pydot.Dot("linkgraph", ratio="fill")
graph = pydot.Dot("linkgraph", ratio="fill", overlap="false")
seen = set()
seen_edges = set()
def node_name(x):

View File

@ -15,6 +15,7 @@ import time
import math
import pytz
import collections
import aiohttp
config = {}
@ -335,3 +336,11 @@ def chunks(source, length):
for i in range(0, len(source), length):
yield source[i : i+length]
async def generate(response: aiohttp.ClientSession, prompt):
    """Request a completion for *prompt* from the configured LLM backend.

    NOTE(review): despite its name, ``response`` is the shared aiohttp
    client session (see annotation), not a response object — confirm
    before renaming, as keyword callers would break.

    Returns the text of the first choice; generation is capped at 200
    tokens and stops at the first blank line.
    """
    payload = {
        "prompt": prompt,
        "max_tokens": 200,
        "stop": ["\n\n"],
    }
    async with response.post(config["ai"]["llm_backend"], json=payload) as res:
        body = await res.json()
    return body["choices"][0]["text"]