From 8a1316aeef24cd479c0e863d65dcd9eadaf7a847 Mon Sep 17 00:00:00 2001
From: cameron
Date: Fri, 10 May 2024 20:03:39 -0400
Subject: [PATCH] Internet search and document summarization

---
 plugins/botchat/plugin.py             | 62 ++++++++++++++++++++++++++-
 plugins/botchat/prompts/default.txt   | 27 +++++++++++-
 plugins/botchat/prompts/summarize.txt |  9 ++++
 3 files changed, 95 insertions(+), 3 deletions(-)
 create mode 100644 plugins/botchat/prompts/summarize.txt

diff --git a/plugins/botchat/plugin.py b/plugins/botchat/plugin.py
index 0b6ab3c..4705bc5 100644
--- a/plugins/botchat/plugin.py
+++ b/plugins/botchat/plugin.py
@@ -7,7 +7,9 @@ import yaml
 import random
 import os
 import logging
+import html2text
 import re
+import datetime
 
 logger=logging.getLogger("plugin.botchat")
 plugin_folder=os.path.dirname(os.path.realpath(__file__))
@@ -23,6 +25,20 @@ def ci_replace(text, replace_str, new_str):
     result = compiled.sub(new_str, text)
     return result
 
+async def summarize(text):
+    """
+    Uses the LLM to summarize the given text
+
+    :param text: text to summarize
+    :return: returns the summarized text
+    """
+    logger.info("Prompting LLM for text summary")
+    summary_file = os.path.join(prompts_folder, "summarize.txt")
+    with open(summary_file, 'r') as summary_file:
+        summary_prompt = summary_file.read()
+    summary_prompt = summary_prompt.replace("", text)
+    return await prompt_llm(summary_prompt)
+
 async def prompt_llm(prompt):
     """
     Prompts the upstream LLM for a completion of the given prompt
@@ -83,8 +99,24 @@ async def log_history(ctx, history):
 #        history_file.write(history)
     pass
 
+async def search_searx(query):
+    """
+    Searches the given query on SearX and returns an LLM summary
+
+    :param query: search query
+    """
+    search_url="https://metasearx.com/"
+    async with aiohttp.ClientSession(search_url) as session:
+        search_params = { "q": query }
+        async with session.get("/", data=search_params) as resp:
+            logger.info(f"Search response status {resp.status}")
+            response=await resp.text()
+            summary=await summarize(html2text.html2text(response))
+            logger.info(f"Search summary {summary}")
+            return summary
+
 @commands.command(name='llm')
-async def llm_response(ctx):
+async def llm_response(ctx, additional_context=""):
     """
     Sends a response from the bot to the chat context in {ctx}
 
@@ -98,16 +130,35 @@ async def llm_response(ctx):
     history_str = '\n'.join(history_arr)
     full_prompt = prompt.replace("", history_str)
     full_prompt = full_prompt.replace("", bot_name)
+    full_prompt = full_prompt.replace("", str(datetime.date.today()))
+    full_prompt = full_prompt.replace("