# utils/core.py

import asyncio
import aiohttp
import re

import globals
from langchain_community.vectorstores import FAISS
from langchain_text_splitters import CharacterTextSplitter, RecursiveCharacterTextSplitter
from langchain_core.documents import Document


# switch_model() removed - llama-swap handles model switching automatically
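#
# Illustrative sketch only (not original code; the endpoint URL and model names are
# placeholders): with llama-swap proxying the llama.cpp servers, "switching" models
# amounts to requesting a different model name - llama-swap starts/stops the backing
# server process as needed.
#
# async def _ask_model(prompt: str, model: str) -> str:
#     async with aiohttp.ClientSession() as session:
#         async with session.post(
#             "http://localhost:8080/v1/chat/completions",  # llama-swap proxy (placeholder)
#             json={"model": model, "messages": [{"role": "user", "content": prompt}]},
#         ) as resp:
#             data = await resp.json()
#             return data["choices"][0]["message"]["content"]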


async def is_miku_addressed(message) -> bool:
    """Return True if the bot should respond to this message as if addressed to Miku."""
    # Check if this is a DM (no guild)
    if message.guild is None:
        # In DMs, always respond to every message
        return True

    # Safety check: ensure guild and guild.me exist
    if not message.guild or not message.guild.me:
        print(f"⚠️ Warning: Invalid guild or guild.me in message from {message.author}")
        return False

    # If the message contains a ping for Miku, return True
    if message.guild.me in message.mentions:
        return True

    # If the message is a reply, check whether it references one of Miku's messages
    if message.reference:
        try:
            referenced_msg = await message.channel.fetch_message(message.reference.message_id)
            if referenced_msg.author == message.guild.me:
                return True
        except Exception as e:
            print(f"⚠️ Could not fetch referenced message: {e}")

    # Otherwise, look for "miku" used as a direct address: not embedded in another
    # word, and followed by a comma or by nothing but punctuation/whitespace until
    # the end of the message
    cleaned = message.content.strip()

    return bool(re.search(
        r'(?<![\w\(])(?:[^\w\s]{0,2}\s*)?miku(?:\s*[^\w\s]{0,2})?(?=,|\s*,|[!\.?\s]*$)',
        cleaned,
        re.IGNORECASE
    ))
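
# Illustrative examples (added for clarity, not part of the original code) of what
# the name-detection regex above accepts; the sample messages are invented:
#
#     >>> pattern = r'(?<![\w\(])(?:[^\w\s]{0,2}\s*)?miku(?:\s*[^\w\s]{0,2})?(?=,|\s*,|[!\.?\s]*$)'
#     >>> bool(re.search(pattern, "Miku, how are you?", re.IGNORECASE))     # name followed by a comma
#     True
#     >>> bool(re.search(pattern, "good night miku!", re.IGNORECASE))       # name at the end of the message
#     True
#     >>> bool(re.search(pattern, "hatsune miku is great", re.IGNORECASE))  # name mid-sentence
#     False
#     >>> bool(re.search(pattern, "mikudayo", re.IGNORECASE))               # name embedded in another word
#     False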


# Vectorstore functionality disabled - not needed with current structured context approach
# If you need embeddings in the future, you can use a different embedding provider
# For now, the bot uses structured prompts from context_manager.py

# def load_miku_knowledge():
#     with open("miku_lore.txt", "r", encoding="utf-8") as f:
#         text = f.read()
#
#     from langchain_text_splitters import RecursiveCharacterTextSplitter
#
#     text_splitter = RecursiveCharacterTextSplitter(
#         chunk_size=520,
#         chunk_overlap=50,
#         separators=["\n\n", "\n", ".", "!", "?", ",", " ", ""]
#     )
#
#     docs = [Document(page_content=chunk) for chunk in text_splitter.split_text(text)]
#
#     vectorstore = FAISS.from_documents(docs, embeddings)
#     return vectorstore
#
# def load_miku_lyrics():
#     with open("miku_lyrics.txt", "r", encoding="utf-8") as f:
#         lyrics_text = f.read()
#
#     text_splitter = CharacterTextSplitter(chunk_size=520, chunk_overlap=50)
#     docs = [Document(page_content=chunk) for chunk in text_splitter.split_text(lyrics_text)]
#
#     vectorstore = FAISS.from_documents(docs, embeddings)
#     return vectorstore
#
# miku_vectorstore = load_miku_knowledge()
# miku_lyrics_vectorstore = load_miku_lyrics()
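#
# Minimal sketch (an assumption, not the original setup) of how the `embeddings`
# object referenced above could be provided if this code were ever re-enabled,
# e.g. with a local sentence-transformers model; the model name is only an example:
#
# from langchain_huggingface import HuggingFaceEmbeddings
# embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")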