diff --git a/bot/api.py b/bot/api.py
index cb21e13..751db97 100644
--- a/bot/api.py
+++ b/bot/api.py
@@ -1746,9 +1746,9 @@ def get_autonomous_stats():
 
 @app.get("/conversation/{user_id}")
 def get_conversation(user_id: str):
-    if user_id in globals.conversation_history:
-        return {"conversation": list(globals.conversation_history[user_id])}
-    return {"conversation": []}
+    """Get conversation history for a user/channel (uses centralized ConversationHistory)."""
+    messages = conversation_history.get_recent_messages(user_id)
+    return {"conversation": [{"author": author, "content": content, "is_bot": is_bot} for author, content, is_bot in messages]}
 
 # ========== Figurine DM Subscription APIs ==========
 @app.get("/figurines/subscribers")
diff --git a/bot/commands/actions.py b/bot/commands/actions.py
index 4c01b9d..8ebd553 100644
--- a/bot/commands/actions.py
+++ b/bot/commands/actions.py
@@ -4,6 +4,7 @@ import asyncio
 import globals
 from utils.moods import load_mood_description
 from utils.scheduled import send_bedtime_reminder
+from utils.conversation_history import conversation_history
 from utils.logger import get_logger
 
 logger = get_logger('commands')
@@ -32,7 +33,7 @@ def calm_miku() -> str:
 
 
 def reset_conversation(user_id):
-    globals.conversation_history[str(user_id)].clear()
+    conversation_history.clear_channel(str(user_id))
 
 
 async def force_sleep() -> str:
diff --git a/bot/globals.py b/bot/globals.py
index 788d37d..934e463 100644
--- a/bot/globals.py
+++ b/bot/globals.py
@@ -1,14 +1,10 @@
 # globals.py
 import os
-from collections import defaultdict, deque
 import discord
 from apscheduler.schedulers.asyncio import AsyncIOScheduler
 
 scheduler = AsyncIOScheduler()
 
-# Stores last 5 exchanges per user (as deque)
-conversation_history = defaultdict(lambda: deque(maxlen=5))
-
 DISCORD_BOT_TOKEN = os.getenv("DISCORD_BOT_TOKEN")
 
 # Autonomous V2 Debug Mode (set to True to see detailed decision logging)
diff --git a/bot/utils/bipolar_mode.py b/bot/utils/bipolar_mode.py
index 639916a..03bb595 100644
--- a/bot/utils/bipolar_mode.py
+++ b/bot/utils/bipolar_mode.py
@@ -1034,8 +1034,8 @@ async def run_argument(channel: discord.TextChannel, client, trigger_context: st
 
     # Clean up argument conversation history
     try:
-        conversation_history.clear_history(argument_user_id)
-    except:
+        conversation_history.clear_channel(argument_user_id)
+    except Exception:
         pass  # History cleanup is not critical
 
     end_argument(channel_id)
diff --git a/bot/utils/figurine_notifier.py b/bot/utils/figurine_notifier.py
index d1db068..3e3f22a 100644
--- a/bot/utils/figurine_notifier.py
+++ b/bot/utils/figurine_notifier.py
@@ -5,8 +5,8 @@ from datetime import datetime
 from typing import List, Dict, Any, Tuple
 
 import discord
-import globals
+from utils.conversation_history import conversation_history
 
 from utils.twitter_fetcher import fetch_figurine_tweets_latest
 from utils.image_handling import analyze_image_with_qwen, download_and_encode_image
 from utils.llm import query_llama
@@ -204,15 +204,11 @@ async def send_figurine_dm_to_user(client: discord.Client, user_id: int, tweet:
         # Log the comment message
         dm_logger.log_user_message(user, comment_message, is_bot_message=True)
 
-        # IMPORTANT: Also add to globals.conversation_history for LLM context
+        # Add to conversation history for LLM context (uses centralized ConversationHistory)
         user_id_str = str(user_id)
-
-        # Add the tweet URL as a "system message" about what Miku just sent (use original URL for context)
         tweet_context = f"[I just sent you this figurine tweet: {tweet_url}]"
-
-        # Add the figurine comment to conversation history
-        # Use empty user prompt since this was initiated by Miku
-        globals.conversation_history.setdefault(user_id_str, []).append((tweet_context, miku_comment))
+        conversation_history.add_message(channel_id=user_id_str, author_name="Miku", content=tweet_context, is_bot=True)
+        conversation_history.add_message(channel_id=user_id_str, author_name="Miku", content=miku_comment, is_bot=True)
 
         logger.debug(f"Messages logged to both DM history and conversation context for user {user_id}")
 
diff --git a/bot/utils/llm.py b/bot/utils/llm.py
index dbdaaed..b898592 100644
--- a/bot/utils/llm.py
+++ b/bot/utils/llm.py
@@ -475,10 +475,6 @@ Please respond in a way that reflects this emotional tone.{pfp_context}"""
                         is_bot=True
                     )
 
-                    # Also save to legacy globals for backward compatibility (skip error messages)
-                    if user_prompt and user_prompt.strip() and reply and reply.strip() and reply != "Someone tell Koko-nii there is a problem with my AI.":
-                        globals.conversation_history[user_id].append((user_prompt, reply))
-
                     return reply
                 else:
                     error_text = await response.text()