Initial commit: Miku Discord Bot

This commit is contained in:
2025-12-07 17:15:09 +02:00
commit 8c74ad5260
206 changed files with 50125 additions and 0 deletions

View File

@@ -0,0 +1,317 @@
# autonomous.py
import random
import time
import json
import os
from datetime import datetime
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from discord import Status
from discord import TextChannel
from difflib import SequenceMatcher
import globals
from utils.llm import query_ollama
from utils.moods import MOOD_EMOJIS
from utils.twitter_fetcher import fetch_miku_tweets
from utils.image_handling import analyze_image_with_qwen, download_and_encode_image
scheduler = AsyncIOScheduler()
_last_autonomous_messages = [] # rotating buffer of last general messages
MAX_HISTORY = 10
_last_user_engagements = {} # user_id -> timestamp
LAST_SENT_TWEETS_FILE = "memory/last_sent_tweets.json"
LAST_SENT_TWEETS = []
def setup_autonomous_speaking():
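    # Two recurring jobs: the autonomous tick every 10 minutes and a conversation-join check every 3 minutes.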
scheduler.add_job(miku_autonomous_tick, "interval", minutes=10)
scheduler.add_job(miku_detect_and_join_conversation, "interval", minutes=3)
scheduler.start()
print("🤖 Autonomous Miku is active!")
async def miku_autonomous_tick(action_type="general", force=False, force_action=None):
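    # Unless forced, only act ~20% of the time; otherwise run one of: general message, engage a user, or share a tweet (force_action overrides the random pick).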
if not force and random.random() > 0.2: # 20% chance to act
return
if force_action:
action_type = force_action
else:
action_type = random.choice(["general", "engage_user", "share_tweet"])
if action_type == "general":
await miku_say_something_general()
elif action_type == "engage_user":
await miku_engage_random_user()
else:
await share_miku_tweet()
async def miku_say_something_general():
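    # Compose a spontaneous message for the autonomous channel, retrying up to 3 times if the draft is too similar to recent ones.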
channel = globals.client.get_channel(globals.AUTONOMOUS_CHANNEL_ID)
if not channel:
print("⚠️ Autonomous channel not found.")
return
mood = globals.CURRENT_MOOD_NAME
time_of_day = get_time_of_day()
emoji = MOOD_EMOJIS.get(mood, "")
history_summary = "\n".join(f"- {msg}" for msg in _last_autonomous_messages[-5:]) if _last_autonomous_messages else "None yet."
prompt = (
f"Miku is feeling {mood}. It's currently {time_of_day}. "
f"Write a short, natural message that Miku might say out of the blue in a chat. "
f"She might greet everyone, make a cute observation, ask a silly question, or say something funny. "
f"Make sure it feels casual and spontaneous, like a real person might say.\n\n"
f"Here are some things Miku recently said, do not repeat them or say anything too similar:\n{history_summary}"
)
for attempt in range(3): # retry up to 3 times if message is too similar
message = await query_ollama(prompt, user_id=f"miku-general-{int(time.time())}")
if not is_too_similar(message, _last_autonomous_messages):
break
print("🔁 Response was too similar to past messages, retrying...")
    try:
        await channel.send(message)
        _last_autonomous_messages.append(message)
        if len(_last_autonomous_messages) > MAX_HISTORY:
            _last_autonomous_messages.pop(0)
        print(f"💬 Miku said something general in #{channel.name}")
    except Exception as e:
        print(f"⚠️ Failed to send autonomous message: {e}")
async def miku_engage_random_user():
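    # Pick a random online/idle/dnd non-bot member (plus one tracked invisible user) and open a conversation based on their status/activity; if they were engaged within the last 12 hours, fall back to a general message instead.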
guild = globals.client.get_guild(globals.TARGET_GUILD_ID)
if not guild:
print("⚠️ Target guild not found.")
return
channel = globals.client.get_channel(globals.AUTONOMOUS_CHANNEL_ID)
if not channel:
print("⚠️ Autonomous channel not found.")
return
members = [
m for m in guild.members
if m.status in {Status.online, Status.idle, Status.dnd} and not m.bot
]
time_of_day = get_time_of_day()
# Include the invisible user except during late night
specific_user_id = 214857593045254151 # Your invisible user's ID
specific_user = guild.get_member(specific_user_id)
if specific_user:
if specific_user.status != Status.offline or "late night" not in time_of_day:
if specific_user not in members:
members.append(specific_user)
if not members:
print("😴 No available members to talk to.")
return
target = random.choice(members)
now = time.time()
last_time = _last_user_engagements.get(target.id, 0)
if now - last_time < 43200: # 12 hours in seconds
print(f"⏱️ Recently engaged {target.display_name}, switching to general message.")
await miku_say_something_general()
return
activity_name = None
if target.activities:
for a in target.activities:
if hasattr(a, 'name') and a.name:
activity_name = a.name
break
mood = globals.CURRENT_MOOD_NAME
emoji = MOOD_EMOJIS.get(mood, "")
is_invisible = target.status == Status.offline
display_name = target.display_name
prompt = (
f"Miku is feeling {mood} {emoji} during the {time_of_day}. "
f"She notices {display_name}'s current status is {target.status.name}. "
)
if is_invisible:
prompt += (
f"Miku suspects that {display_name} is being sneaky and invisible 👻. "
f"She wants to playfully call them out in a fun, teasing, but still affectionate way. "
)
elif activity_name:
prompt += (
f"They appear to be playing or doing: {activity_name}. "
f"Miku wants to comment on this and start a friendly conversation."
)
else:
prompt += (
f"Miku wants to casually start a conversation with them, maybe ask how they're doing, what they're up to, or even talk about something random with them."
)
prompt += (
f"\nThe message should be short and reflect Mikus current mood."
)
try:
message = await query_ollama(prompt, user_id=f"miku-engage-{int(time.time())}")
await channel.send(f"{target.mention} {message}")
print(f"👤 Miku engaged {display_name}")
_last_user_engagements[target.id] = time.time()
except Exception as e:
print(f"⚠️ Failed to engage user: {e}")
async def miku_detect_and_join_conversation():
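    # Only interject when the channel is busy: at least 5 human messages from 2+ users within the last 10 minutes, and even then only 50% of the time.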
channel = globals.client.get_channel(globals.AUTONOMOUS_CHANNEL_ID)
if not isinstance(channel, TextChannel):
print("⚠️ Autonomous channel is invalid or not found.")
return
# Fetch last 20 messages (for filtering)
try:
messages = [msg async for msg in channel.history(limit=20)]
except Exception as e:
print(f"⚠️ Failed to fetch channel history: {e}")
return
# Filter to messages in last 10 minutes from real users (not bots)
recent_msgs = [
msg for msg in messages
if not msg.author.bot
and (datetime.now(msg.created_at.tzinfo) - msg.created_at).total_seconds() < 600
]
user_ids = set(msg.author.id for msg in recent_msgs)
if len(recent_msgs) < 5 or len(user_ids) < 2:
# Not enough activity
return
if random.random() > 0.5:
return # 50% chance to engage
# Use last 10 messages for context (oldest to newest)
convo_lines = reversed(recent_msgs[:10])
history_text = "\n".join(
f"{msg.author.display_name}: {msg.content}" for msg in convo_lines
)
mood = globals.CURRENT_MOOD_NAME
emoji = MOOD_EMOJIS.get(mood, "")
prompt = (
f"Miku is watching a conversation happen in the chat. Her current mood is {mood} {emoji}. "
f"She wants to say something relevant, playful, or insightful based on what people are talking about.\n\n"
f"Here's the conversation:\n{history_text}\n\n"
f"Write a short reply that feels natural and adds to the discussion. It should reflect Mikus mood and personality."
)
try:
reply = await query_ollama(prompt, user_id=f"miku-chat-{int(time.time())}")
await channel.send(reply)
print(f"💬 Miku joined an ongoing conversation.")
except Exception as e:
print(f"⚠️ Failed to interject in conversation: {e}")
async def share_miku_tweet():
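    # Fetch recent Miku tweets, skip any already posted (persisted in memory/last_sent_tweets.json), describe the first image with the vision model, then post the link and Miku's comment.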
    channel = globals.client.get_channel(globals.AUTONOMOUS_CHANNEL_ID)
    if not channel:
        print("⚠️ Autonomous channel not found.")
        return
    tweets = await fetch_miku_tweets(limit=5)
if not tweets:
print("📭 No good tweets found.")
return
fresh_tweets = [t for t in tweets if t["url"] not in LAST_SENT_TWEETS]
if not fresh_tweets:
print("⚠️ All fetched tweets were recently sent. Reusing tweets.")
fresh_tweets = tweets
tweet = random.choice(fresh_tweets)
LAST_SENT_TWEETS.append(tweet["url"])
if len(LAST_SENT_TWEETS) > 50:
LAST_SENT_TWEETS.pop(0)
save_last_sent_tweets()
# Prepare prompt
mood = globals.CURRENT_MOOD_NAME
emoji = MOOD_EMOJIS.get(mood, "")
base_prompt = f"Here's a tweet from @{tweet['username']}:\n\n{tweet['text']}\n\nComment on it in a fun Miku style! Miku's current mood is {mood} {emoji}. Make sure the comment reflects Miku's mood and personality."
# Optionally analyze first image
first_img_url = tweet["media"][0]
base64_img = await download_and_encode_image(first_img_url)
if base64_img:
img_desc = await analyze_image_with_qwen(base64_img)
base_prompt += f"\n\nThe image looks like this: {img_desc}"
miku_comment = await query_ollama(base_prompt, user_id="autonomous")
# Post to Discord
await channel.send(f"{tweet['url']}")
await channel.send(miku_comment)
async def handle_custom_prompt(user_prompt: str):
channel = globals.client.get_channel(globals.AUTONOMOUS_CHANNEL_ID)
if not channel:
print("⚠️ Autonomous channel not found.")
return False
mood = globals.CURRENT_MOOD_NAME
emoji = MOOD_EMOJIS.get(mood, "")
time_of_day = get_time_of_day()
    # Wrap the user's idea in Miku context
prompt = (
f"Miku is feeling {mood} {emoji} during the {time_of_day}. "
f"She has been instructed to: \"{user_prompt.strip()}\"\n\n"
f"Write a short, natural message as Miku that follows this instruction. "
f"Make it feel spontaneous, emotionally in character, and aligned with her mood and personality. Decide if the time of day is relevant to this request or not and if it is not, do not mention it."
)
try:
message = await query_ollama(prompt, user_id=f"manual-{int(time.time())}")
await channel.send(message)
print("🎤 Miku responded to custom prompt.")
_last_autonomous_messages.append(message)
return True
except Exception as e:
print(f"❌ Failed to send custom autonomous message: {e}")
return False
def load_last_sent_tweets():
global LAST_SENT_TWEETS
if os.path.exists(LAST_SENT_TWEETS_FILE):
try:
with open(LAST_SENT_TWEETS_FILE, "r", encoding="utf-8") as f:
LAST_SENT_TWEETS = json.load(f)
except Exception as e:
print(f"⚠️ Failed to load last sent tweets: {e}")
LAST_SENT_TWEETS = []
else:
LAST_SENT_TWEETS = []
def save_last_sent_tweets():
try:
with open(LAST_SENT_TWEETS_FILE, "w", encoding="utf-8") as f:
json.dump(LAST_SENT_TWEETS, f)
except Exception as e:
print(f"⚠️ Failed to save last sent tweets: {e}")
def get_time_of_day():
    hour = (datetime.now().hour + 3) % 24  # shift by +3 hours to local time, wrapped past midnight
if 5 <= hour < 12:
return "morning"
elif 12 <= hour < 18:
return "afternoon"
elif 18 <= hour < 22:
return "evening"
return "late night. Miku wonders if anyone is still awake"
def is_too_similar(new_message, history, threshold=0.85):
for old in history:
ratio = SequenceMatcher(None, new_message.lower(), old.lower()).ratio()
if ratio > threshold:
return True
return False

View File

@@ -0,0 +1,106 @@
# utils/core.py
import asyncio
import aiohttp
import re
import globals
from langchain_community.vectorstores import FAISS
from langchain.text_splitter import CharacterTextSplitter, RecursiveCharacterTextSplitter
from langchain.schema import Document
async def switch_model(model_name: str, timeout: int = 600):
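    # Keep only the requested model resident: list loaded models, unload the rest, then poll /api/generate with a dummy prompt until the target model responds or `timeout` seconds pass.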
if globals.current_model == model_name:
print(f"🔁 Model '{model_name}' already loaded.")
return
# Unload all other models to clear VRAM
    async with aiohttp.ClientSession() as session:
        async with session.get(f"{globals.OLLAMA_URL}/api/ps") as resp:
            if resp.status == 200:
                data = await resp.json()
                loaded_models = data.get("models", [])
                for model in loaded_models:
                    if model["name"] != model_name:
                        print(f"🔁 Unloading model: {model['name']}")
                        # keep_alive=0 asks Ollama to free the model immediately
                        await session.post(
                            f"{globals.OLLAMA_URL}/api/generate",
                            json={"model": model["name"], "keep_alive": 0}
                        )
            else:
                print("⚠️ Failed to check currently loaded models.")
    print(f"🔄 Switching to model '{model_name}'...")
# Warm up the new model (dummy call to preload it)
payload = {
"model": model_name,
"prompt": "Hello",
"stream": False
}
headers = {"Content-Type": "application/json"}
# Poll until /api/generate returns 200
async with aiohttp.ClientSession() as session:
for _ in range(timeout):
async with session.post(f"{globals.OLLAMA_URL}/api/generate", json=payload, headers=headers) as resp:
if resp.status == 200:
globals.current_model = model_name
print(f"✅ Model {model_name} ready!")
return
await asyncio.sleep(1) # Wait a second before trying again
raise TimeoutError(f"Timed out waiting for model '{model_name}' to become available.")
async def is_miku_addressed(message) -> bool:
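    # Miku counts as addressed if she is mentioned, if the message replies to one of hers, or if the text contains a standalone "miku" (see the regex below).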
# If message contains a ping for Miku, return true
if message.guild.me in message.mentions:
return True
# If message is a reply, check the referenced message author
if message.reference:
try:
referenced_msg = await message.channel.fetch_message(message.reference.message_id)
if referenced_msg.author == message.guild.me: # or globals.client.user if you use client
return True
except Exception as e:
print(f"⚠️ Could not fetch referenced message: {e}")
cleaned = message.content.strip()
return bool(re.search(
r'(?<![\w\(])(?:[^\w\s]{0,2}\s*)?miku(?:\s*[^\w\s]{0,2})?(?=,|\s*,|[!\.?\s]*$)',
cleaned,
re.IGNORECASE
))
# Load and index once at startup
def load_miku_knowledge():
with open("miku_lore.txt", "r", encoding="utf-8") as f:
text = f.read()
text_splitter = RecursiveCharacterTextSplitter(
chunk_size=520,
chunk_overlap=50,
separators=["\n\n", "\n", ".", "!", "?", ",", " ", ""]
)
docs = [Document(page_content=chunk) for chunk in text_splitter.split_text(text)]
vectorstore = FAISS.from_documents(docs, globals.embeddings)
return vectorstore
def load_miku_lyrics():
with open("miku_lyrics.txt", "r", encoding="utf-8") as f:
lyrics_text = f.read()
text_splitter = CharacterTextSplitter(chunk_size=520, chunk_overlap=50)
docs = [Document(page_content=chunk) for chunk in text_splitter.split_text(lyrics_text)]
vectorstore = FAISS.from_documents(docs, globals.embeddings)
return vectorstore
miku_vectorstore = load_miku_knowledge()
miku_lyrics_vectorstore = load_miku_lyrics()

View File

@@ -0,0 +1,72 @@
# utils/image_handling.py
import aiohttp
import base64
import globals
from utils.core import switch_model
from utils.core import miku_vectorstore
async def download_and_encode_image(url):
async with aiohttp.ClientSession() as session:
async with session.get(url) as resp:
if resp.status != 200:
return None
img_bytes = await resp.read()
return base64.b64encode(img_bytes).decode('utf-8')
async def analyze_image_with_qwen(base64_img):
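    # Despite the name, this currently switches to and queries the "moondream" vision model for the description.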
await switch_model("moondream")
payload = {
"model": "moondream",
"prompt": "Describe this image in detail.",
"images": [base64_img],
"stream": False
}
headers = {"Content-Type": "application/json"}
async with aiohttp.ClientSession() as session:
async with session.post(f"{globals.OLLAMA_URL}/api/generate", json=payload, headers=headers) as response:
if response.status == 200:
data = await response.json()
return data.get("response", "No description.")
else:
return f"Error: {response.status}"
async def rephrase_as_miku(qwen_output, user_prompt):
await switch_model(globals.OLLAMA_MODEL) # likely llama3
with open("miku_prompt.txt", "r", encoding="utf-8") as f:
system_prompt = f.read()
relevant_docs_lore = miku_vectorstore.similarity_search(qwen_output, k=3)
context = "\n\n".join([doc.page_content for doc in relevant_docs_lore])
full_prompt = (
f"{context}\n\n"
f"The user asked: \"{user_prompt}\"\n"
f"The image contains: \"{qwen_output}\"\n\n"
f"Respond like Miku: cheerful, helpful, and opinionated when asked.\n\n"
f"Miku is currently feeling: {globals.CURRENT_MOOD}\n Please respond in a way that reflects this emotional tone.\n\n"
f"Miku:"
)
payload = {
"model": globals.OLLAMA_MODEL,
"prompt": full_prompt,
"system": system_prompt,
"stream": False
}
headers = {"Content-Type": "application/json"}
async with aiohttp.ClientSession() as session:
async with session.post(f"{globals.OLLAMA_URL}/api/generate", json=payload, headers=headers) as response:
if response.status == 200:
data = await response.json()
return data.get("response", "No response.")
else:
return f"Error: {response.status}"

View File

@@ -0,0 +1,49 @@
# utils/kindness.py
import random
import globals
from utils.llm import query_ollama # Adjust path as needed
async def detect_and_react_to_kindness(message, after_reply=False):
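    # Two-stage check: react immediately on a kindness keyword; otherwise wait until after Miku has replied (after_reply=True) and ask the LLM for a yes/no judgement.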
if message.id in globals.kindness_reacted_messages:
return # Already reacted — skip
content = message.content.lower()
emoji = random.choice(globals.HEART_REACTIONS)
# 1. Keyword-based detection
if any(keyword in content for keyword in globals.KINDNESS_KEYWORDS):
try:
await message.add_reaction(emoji)
globals.kindness_reacted_messages.add(message.id)
message.kindness_reacted = True # Mark as done
print("✅ Kindness detected via keywords. Reacted immediately.")
except Exception as e:
print(f"⚠️ Error adding reaction: {e}")
return
# 2. If not after_reply, defer model-based check
if not after_reply:
print("🗝️ No kindness via keywords. Deferring...")
return
# 3. Model-based detection
try:
prompt = (
"The following message was sent to Miku the bot. "
"Does it sound like the user is being explicitly kind or affectionate toward Miku? "
"Answer with 'yes' or 'no' only.\n\n"
f"Message: \"{message.content}\""
)
result = await query_ollama(prompt, user_id="kindness-check")
if result.strip().lower().startswith("yes"):
await message.add_reaction(emoji)
globals.kindness_reacted_messages.add(message.id)
print("✅ Kindness detected via model. Reacted.")
else:
print("🧊 No kindness detected.")
except Exception as e:
print(f"⚠️ Error during kindness analysis: {e}")

View File

@@ -0,0 +1,70 @@
# utils/llm.py
import aiohttp
import datetime
import globals
from utils.core import miku_vectorstore, miku_lyrics_vectorstore
from collections import defaultdict
conversation_history = defaultdict(list)
async def query_ollama(user_prompt, user_id):
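    # Build the prompt from retrieved lore/lyrics chunks (k=3 each), the user's last 8 exchanges, and the current mood, then call Ollama's /api/generate and record the reply in the history.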
relevant_docs_lore = miku_vectorstore.similarity_search(user_prompt, k=3)
relevant_docs_lyrics = miku_lyrics_vectorstore.similarity_search(user_prompt, k=3)
context_lore = "\n".join([doc.page_content for doc in relevant_docs_lore])
context_lyrics = "\n".join([doc.page_content for doc in relevant_docs_lyrics])
combined_docs = relevant_docs_lore + relevant_docs_lyrics
context = "\n\n".join([doc.page_content for doc in combined_docs])
# Persona definition
with open("miku_prompt.txt", "r", encoding="utf-8") as f:
system_prompt = f.read()
# Append angry wake-up note if JUST_WOKEN_UP flag is set
if getattr(globals, 'JUST_WOKEN_UP', False):
system_prompt += (
"\n\n🚨IMPORTANT NOTE🚨: Miku was just rudely woken up by the user with the current prompt of theirs.\nShe is fuming with anger and should respond harshly and coldly.\nDo NOT show kindness or friendliness in this response. Feel free to put certain words in all capital letters to emphasize your frustration."
)
# Add angry wake-up context if in forced angry state
if globals.FORCED_ANGRY_UNTIL:
now = datetime.datetime.utcnow()
if now < globals.FORCED_ANGRY_UNTIL:
system_prompt += (
"\n\n[NOTE]: Miku is currently angry because she was rudely woken up from sleep by the user. "
"Her responses should reflect irritation and coldness towards the user."
)
# Build conversation history
history = list(globals.conversation_history.get(user_id, []))[-8:] # limit to last 8 exchanges
history_text = "\n".join([f"User: {u}\nMiku: {m}" for u, m in history])
# Combine prompt
full_prompt = (
f"{context_lore}\n\n{context_lyrics}\n\n"
f"{history_text}\nMiku is currently feeling: {globals.CURRENT_MOOD}\nPlease respond in a way that reflects this emotional tone.\nUser: {user_prompt}\nMiku:"
)
globals.LAST_FULL_PROMPT = full_prompt # ← track latest prompt
headers = {'Content-Type': 'application/json'}
payload = {
"model": globals.OLLAMA_MODEL,
"prompt": full_prompt,
"system": system_prompt,
"stream": False
}
async with aiohttp.ClientSession() as session:
async with session.post(f"{globals.OLLAMA_URL}/api/generate", json=payload, headers=headers) as response:
if response.status == 200:
data = await response.json()
reply = data.get("response", "No response.")
# Save to conversation history
globals.conversation_history[user_id].append((user_prompt, reply))
return reply
else:
return f"Error: {response.status}"

View File

@@ -0,0 +1,70 @@
# utils/media.py
import subprocess
async def overlay_username_with_ffmpeg(base_video_path, output_path, username):
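    # Burn the username (and a few "@everyone" lines) onto the base video at hard-coded positions/timestamps via ffmpeg drawtext filters. Note: subprocess.run blocks the event loop while ffmpeg renders.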
font_path = "/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf"
text = f"@{username}"
# Define your six positions (x, y)
positions = {
1: ("250", "370"),
2: ("330", "130"),
3: ("300", "90"),
4: ("380", "180"),
5: ("365", "215"),
6: ("55", "365"),
7: ("290", "130"),
8: ("320", "210"),
9: ("310", "240"),
10: ("400", "240")
}
    # Each entry: (start_time, end_time, position_index, text_type)
text_entries = [
(4.767, 5.367, 1, "username"),
(5.4, 5.967, 2, "username"),
(6.233, 6.833, 3, "username"),
(6.967, 7.6, 4, "username"),
(7.733, 8.367, 5, "username"),
(8.667, 9.133, 6, "username"),
(9.733, 10.667, 7, "username"),
(11.6, 12.033, 8, "@everyone"),
(12.067, 13.0, 9, "@everyone"),
(13.033, 14.135, 10, "@everyone"),
]
# Build drawtext filters
drawtext_filters = []
for start, end, pos_id, text_type in text_entries:
x_coord, y_coord = positions[pos_id]
# Determine actual text content
text_content = f"@{username}" if text_type == "username" else text_type
x = f"{x_coord} - text_w/2"
y = f"{y_coord} - text_h/2"
filter_str = (
f"drawtext=text='{text_content}':"
f"fontfile='{font_path}':"
f"fontcolor=black:fontsize=30:x={x}:y={y}:"
f"enable='between(t,{start},{end})'"
)
drawtext_filters.append(filter_str)
vf_string = ",".join(drawtext_filters)
ffmpeg_command = [
"ffmpeg",
"-i", base_video_path,
"-vf", vf_string,
"-codec:a", "copy",
output_path
]
try:
subprocess.run(ffmpeg_command, check=True)
print("✅ Video processed successfully with username overlays.")
except subprocess.CalledProcessError as e:
print(f"⚠️ FFmpeg error: {e}")

View File

@@ -0,0 +1,169 @@
# utils/moods.py
import random
import datetime
import discord
import os
import asyncio
from discord.ext import tasks
import globals
MOOD_EMOJIS = {
"asleep": "💤",
"neutral": "",
"bubbly": "🫧",
"sleepy": "🌙",
"curious": "👀",
"shy": "👉👈",
"serious": "👔",
"excited": "",
"melancholy": "🍷",
"flirty": "🫦",
"romantic": "💌",
"irritated": "😒",
"angry": "💢",
"silly": "🪿"
}
def load_mood_description(mood_name: str) -> str:
path = os.path.join("moods", f"{mood_name}.txt")
try:
with open(path, "r", encoding="utf-8") as f:
return f.read().strip()
except FileNotFoundError:
print(f"⚠️ Mood file '{mood_name}' not found. Falling back to default.")
return load_mood_description("neutral")
def detect_mood_shift(response_text):
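    # Scan Miku's reply for mood keywords; the first mood with a matching phrase wins. "asleep" is only reachable from the "sleepy" mood.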
mood_keywords = {
"asleep": [
"good night", "goodnight", "sweet dreams", "going to bed", "I will go to bed", "zzz~", "sleep tight"
],
"neutral": [
"okay", "sure", "alright", "i see", "understood", "hmm",
"sounds good", "makes sense", "alrighty", "fine", "got it"
],
"bubbly": [
"so excited", "feeling bubbly", "super cheerful", "yay!", "", "nya~",
"kyaa~", "heehee", "bouncy", "so much fun", "im glowing!", "nee~", "teehee", "I'm so happy"
],
"sleepy": [
"i'm sleepy", "getting tired", "yawn", "so cozy", "zzz", "nap time",
"just five more minutes", "snooze", "cuddle up", "dozing off", "so warm"
],
"curious": [
"i'm curious", "want to know more", "why?", "hmm?", "tell me more", "interesting!",
"whats that?", "how does it work?", "i wonder", "fascinating", "??", "🧐", "👀", "🤔"
],
"shy": [
"um...", "sorry if that was weird", "im kind of shy", "eep", "i hope thats okay", "im nervous",
"blushes", "oh no", "hiding face", "i dont know what to say", "heh...", "/////"
],
"serious": [
"lets be serious", "focus on the topic", "this is important", "i mean it", "be honest",
"we need to talk", "listen carefully", "lets not joke", "truthfully", "lets be real"
],
"excited": [
"OMG", "this is amazing", "im so hyped", "YAY!", "lets go!", "incredible!!!",
"AHHH!", "best day ever", "this is it!", "totally pumped", "i cant wait", "🔥🔥🔥", "i'm excited", "Wahaha"
],
"melancholy": [
"feeling nostalgic", "kind of sad", "just thinking a lot", "like rain on glass", "memories",
"bittersweet", "sigh", "quiet day", "blue vibes", "longing", "melancholy", "softly"
],
"flirty": [
"hey cutie", "arent you sweet", "teasing you~", "wink wink", "is that a blush?", "giggle~",
"come closer", "miss me?", "you like that, huh?", "🥰", "flirt mode activated", "youre kinda cute"
],
"romantic": [
"you mean a lot to me", "my heart", "i adore you", "so beautiful", "so close", "love letter",
"my dearest", "forever yours", "im falling for you", "sweetheart", "💖", "you're my everything"
],
"irritated": [
"ugh", "seriously?", "can we not", "whatever", "i'm annoyed", "you dont get it",
"rolling my eyes", "why do i even bother", "ugh, again?", "🙄", "dont start", "this again?"
],
"angry": [
"stop it", "enough!", "thats not okay", "im mad", "i said no", "dont push me",
"you crossed the line", "furious", "this is unacceptable", "😠", "im done", "dont test me"
],
"silly": [
"lol", "lmao", "silly", "hahaha", "goofy", "quack", "honk", "random", "what is happening", "nonsense", "😆", "🤣", "😂", "😄", "🐔", "🪿"
]
}
for mood, phrases in mood_keywords.items():
if mood == "asleep" and globals.CURRENT_MOOD_NAME != "sleepy":
print(f"❎ Mood 'asleep' skipped - mood isn't 'sleepy', it's '{globals.CURRENT_MOOD_NAME}'")
continue # Only allow transition to asleep from sleepy
for phrase in phrases:
if phrase.lower() in response_text.lower():
print(f"*️⃣ Mood keyword triggered: {phrase}")
return mood
return None
async def set_sleep_state(sleeping: bool):
    if sleeping:
        await globals.client.change_presence(status=discord.Status.invisible)
    else:
        await globals.client.change_presence(status=discord.Status.online)
await nickname_mood_emoji()
async def nickname_mood_emoji():
mood = globals.CURRENT_MOOD_NAME.lower()
print(f"🔍 Mood is: {mood}")
emoji = MOOD_EMOJIS.get(mood, "")
nickname = f"Hatsune Miku{emoji}"
for guild in globals.client.guilds:
me = guild.get_member(globals.BOT_USER.id)
if me is not None:
try:
await me.edit(nick=nickname)
print(f"💱 Changed nickname to {nickname}")
if mood == "asleep":
await globals.client.change_presence(status=discord.Status.invisible)
else:
await globals.client.change_presence(status=discord.Status.online)
except discord.Forbidden:
print(f"⚠️ Missing permission to change nickname in guild: {guild.name}")
except discord.HTTPException as e:
print(f"⚠️ Failed to change nickname in {guild.name}: {e}")
async def clear_angry_mood_after_delay():
await asyncio.sleep(40 * 60) # 40 minutes
print("🕒 Angry mood cooldown expired. Miku is calming down to neutral.")
globals.CURRENT_MOOD_NAME = "neutral"
globals.CURRENT_MOOD = load_mood_description("neutral")
globals.FORCED_ANGRY_UNTIL = None
await nickname_mood_emoji()
@tasks.loop(hours=1)
async def rotate_mood():
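    # Hourly mood rotation; skipped while the forced-angry window is active; tries up to 5 times to pick a mood different from the current one.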
try:
print("🔁 Mood rotation task running...")
if globals.FORCED_ANGRY_UNTIL:
now = datetime.datetime.utcnow()
if now < globals.FORCED_ANGRY_UNTIL:
print("⏰ Mood rotation skipped (angry mode).")
return
else:
globals.FORCED_ANGRY_UNTIL = None
old_mood_name = globals.CURRENT_MOOD_NAME
new_mood_name = old_mood_name
attempts = 0
while new_mood_name == old_mood_name and attempts < 5:
new_mood_name = random.choice(globals.AVAILABLE_MOODS)
attempts += 1
globals.CURRENT_MOOD_NAME = new_mood_name
globals.CURRENT_MOOD = load_mood_description(new_mood_name)
print(f"⏰ Mood auto-rotated to: {new_mood_name}")
await nickname_mood_emoji()
except Exception as e:
print(f"❌ Exception in rotate_mood: {e}")

View File

@@ -0,0 +1,159 @@
# utils/scheduled.py
import random
import json
import os
import time
from datetime import datetime, timedelta
from apscheduler.triggers.date import DateTrigger
from discord import Status, ActivityType
import globals
from utils.llm import query_ollama
from utils.core import switch_model # If you moved switch_model into a separate utils file
from globals import scheduler
BEDTIME_TRACKING_FILE = "last_bedtime_targets.json"
async def send_monday_video():
await switch_model(globals.OLLAMA_MODEL)
# Generate a motivational message
prompt = "It's Miku Monday! Give me an energetic and heartfelt Miku Monday morning message to inspire someone for the week ahead."
response = await query_ollama(prompt, user_id="weekly-motivation")
video_url = "http://zip.koko210cloud.xyz/u/zEgU7Z.mp4"
target_channel_ids = [
761014220707332107,
1140377617237807266
]
for channel_id in target_channel_ids:
channel = globals.client.get_channel(channel_id)
if channel is None:
print(f"❌ Could not find channel with ID {channel_id}. Make sure the bot is in the server.")
            continue
try:
await channel.send(content=response)
# Send video link
await channel.send(f"[Happy Miku Monday!]({video_url})")
print(f"✅ Sent Monday video to channel ID {channel_id}")
except Exception as e:
print(f"⚠️ Failed to send video to channel ID {channel_id}: {e}")
def load_last_bedtime_targets():
if not os.path.exists(BEDTIME_TRACKING_FILE):
return {}
try:
with open(BEDTIME_TRACKING_FILE, "r") as f:
return json.load(f)
except Exception as e:
print(f"⚠️ Failed to load bedtime tracking file: {e}")
return {}
_last_bedtime_targets = load_last_bedtime_targets()
def save_last_bedtime_targets(data):
try:
with open(BEDTIME_TRACKING_FILE, "w") as f:
json.dump(data, f)
except Exception as e:
print(f"⚠️ Failed to save bedtime tracking file: {e}")
async def send_bedtime_reminder():
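    # For each configured channel, pick an online/idle/dnd member (the tracked user is added even when invisible), avoid repeating last night's target when possible, and send an LLM-written, status- and activity-aware reminder.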
await switch_model(globals.OLLAMA_MODEL)
for channel_id in globals.BEDTIME_CHANNEL_IDS:
channel = globals.client.get_channel(channel_id)
if not channel:
print(f"⚠️ Channel ID {channel_id} not found.")
continue
guild = channel.guild
# Filter online members (excluding bots)
online_members = [
member for member in guild.members
if member.status in {Status.online, Status.idle, Status.dnd}
and not member.bot
]
specific_user_id = 214857593045254151 # target user ID
specific_user = guild.get_member(specific_user_id)
if specific_user and specific_user not in online_members:
online_members.append(specific_user)
if not online_members:
print(f"😴 No online members to ping in {guild.name}")
continue
# Avoid repeating the same person unless they're the only one
last_target_id = _last_bedtime_targets.get(str(guild.id))
eligible_members = [m for m in online_members if m.id != last_target_id]
if not eligible_members:
eligible_members = online_members # fallback if only one user
        chosen_one = random.choice(eligible_members)
# 🎯 Status-aware phrasing
status_map = {
Status.online: "",
Status.idle: "Be sure to include the following information on their status too: Their profile status is currently idle. This implies they're not on their computer now, but are still awake.",
Status.dnd: "Be sure to include the following information on their status too: Their current profile status is 'Do Not Disturb.' This implies they are very absorbed in what they're doing. But it's still important for them to know when to stop for the day and get some sleep, right?",
Status.offline: "Be sure to include the following information on their status too: Their profile status is currently offline, but is it really? It's very likely they've just set it to invisible to avoid being seen that they're staying up so late!"
}
status_note = status_map.get(chosen_one.status, "")
# 🎮 Activity-aware phrasing
activity_note = ""
if chosen_one.activities:
for activity in chosen_one.activities:
if activity.type == ActivityType.playing:
activity_note = f"You should also include the following information on their current activity on their profile too: They are playing **{activity.name}** right now. It's getting late, though. Maybe it's time to pause, leave the rest of the game for tomorrow and rest..."
break
elif activity.type == ActivityType.streaming:
activity_note = f"You should also include the following information on their current activity on their profile too: They are steaming **{activity.name}** at this hour? They should know it's getting way too late for streams."
break
elif activity.type == ActivityType.watching:
activity_note = f"You should also include the following information on their current activity on their profile too: They are watching **{activity.name}** right now. That's cozy, but it's not good to binge so late."
break
elif activity.type == ActivityType.listening:
activity_note = f"You should also include the following information on their current activity on their profile too: They are listening to **{activity.name}** right now. Sounds like they're better off putting appropriate music to fall asleep to."
break
# Generate bedtime message
prompt = (
f"Write a sweet, funny, or encouraging bedtime message to remind someone it's getting late and they should sleep. "
f"Make it short and wholesome, as if Miku is genuinely worried about their well-being. Imply that it's not good staying up so late."
f"{status_note}"
f"{activity_note}"
f"Miku is currently feeling: {globals.CURRENT_MOOD}\nPlease word in a way that reflects this emotional tone."
)
bedtime_message = await query_ollama(prompt, user_id=f"bedtime-miku-{int(time.time())}")
try:
await channel.send(f"{chosen_one.mention}, {bedtime_message}")
print(f"🌙 Sent bedtime reminder to {chosen_one.display_name} in {guild.name}")
# Save for next run
_last_bedtime_targets[str(guild.id)] = chosen_one.id
save_last_bedtime_targets(_last_bedtime_targets)
except Exception as e:
print(f"⚠️ Failed to send bedtime reminder in {guild.name}: {e}")
def schedule_random_bedtime():
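    # Schedule a one-shot bedtime reminder for 20:30 plus a random 0–29 minute offset (rolled over to tomorrow if 20:30 has already passed).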
now = datetime.now()
target_time = now.replace(hour=20, minute=30, second=0, microsecond=0)
    # If it's already past 20:30 today, schedule for tomorrow
if now > target_time:
target_time += timedelta(days=1)
    # Add random offset (0–29 mins)
offset_minutes = random.randint(0, 29)
run_time = target_time + timedelta(minutes=offset_minutes)
scheduler.add_job(send_bedtime_reminder, trigger=DateTrigger(run_date=run_time))
print(f"⏰ Bedtime reminder scheduled for {run_time.strftime('%Y-%m-%d %H:%M:%S')}")

View File

@@ -0,0 +1,88 @@
# utils/twitter_fetcher.py
import asyncio
import json
from twscrape import API, gather, Account
from playwright.async_api import async_playwright
from pathlib import Path
COOKIE_PATH = Path(__file__).parent / "x.com.cookies.json"
async def extract_media_urls(page, tweet_url):
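    # Open the tweet in the Playwright page and collect pbs.twimg.com image URLs, normalised to the "large" size.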
print(f"🔍 Visiting tweet page: {tweet_url}")
try:
await page.goto(tweet_url, timeout=15000)
await page.wait_for_timeout(1000)
media_elements = await page.query_selector_all("img[src*='pbs.twimg.com/media']")
urls = set()
for element in media_elements:
src = await element.get_attribute("src")
if src:
cleaned = src.split("&name=")[0] + "&name=large"
urls.add(cleaned)
print(f"🖼️ Found {len(urls)} media URLs on tweet: {tweet_url}")
return list(urls)
except Exception as e:
print(f"❌ Playwright error on {tweet_url}: {e}")
return []
async def fetch_miku_tweets(limit=5):
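    # Log twscrape in with cookies from x.com.cookies.json, search recent Miku tweets with images, then use headless Firefox to scrape each tweet's media URLs.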
# Load cookies from JSON file
with open(COOKIE_PATH, "r", encoding="utf-8") as f:
cookie_list = json.load(f)
cookie_header = "; ".join(f"{c['name']}={c['value']}" for c in cookie_list)
# Add the account to twscrape
api = API()
await api.pool.add_account(
username="HSankyuu39",
password="x", # placeholder (won't be used)
email="x", # optional
email_password="x", # optional
cookies=cookie_header
)
await api.pool.login_all()
print(f"🔎 Searching for Miku tweets (limit={limit})...")
query = 'Hatsune Miku OR 初音ミク has:images after:2025'
tweets = await gather(api.search(query, limit=limit, kv={"product": "Top"}))
print(f"📄 Found {len(tweets)} tweets, launching browser...")
async with async_playwright() as p:
browser = await p.firefox.launch(headless=True)
context = await browser.new_context()
await context.route("**/*", lambda route, request: (
route.abort() if any([
request.resource_type in ["font", "stylesheet"],
"analytics" in request.url,
"googletagmanager" in request.url,
"ads-twitter" in request.url,
]) else route.continue_()
))
page = await context.new_page()
results = []
for i, tweet in enumerate(tweets, 1):
username = tweet.user.username
tweet_url = f"https://twitter.com/{username}/status/{tweet.id}"
print(f"🧵 Processing tweet {i}/{len(tweets)} from @{username}")
media_urls = await extract_media_urls(page, tweet_url)
if media_urls:
results.append({
"username": username,
"text": tweet.rawContent,
"url": tweet_url,
"media": media_urls
})
await browser.close()
print(f"✅ Finished! Returning {len(results)} tweet(s) with media.")
return results

View File

@@ -0,0 +1,93 @@
[
{
"name": "guest_id",
"value": "v1%3A175335261565935646",
"domain": ".x.com",
"path": "/",
"expires": 1787567015,
"httpOnly": false,
"secure": true
},
{
"name": "__cf_bm",
"value": "peEr.Nm4OW1emOL5NdT16m6HD2VYwawwJujiqUudNJQ-1753352615-1.0.1.1-3IXQhpRSENb_iuyW8ewWbWeJasGBdhWik64PysrppjGxQNRuu.JHvBCIoHRPyKrWhi6fCuI9zSejV_ssEhzXxLoIX2P5RQL09I.u5bMWcJc",
"domain": ".x.com",
"path": "/",
"expires": 1753354415,
"httpOnly": true,
"secure": true
},
{
"name": "gt",
"value": "1948328199806390440",
"domain": ".x.com",
"path": "/",
"expires": 1753361615,
"httpOnly": false,
"secure": true
},
{
"name": "kdt",
"value": "e77B2PlTfQgzp1DPppkCiycs1TwUTQy1Q40922K3",
"domain": ".x.com",
"path": "/",
"expires": 1787567165,
"httpOnly": true,
"secure": true
},
{
"name": "twid",
"value": "u%3D1947614492390563840",
"domain": ".x.com",
"path": "/",
"expires": 1784888769,
"httpOnly": false,
"secure": true
},
{
"name": "ct0",
"value": "50d81af17e7d6a888f39bb541f60faf03975906d7286f7ff0591508aaf4a3bc9b4c74b9cec8b2742d36820c83d91733d5fbf67003dbf012dea1eee28a43087ea9a2b8b741a10475db90a53a009b3ed4d",
"domain": ".x.com",
"path": "/",
"expires": 1787567166,
"httpOnly": false,
"secure": true,
"sameSite": "Lax"
},
{
"name": "auth_token",
"value": "dcf6988e914fb6dc212e7f7b4fc53001eadd41ef",
"domain": ".x.com",
"path": "/",
"expires": 1787567165,
"httpOnly": true,
"secure": true
},
{
"name": "att",
"value": "1-5m5mkN7tHzFQpOxdhPj2WGwFxnj3UQVgEXJ3iuNg",
"domain": ".x.com",
"path": "/",
"expires": 1753439167,
"httpOnly": true,
"secure": true
},
{
"name": "lang",
"value": "en",
"domain": "x.com",
"path": "/",
"expires": -1,
"httpOnly": false,
"secure": false
},
{
"name": "d_prefs",
"value": "MjoxLGNvbnNlbnRfdmVyc2lvbjoyLHRleHRfdmVyc2lvbjoxMDAw",
"domain": ".x.com",
"path": "/",
"expires": 1768904770,
"httpOnly": false,
"secure": true
}
]