Remove all Ollama remnants and complete migration to llama.cpp
- Remove Ollama-specific files (Dockerfile.ollama, entrypoint.sh)
- Replace all query_ollama imports and calls with query_llama
- Remove langchain-ollama dependency from requirements.txt
- Update all utility files (autonomous, kindness, image_generation, etc.)
- Update README.md documentation references
- Maintain backward compatibility alias in llm.py
This commit is contained in:
@@ -11,7 +11,7 @@ from discord import Status, ActivityType
|
||||
|
||||
import globals
|
||||
from server_manager import server_manager
|
||||
from utils.llm import query_ollama
|
||||
from utils.llm import query_llama
|
||||
from utils.dm_interaction_analyzer import dm_analyzer
|
||||
|
||||
BEDTIME_TRACKING_FILE = "last_bedtime_targets.json"
|
||||
@@ -27,7 +27,7 @@ async def send_monday_video_for_server(guild_id: int):
|
||||
|
||||
# Generate a motivational message
|
||||
prompt = "It's Miku Monday! Give me an energetic and heartfelt Miku Monday morning message to inspire someone for the week ahead."
|
||||
response = await query_ollama(prompt, user_id=f"weekly-motivation-{guild_id}", guild_id=guild_id)
|
||||
response = await query_llama(prompt, user_id=f"weekly-motivation-{guild_id}", guild_id=guild_id)
|
||||
|
||||
video_url = "http://zip.koko210cloud.xyz/u/zEgU7Z.mp4"
|
||||
|
||||
@@ -158,7 +158,7 @@ async def send_bedtime_reminder_for_server(guild_id: int, client=None):
|
||||
f"Miku is currently feeling: {server_config.current_mood_description or 'neutral'}\nPlease word in a way that reflects this emotional tone."
|
||||
)
|
||||
|
||||
bedtime_message = await query_ollama(prompt, user_id=f"bedtime-{guild_id}", guild_id=guild_id)
|
||||
bedtime_message = await query_llama(prompt, user_id=f"bedtime-{guild_id}", guild_id=guild_id)
|
||||
|
||||
try:
|
||||
await channel.send(f"{chosen_one.mention} {bedtime_message}")
|
||||
|
||||
Reference in New Issue
Block a user