Remove all Ollama remnants and complete migration to llama.cpp
- Remove Ollama-specific files (Dockerfile.ollama, entrypoint.sh)
- Replace all query_ollama imports and calls with query_llama
- Remove langchain-ollama dependency from requirements.txt
- Update all utility files (autonomous, kindness, image_generation, etc.)
- Update README.md documentation references
- Maintain backward compatibility alias in llm.py
This commit is contained in:
@@ -9,7 +9,7 @@ from datetime import datetime, timedelta
 from typing import List, Dict, Optional
 import discord
 import globals
-from utils.llm import query_ollama
+from utils.llm import query_llama
 from utils.dm_logger import dm_logger
 
 # Directories
@@ -167,7 +167,7 @@ Respond ONLY with the JSON object, no other text."""
 
     # Query the LLM
     try:
-        response = await query_ollama(
+        response = await query_llama(
            analysis_prompt,
            user_id=f"analyzer-{user_id}",
            guild_id=None,
Reference in New Issue
Block a user