# memory_logic.py
import os
import json
import time
from datetime import datetime, timezone
import logging
import re
import threading
# Conditionally import heavy dependencies
try:
from sentence_transformers import SentenceTransformer
import faiss
import numpy as np
except ImportError:
SentenceTransformer, faiss, np = None, None, None
logging.warning("SentenceTransformers, FAISS, or NumPy not installed. Semantic search will be unavailable.")
try:
import sqlite3
except ImportError:
sqlite3 = None
logging.warning("sqlite3 module not available. SQLite backend will be unavailable.")
try:
from datasets import load_dataset, Dataset
except ImportError:
load_dataset, Dataset = None, None
logging.warning("datasets library not installed. Hugging Face Dataset backend will be unavailable.")
logger = logging.getLogger(__name__)
# Suppress verbose logs from dependencies (getLogger creates the logger if needed)
for lib_name in ["sentence_transformers", "faiss", "datasets", "huggingface_hub"]:
    logging.getLogger(lib_name).setLevel(logging.WARNING)
# --- Configuration (Read directly from environment variables) ---
STORAGE_BACKEND = os.getenv("STORAGE_BACKEND", "HF_DATASET").upper()  # HF_DATASET, RAM, or SQLITE
SQLITE_DB_PATH = os.getenv("SQLITE_DB_PATH", "app_data/ai_memory.db") # Changed default path
HF_TOKEN = os.getenv("HF_TOKEN")
HF_MEMORY_DATASET_REPO = os.getenv("HF_MEMORY_DATASET_REPO", "broadfield-dev/ai-brain") # Example
HF_RULES_DATASET_REPO = os.getenv("HF_RULES_DATASET_REPO", "broadfield-dev/ai-rules") # Example
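# A minimal environment setup for the SQLite backend might look like this (values are
# illustrative, not from the source):
#   export STORAGE_BACKEND=SQLITE
#   export SQLITE_DB_PATH=app_data/ai_memory.db
# The HF_DATASET backend additionally needs HF_TOKEN with write access to both dataset repos.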
# --- Globals for RAG within this module ---
_embedder = None
_dimension = 384 # Default, will be set by embedder
_faiss_memory_index = None
_memory_items_list = [] # Stores JSON strings of memory objects for RAM, or loaded from DB/HF
_faiss_rules_index = None
_rules_items_list = [] # Stores rule text strings
_initialized = False
_init_lock = threading.Lock()
# --- Helper: SQLite Connection ---
def _get_sqlite_connection():
if not sqlite3:
raise ImportError("sqlite3 module is required for SQLite backend but not found.")
db_dir = os.path.dirname(SQLITE_DB_PATH)
if db_dir and not os.path.exists(db_dir):
os.makedirs(db_dir, exist_ok=True)
return sqlite3.connect(SQLITE_DB_PATH, timeout=10)
def _init_sqlite_tables():
if STORAGE_BACKEND != "SQLITE" or not sqlite3:
return
try:
with _get_sqlite_connection() as conn:
cursor = conn.cursor()
cursor.execute("""
CREATE TABLE IF NOT EXISTS memories (
id INTEGER PRIMARY KEY AUTOINCREMENT,
memory_json TEXT NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS rules (
id INTEGER PRIMARY KEY AUTOINCREMENT,
rule_text TEXT NOT NULL UNIQUE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
""")
conn.commit()
logger.info("SQLite tables for memories and rules checked/created.")
except Exception as e:
logger.error(f"SQLite table initialization error: {e}", exc_info=True)
def _build_faiss_index(items_list, text_extraction_fn):
"""Builds a FAISS index from a list of items."""
if not _embedder:
logger.error("Cannot build FAISS index: Embedder not available.")
return None
index = faiss.IndexFlatL2(_dimension)
if not items_list:
return index
logger.info(f"Building FAISS index for {len(items_list)} items...")
texts_to_embed = [text_extraction_fn(item) for item in items_list]
try:
embeddings = _embedder.encode(texts_to_embed, convert_to_tensor=False, show_progress_bar=False)
embeddings_np = np.array(embeddings, dtype=np.float32)
if embeddings_np.ndim == 2 and embeddings_np.shape[0] == len(items_list):
index.add(embeddings_np)
logger.info(f"FAISS index built successfully with {index.ntotal} items.")
else:
logger.error(f"FAISS build failed: Embeddings shape error. Expected ({len(items_list)}, {_dimension}), Got {getattr(embeddings_np, 'shape', 'N/A')}")
return faiss.IndexFlatL2(_dimension) # Return empty index on failure
except Exception as e:
logger.error(f"Error building FAISS index: {e}", exc_info=True)
return faiss.IndexFlatL2(_dimension) # Return empty index on failure
    return index

def _memory_text_for_embedding(memory_json_str: str) -> str:
    """Builds the text that gets embedded for a memory record, parsing its JSON once."""
    obj = json.loads(memory_json_str)
    return (f"User: {obj.get('user_input', '')}\n"
            f"AI: {obj.get('bot_response', '')}\n"
            f"Takeaway: {obj.get('metrics', {}).get('takeaway', 'N/A')}")
# --- Initialization ---
def initialize_memory_system():
    """Loads the embedding model, fetches memories and rules from the configured backend,
    and builds both FAISS indices. Idempotent: only the first call does any work."""
    global _initialized, _embedder, _dimension, _faiss_memory_index, _memory_items_list, _faiss_rules_index, _rules_items_list
with _init_lock:
if _initialized:
return
logger.info(f"Initializing memory system with backend: {STORAGE_BACKEND}")
init_start_time = time.time()
if not SentenceTransformer or not faiss or not np:
logger.error("Core RAG libraries not available. Cannot initialize semantic memory.")
return
if not _embedder:
try:
logger.info("Loading SentenceTransformer model (all-MiniLM-L6-v2)...")
_embedder = SentenceTransformer('all-MiniLM-L6-v2', cache_folder="./sentence_transformer_cache")
_dimension = _embedder.get_sentence_embedding_dimension() or 384
except Exception as e:
logger.critical(f"FATAL: Error loading SentenceTransformer: {e}", exc_info=True)
return
if STORAGE_BACKEND == "SQLITE": _init_sqlite_tables()
        # Load memories from persistent storage
        temp_memories_json = []
        if STORAGE_BACKEND == "SQLITE":
            try:
                with _get_sqlite_connection() as conn:
                    temp_memories_json = [row[0] for row in conn.execute("SELECT memory_json FROM memories")]
            except Exception as e:
                logger.error(f"Error loading memories from SQLite: {e}")
        elif STORAGE_BACKEND == "HF_DATASET":
            try:
                logger.info(f"Loading memories from HF Dataset: {HF_MEMORY_DATASET_REPO}")
                dataset = load_dataset(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, trust_remote_code=True)
                if "train" in dataset and "memory_json" in dataset["train"].column_names:
                    temp_memories_json = [m for m in dataset["train"]["memory_json"] if isinstance(m, str) and m.strip()]
                    logger.info(f"Loaded {len(temp_memories_json)} valid memories from HF Dataset.")
                else:
                    logger.warning(f"HF Dataset {HF_MEMORY_DATASET_REPO} has no 'train' split or 'memory_json' column.")
            except Exception as e:
                logger.error(f"Error loading memories from HF Dataset: {e}", exc_info=True)
        _memory_items_list = temp_memories_json
        # Build the memory FAISS index
        _faiss_memory_index = _build_faiss_index(_memory_items_list, _memory_text_for_embedding)
        # Load rules from persistent storage
        temp_rules_text = []
        if STORAGE_BACKEND == "SQLITE":
            try:
                with _get_sqlite_connection() as conn:
                    temp_rules_text = [row[0] for row in conn.execute("SELECT rule_text FROM rules")]
            except Exception as e:
                logger.error(f"Error loading rules from SQLite: {e}")
        elif STORAGE_BACKEND == "HF_DATASET":
            try:
                logger.info(f"Loading rules from HF Dataset: {HF_RULES_DATASET_REPO}")
                dataset = load_dataset(HF_RULES_DATASET_REPO, token=HF_TOKEN, trust_remote_code=True)
                if "train" in dataset and "rule_text" in dataset["train"].column_names:
                    temp_rules_text = [r for r in dataset["train"]["rule_text"] if isinstance(r, str) and r.strip()]
                    logger.info(f"Loaded {len(temp_rules_text)} valid rules from HF Dataset.")
                else:
                    logger.warning(f"HF Dataset {HF_RULES_DATASET_REPO} has no 'train' split or 'rule_text' column.")
            except Exception as e:
                logger.error(f"Error loading rules from HF Dataset: {e}", exc_info=True)
_rules_items_list = sorted(list(set(temp_rules_text)))
# Build Rules FAISS Index
_faiss_rules_index = _build_faiss_index(_rules_items_list, lambda r: r)
_initialized = True
logger.info(f"Memory system initialization complete in {time.time() - init_start_time:.2f}s")
def _verify_and_rebuild_if_needed(index, items_list, text_extraction_fn):
"""Self-healing function to ensure FAISS index is synced with the item list."""
if not index or index.ntotal != len(items_list):
logger.warning(
f"FAISS index mismatch detected (Index: {index.ntotal if index else 'None'}, List: {len(items_list)}). "
"Rebuilding index from in-memory cache."
)
return _build_faiss_index(items_list, text_extraction_fn)
return index
# --- Memory Operations (Semantic) ---
def add_memory_entry(user_input: str, metrics: dict, bot_response: str) -> tuple[bool, str]:
    """Embeds one interaction, adds it to the FAISS index and in-memory cache, and persists it to the active backend."""
    global _memory_items_list, _faiss_memory_index
if not _initialized: initialize_memory_system()
if not _embedder or not _faiss_memory_index:
return False, "Memory system not ready for adding entries."
    memory_obj = {"user_input": user_input, "metrics": metrics, "bot_response": bot_response, "timestamp": datetime.now(timezone.utc).isoformat()}
memory_json_str = json.dumps(memory_obj)
text_to_embed = f"User: {user_input}\nAI: {bot_response}\nTakeaway: {metrics.get('takeaway', 'N/A')}"
try:
embedding = _embedder.encode([text_to_embed], convert_to_tensor=False)
embedding_np = np.array(embedding, dtype=np.float32)
_faiss_memory_index.add(embedding_np)
_memory_items_list.append(memory_json_str)
if STORAGE_BACKEND == "SQLITE":
with _get_sqlite_connection() as conn:
conn.execute("INSERT INTO memories (memory_json) VALUES (?)", (memory_json_str,)); conn.commit()
elif STORAGE_BACKEND == "HF_DATASET":
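            # Note: this re-pushes the entire in-memory list as the dataset on every add;
            # simple and keeps the repo consistent, but a potential bottleneck as the store grows.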
Dataset.from_dict({"memory_json": list(_memory_items_list)}).push_to_hub(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, private=True)
logger.info(f"Added memory. Cache size: {len(_memory_items_list)}, FAISS size: {_faiss_memory_index.ntotal}")
return True, "Memory added successfully."
except Exception as e:
logger.error(f"Error adding memory entry: {e}", exc_info=True)
return False, f"Error adding memory: {e}"
def retrieve_memories_semantic(query: str, k: int = 3) -> list[dict]:
    """Returns up to k stored memories most similar to the query, parsed into dicts."""
    global _faiss_memory_index
if not _initialized: initialize_memory_system()
    # Self-healing: verify the index is synced with the cache; rebuild if not.
    _faiss_memory_index = _verify_and_rebuild_if_needed(
        _faiss_memory_index, _memory_items_list, _memory_text_for_embedding
    )
if not _faiss_memory_index or _faiss_memory_index.ntotal == 0:
logger.debug("Cannot retrieve memories: index is empty.")
return []
try:
query_embedding = _embedder.encode([query], convert_to_tensor=False)
query_embedding_np = np.array(query_embedding, dtype=np.float32)
distances, indices = _faiss_memory_index.search(query_embedding_np, min(k, _faiss_memory_index.ntotal))
results = [json.loads(_memory_items_list[i]) for i in indices[0] if 0 <= i < len(_memory_items_list)]
logger.info(f"Retrieved {len(results)} memories for query: '{query[:50]}...'")
return results
except Exception as e:
logger.error(f"Error retrieving memories semantically: {e}", exc_info=True)
return []
# --- Rule (Insight) Operations (Semantic) ---
def add_rule_entry(rule_text: str) -> tuple[bool, str]:
    """Validates, embeds, and stores a single rule; rejects duplicates and malformed rules."""
    global _rules_items_list, _faiss_rules_index
    if not _initialized: initialize_memory_system()
    if not _embedder or not _faiss_rules_index:
        return False, "Memory system not ready for adding rules."
    rule_text = rule_text.strip()
    if not rule_text or rule_text == "duplicate" or rule_text in _rules_items_list:
        return False, "duplicate or invalid"
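    # A valid rule starts with a bracketed type and score, e.g. (illustrative example):
    #   "[CORE_RULE|1.0] Always cite sources when available."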
if not re.match(r"\[(CORE_RULE|RESPONSE_PRINCIPLE|BEHAVIORAL_ADJUSTMENT|GENERAL_LEARNING)\|([\d\.]+?)\]", rule_text, re.I):
return False, "Invalid rule format."
try:
embedding = _embedder.encode([rule_text], convert_to_tensor=False)
embedding_np = np.array(embedding, dtype=np.float32)
_faiss_rules_index.add(embedding_np)
_rules_items_list.append(rule_text)
_rules_items_list.sort()
if STORAGE_BACKEND == "SQLITE":
with _get_sqlite_connection() as conn:
conn.execute("INSERT OR IGNORE INTO rules (rule_text) VALUES (?)", (rule_text,)); conn.commit()
elif STORAGE_BACKEND == "HF_DATASET":
Dataset.from_dict({"rule_text": list(_rules_items_list)}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
return True, "Rule added successfully."
except Exception as e:
logger.error(f"Error adding rule entry: {e}", exc_info=True)
return False, f"Error adding rule: {e}"
def retrieve_rules_semantic(query: str, k: int = 5) -> list[str]:
global _faiss_rules_index
if not _initialized: initialize_memory_system()
_faiss_rules_index = _verify_and_rebuild_if_needed(_faiss_rules_index, _rules_items_list, lambda r: r)
if not _faiss_rules_index or _faiss_rules_index.ntotal == 0: return []
try:
query_embedding = _embedder.encode([query], convert_to_tensor=False)
query_embedding_np = np.array(query_embedding, dtype=np.float32)
distances, indices = _faiss_rules_index.search(query_embedding_np, min(k, _faiss_rules_index.ntotal))
return [_rules_items_list[i] for i in indices[0] if 0 <= i < len(_rules_items_list)]
except Exception as e:
logger.error(f"Error retrieving rules semantically: {e}", exc_info=True)
return []
def remove_rule_entry(rule_text_to_delete: str) -> bool:
global _rules_items_list, _faiss_rules_index
if not _initialized: initialize_memory_system()
rule_text_to_delete = rule_text_to_delete.strip()
if rule_text_to_delete not in _rules_items_list: return False
try:
_rules_items_list.remove(rule_text_to_delete)
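        # Rebuilding from scratch keeps the FAISS index exactly in sync with the remaining
        # rules; simpler than selective removal, at the cost of re-embedding every rule.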
_faiss_rules_index = _build_faiss_index(_rules_items_list, lambda r: r)
if STORAGE_BACKEND == "SQLITE":
with _get_sqlite_connection() as conn:
conn.execute("DELETE FROM rules WHERE rule_text = ?", (rule_text_to_delete,)); conn.commit()
elif STORAGE_BACKEND == "HF_DATASET":
Dataset.from_dict({"rule_text": list(_rules_items_list)}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
return True
except Exception as e:
logger.error(f"Error removing rule entry: {e}", exc_info=True)
return False
# --- Utility functions to get all data (for UI display, etc.) ---
def get_all_rules_cached() -> list[str]:
if not _initialized: initialize_memory_system()
return list(_rules_items_list)
def get_all_memories_cached() -> list[dict]:
if not _initialized: initialize_memory_system()
return [json.loads(m) for m in _memory_items_list if m]
def clear_all_memory_data_backend() -> bool:
global _memory_items_list, _faiss_memory_index
if not _initialized: initialize_memory_system()
_memory_items_list = []
if _faiss_memory_index: _faiss_memory_index.reset()
try:
if STORAGE_BACKEND == "SQLITE":
with _get_sqlite_connection() as conn: conn.execute("DELETE FROM memories"); conn.commit()
elif STORAGE_BACKEND == "HF_DATASET":
Dataset.from_dict({"memory_json": []}).push_to_hub(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, private=True)
logger.info("All memories cleared.")
return True
except Exception as e:
logger.error(f"Error clearing all memory data: {e}"); return False
def clear_all_rules_data_backend() -> bool:
global _rules_items_list, _faiss_rules_index
if not _initialized: initialize_memory_system()
_rules_items_list = []
if _faiss_rules_index: _faiss_rules_index.reset()
try:
if STORAGE_BACKEND == "SQLITE":
with _get_sqlite_connection() as conn: conn.execute("DELETE FROM rules"); conn.commit()
elif STORAGE_BACKEND == "HF_DATASET":
Dataset.from_dict({"rule_text": []}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
logger.info("All rules cleared.")
return True
except Exception as e:
logger.error(f"Error clearing all rules data: {e}"); return False
# --- Optional FAISS index persistence to disk ---
_FAISS_DIR = os.getenv("FAISS_STORAGE_PATH", "app_data/faiss_indices")
FAISS_MEMORY_PATH = os.path.join(_FAISS_DIR, "memory_index.faiss")
FAISS_RULES_PATH = os.path.join(_FAISS_DIR, "rules_index.faiss")
def save_faiss_indices_to_disk():
if not _initialized or not faiss: return
faiss_dir = os.path.dirname(FAISS_MEMORY_PATH)
if not os.path.exists(faiss_dir): os.makedirs(faiss_dir, exist_ok=True)
if _faiss_memory_index and _faiss_memory_index.ntotal > 0:
faiss.write_index(_faiss_memory_index, FAISS_MEMORY_PATH)
if _faiss_rules_index and _faiss_rules_index.ntotal > 0:
faiss.write_index(_faiss_rules_index, FAISS_RULES_PATH)
def load_faiss_indices_from_disk():
global _faiss_memory_index, _faiss_rules_index
if not _initialized or not faiss: return
if os.path.exists(FAISS_MEMORY_PATH):
_faiss_memory_index = faiss.read_index(FAISS_MEMORY_PATH)
if os.path.exists(FAISS_RULES_PATH):
_faiss_rules_index = faiss.read_index(FAISS_RULES_PATH) |
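
# --- Minimal smoke test (hypothetical usage sketch, not part of the original module) ---
# Assumes sentence-transformers, faiss, and numpy are installed. Run with
# STORAGE_BACKEND=RAM so nothing is written to SQLite or the Hugging Face Hub.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    initialize_memory_system()
    ok, msg = add_rule_entry("[CORE_RULE|1.0] Prefer concise, sourced answers.")  # example rule text
    print("add_rule_entry:", ok, msg)
    ok, msg = add_memory_entry(
        user_input="What does FAISS do?",
        metrics={"takeaway": "User asked about vector similarity search."},
        bot_response="FAISS indexes embeddings for fast nearest-neighbor search.",
    )
    print("add_memory_entry:", ok, msg)
    print("memories:", retrieve_memories_semantic("vector search libraries", k=1))
    print("rules:", retrieve_rules_semantic("how should answers be written?", k=1))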