# memory_logic.py
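"""Semantic memory and rule (insight) storage with pluggable persistence backends.

Memories and rules are kept in process-wide lists, embedded with a
SentenceTransformer model, and indexed in FAISS for semantic retrieval.
Persistence is selected via the STORAGE_BACKEND environment variable:
RAM (no persistence), SQLITE, or HF_DATASET (Hugging Face Hub datasets).
"""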
import os
import json
import time
from datetime import datetime, timezone
import logging
import re
import threading

# Conditionally import heavy dependencies
try:
    from sentence_transformers import SentenceTransformer
    import faiss
    import numpy as np
except ImportError:
    SentenceTransformer, faiss, np = None, None, None
    logging.warning("SentenceTransformers, FAISS, or NumPy not installed. Semantic search will be unavailable.")

try:
    import sqlite3
except ImportError:
    sqlite3 = None
    logging.warning("sqlite3 module not available. SQLite backend will be unavailable.")

try:
    from datasets import load_dataset, Dataset
except ImportError:
    load_dataset, Dataset = None, None
    logging.warning("datasets library not installed. Hugging Face Dataset backend will be unavailable.")


logger = logging.getLogger(__name__)
# Suppress verbose logs from dependencies
for lib_name in ["sentence_transformers", "faiss", "datasets", "huggingface_hub"]:
    logging.getLogger(lib_name).setLevel(logging.WARNING)


# --- Configuration (Read directly from environment variables) ---
STORAGE_BACKEND = os.getenv("STORAGE_BACKEND", "HF_DATASET").upper() # Options: HF_DATASET, RAM, SQLITE
SQLITE_DB_PATH = os.getenv("SQLITE_DB_PATH", "app_data/ai_memory.db")
HF_TOKEN = os.getenv("HF_TOKEN")
HF_MEMORY_DATASET_REPO = os.getenv("HF_MEMORY_DATASET_REPO", "broadfield-dev/ai-brain") # Example
HF_RULES_DATASET_REPO = os.getenv("HF_RULES_DATASET_REPO", "broadfield-dev/ai-rules")     # Example

# --- Globals for RAG within this module ---
_embedder = None
_dimension = 384 # Default, will be set by embedder
_faiss_memory_index = None
_memory_items_list = []  # Stores JSON strings of memory objects for RAM, or loaded from DB/HF
_faiss_rules_index = None
_rules_items_list = []   # Stores rule text strings

_initialized = False
_init_lock = threading.Lock()

# --- Helper: SQLite Connection ---
def _get_sqlite_connection():
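    """Returns a sqlite3 connection, creating the DB directory first if needed."""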
    if not sqlite3:
        raise ImportError("sqlite3 module is required for SQLite backend but not found.")
    db_dir = os.path.dirname(SQLITE_DB_PATH)
    if db_dir and not os.path.exists(db_dir):
        os.makedirs(db_dir, exist_ok=True)
    return sqlite3.connect(SQLITE_DB_PATH, timeout=10) # 10s busy timeout to reduce "database is locked" errors

def _init_sqlite_tables():
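    """Creates the memories and rules tables if they do not already exist (SQLite backend only)."""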
    if STORAGE_BACKEND != "SQLITE" or not sqlite3:
        return
    try:
        with _get_sqlite_connection() as conn:
            cursor = conn.cursor()
            # Stores JSON string of the memory object
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS memories (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    memory_json TEXT NOT NULL,
                    -- Optionally add embedding here if not using separate FAISS index
                    -- embedding BLOB,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            """)
            # Stores the rule text directly
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS rules (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    rule_text TEXT NOT NULL UNIQUE,
                    -- embedding BLOB,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            """)
            conn.commit()
        logger.info("SQLite tables for memories and rules checked/created.")
    except Exception as e:
        logger.error(f"SQLite table initialization error: {e}", exc_info=True)

# --- Initialization ---
def initialize_memory_system():
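    """Loads the embedder, memories, rules, and FAISS indices for the configured backend.

    Idempotent and thread-safe: guarded by _init_lock so concurrent callers
    initialize the system at most once.
    """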
    global _initialized, _embedder, _dimension, _faiss_memory_index, _memory_items_list, _faiss_rules_index, _rules_items_list
    
    with _init_lock:
        if _initialized:
            logger.info("Memory system already initialized.")
            return

        logger.info(f"Initializing memory system with backend: {STORAGE_BACKEND}")
        init_start_time = time.time()

        # 1. Load Sentence Transformer Model (always needed for semantic operations)
        if not SentenceTransformer or not faiss or not np:
            logger.error("Core RAG libraries (SentenceTransformers, FAISS, NumPy) not available. Cannot initialize semantic memory.")
            _initialized = False # Mark as not properly initialized
            return
        
        if not _embedder:
            try:
                logger.info("Loading SentenceTransformer model (all-MiniLM-L6-v2)...")
                _embedder = SentenceTransformer('all-MiniLM-L6-v2', cache_folder="./sentence_transformer_cache")
                _dimension = _embedder.get_sentence_embedding_dimension() or 384
                logger.info(f"SentenceTransformer loaded. Dimension: {_dimension}")
            except Exception as e:
                logger.critical(f"FATAL: Error loading SentenceTransformer: {e}", exc_info=True)
                _initialized = False
                return # Cannot proceed without embedder

        # 2. Initialize SQLite if used
        if STORAGE_BACKEND == "SQLITE":
            _init_sqlite_tables()

        # 3. Load Memories
        logger.info("Loading memories...")
        temp_memories_json = []
        if STORAGE_BACKEND == "RAM":
            _memory_items_list = [] # Start fresh for RAM backend
        elif STORAGE_BACKEND == "SQLITE" and sqlite3:
            try:
                with _get_sqlite_connection() as conn:
                    temp_memories_json = [row[0] for row in conn.execute("SELECT memory_json FROM memories ORDER BY created_at ASC")]
            except Exception as e: logger.error(f"Error loading memories from SQLite: {e}")
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset and load_dataset:
            try:
                logger.info(f"Attempting to load memories from HF Dataset: {HF_MEMORY_DATASET_REPO}")
                dataset = load_dataset(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, trust_remote_code=True) # Add download_mode if needed
                if "train" in dataset and "memory_json" in dataset["train"].column_names: # Assuming 'memory_json' column
                    temp_memories_json = [m_json for m_json in dataset["train"]["memory_json"] if isinstance(m_json, str)]
                else: logger.warning(f"HF Dataset {HF_MEMORY_DATASET_REPO} for memories not found or 'memory_json' column missing.")
            except Exception as e: logger.error(f"Error loading memories from HF Dataset ({HF_MEMORY_DATASET_REPO}): {e}")
        
        _memory_items_list = temp_memories_json
        logger.info(f"Loaded {len(_memory_items_list)} memory items from {STORAGE_BACKEND}.")

        # 4. Build/Load FAISS Memory Index
        _faiss_memory_index = faiss.IndexFlatL2(_dimension)
        if _memory_items_list:
            logger.info(f"Building FAISS index for {len(_memory_items_list)} memories...")
            # Extract text to embed from memory JSON objects
            texts_to_embed_mem = []
            for mem_json_str in _memory_items_list:
                try:
                    mem_obj = json.loads(mem_json_str)
                    # Consistent embedding strategy: user input + bot response + takeaway
                    text = f"User: {mem_obj.get('user_input','')}\nAI: {mem_obj.get('bot_response','')}\nTakeaway: {mem_obj.get('metrics',{}).get('takeaway','N/A')}"
                    texts_to_embed_mem.append(text)
                except json.JSONDecodeError:
                    logger.warning(f"Skipping malformed memory JSON for FAISS indexing: {mem_json_str[:100]}")
            
            if texts_to_embed_mem:
                try:
                    embeddings = _embedder.encode(texts_to_embed_mem, convert_to_tensor=False, show_progress_bar=False)
                    embeddings_np = np.array(embeddings, dtype=np.float32)
                    if embeddings_np.ndim == 2 and embeddings_np.shape[0] == len(texts_to_embed_mem) and embeddings_np.shape[1] == _dimension:
                        _faiss_memory_index.add(embeddings_np)
                    else: logger.error(f"Memory embeddings shape error. Expected ({len(texts_to_embed_mem)}, {_dimension}), Got {embeddings_np.shape if hasattr(embeddings_np, 'shape') else 'N/A'}")
                except Exception as e_faiss_mem: logger.error(f"Error building FAISS memory index: {e_faiss_mem}")
        logger.info(f"FAISS memory index built. Total items: {_faiss_memory_index.ntotal if _faiss_memory_index else 'N/A'}")


        # 5. Load Rules
        logger.info("Loading rules...")
        temp_rules_text = []
        if STORAGE_BACKEND == "RAM":
            _rules_items_list = []
        elif STORAGE_BACKEND == "SQLITE" and sqlite3:
            try:
                with _get_sqlite_connection() as conn:
                    temp_rules_text = [row[0] for row in conn.execute("SELECT rule_text FROM rules ORDER BY created_at ASC")]
            except Exception as e: logger.error(f"Error loading rules from SQLite: {e}")
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset and load_dataset:
            try:
                logger.info(f"Attempting to load rules from HF Dataset: {HF_RULES_DATASET_REPO}")
                dataset = load_dataset(HF_RULES_DATASET_REPO, token=HF_TOKEN, trust_remote_code=True)
                if "train" in dataset and "rule_text" in dataset["train"].column_names:
                    temp_rules_text = [r_text for r_text in dataset["train"]["rule_text"] if isinstance(r_text, str) and r_text.strip()]
                else: logger.warning(f"HF Dataset {HF_RULES_DATASET_REPO} for rules not found or 'rule_text' column missing.")
            except Exception as e: logger.error(f"Error loading rules from HF Dataset ({HF_RULES_DATASET_REPO}): {e}")
        
        _rules_items_list = sorted(list(set(temp_rules_text))) # Ensure unique and sorted
        logger.info(f"Loaded {len(_rules_items_list)} rule items from {STORAGE_BACKEND}.")

        # 6. Build/Load FAISS Rules Index
        _faiss_rules_index = faiss.IndexFlatL2(_dimension)
        if _rules_items_list:
            logger.info(f"Building FAISS index for {len(_rules_items_list)} rules...")
            try:
                embeddings = _embedder.encode(_rules_items_list, convert_to_tensor=False, show_progress_bar=False)
                embeddings_np = np.array(embeddings, dtype=np.float32)
                if embeddings_np.ndim == 2 and embeddings_np.shape[0] == len(_rules_items_list) and embeddings_np.shape[1] == _dimension:
                    _faiss_rules_index.add(embeddings_np)
                else: logger.error(f"Rule embeddings shape error. Expected ({len(_rules_items_list)}, {_dimension}), Got {embeddings_np.shape if hasattr(embeddings_np, 'shape') else 'N/A'}")
            except Exception as e_faiss_rule: logger.error(f"Error building FAISS rule index: {e_faiss_rule}")
        logger.info(f"FAISS rules index built. Total items: {_faiss_rules_index.ntotal if _faiss_rules_index else 'N/A'}")

        _initialized = True
        logger.info(f"Memory system initialization complete in {time.time() - init_start_time:.2f}s")


# --- Memory Operations (Semantic) ---
def add_memory_entry(user_input: str, metrics: dict, bot_response: str) -> tuple[bool, str]:
    """Adds a memory entry to the configured backend and FAISS index."""
    global _memory_items_list, _faiss_memory_index
    if not _initialized: initialize_memory_system()
    if not _embedder or not _faiss_memory_index:
        return False, "Memory system or embedder not initialized for adding memory."

    memory_obj = {
        "user_input": user_input,
        "metrics": metrics,
        "bot_response": bot_response,
        "timestamp": datetime.utcnow().isoformat()
    }
    memory_json_str = json.dumps(memory_obj)
    
    text_to_embed = f"User: {user_input}\nAI: {bot_response}\nTakeaway: {metrics.get('takeaway', 'N/A')}"
    
    try:
        embedding = _embedder.encode([text_to_embed], convert_to_tensor=False)
        embedding_np = np.array(embedding, dtype=np.float32).reshape(1, -1)

        if embedding_np.shape != (1, _dimension):
            logger.error(f"Memory embedding shape error: {embedding_np.shape}. Expected (1, {_dimension})")
            return False, "Embedding shape error."

        # Add to FAISS
        _faiss_memory_index.add(embedding_np)
        
        # Add to in-memory list
        _memory_items_list.append(memory_json_str)
        
        # Add to persistent storage
        if STORAGE_BACKEND == "SQLITE" and sqlite3:
            with _get_sqlite_connection() as conn:
                conn.execute("INSERT INTO memories (memory_json) VALUES (?)", (memory_json_str,))
                conn.commit()
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
            # This can be slow, consider batching or async push
            logger.info(f"Pushing {len(_memory_items_list)} memories to HF Hub: {HF_MEMORY_DATASET_REPO}")
            Dataset.from_dict({"memory_json": list(_memory_items_list)}).push_to_hub(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, private=True) # Ensure 'private' as needed
        
        logger.info(f"Added memory. RAM: {len(_memory_items_list)}, FAISS: {_faiss_memory_index.ntotal}")
        return True, "Memory added successfully."
    except Exception as e:
        logger.error(f"Error adding memory entry: {e}", exc_info=True)
        # TODO: Potential rollback logic if FAISS add succeeded but backend failed (complex)
        return False, f"Error adding memory: {e}"

def retrieve_memories_semantic(query: str, k: int = 3) -> list[dict]:
    """Retrieves k most relevant memories using semantic search."""
    if not _initialized: initialize_memory_system()
    if not _embedder or not _faiss_memory_index or _faiss_memory_index.ntotal == 0:
        logger.debug("Cannot retrieve memories: Embedder, FAISS index not ready, or index is empty.")
        return []

    try:
        query_embedding = _embedder.encode([query], convert_to_tensor=False)
        query_embedding_np = np.array(query_embedding, dtype=np.float32).reshape(1, -1)

        if query_embedding_np.shape[1] != _dimension:
            logger.error(f"Query embedding dimension mismatch. Expected {_dimension}, got {query_embedding_np.shape[1]}")
            return []
            
        distances, indices = _faiss_memory_index.search(query_embedding_np, min(k, _faiss_memory_index.ntotal))
        
        results = []
        for i in indices[0]:
            if 0 <= i < len(_memory_items_list):
                try:
                    results.append(json.loads(_memory_items_list[i]))
                except json.JSONDecodeError:
                    logger.warning(f"Could not parse memory JSON from list at index {i}")
            else:
                logger.warning(f"FAISS index {i} out of bounds for memory_items_list (len: {len(_memory_items_list)})")

        logger.debug(f"Retrieved {len(results)} memories semantically for query: '{query[:50]}...'")
        return results
    except Exception as e:
        logger.error(f"Error retrieving memories semantically: {e}", exc_info=True)
        return []


# --- Rule (Insight) Operations (Semantic) ---
def add_rule_entry(rule_text: str) -> tuple[bool, str]:
    """Adds a rule if valid and not a duplicate. Updates backend and FAISS."""
    global _rules_items_list, _faiss_rules_index
    if not _initialized: initialize_memory_system()
    if not _embedder or not _faiss_rules_index:
        return False, "Rule system or embedder not initialized."

    rule_text = rule_text.strip()
    if not rule_text: return False, "Rule text cannot be empty."
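    # Rules must match "[<TYPE>|<score>] <text>", where <TYPE> is one of the four
    # types in the regex below, e.g. "[CORE_RULE|1.0] Prefer concise answers."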
    if not re.match(r"\[(CORE_RULE|RESPONSE_PRINCIPLE|BEHAVIORAL_ADJUSTMENT|GENERAL_LEARNING)\|([\d\.]+?)\](.*)", rule_text, re.I|re.DOTALL):
        return False, "Invalid rule format."
    if rule_text in _rules_items_list:
        return False, "duplicate"

    try:
        embedding = _embedder.encode([rule_text], convert_to_tensor=False)
        embedding_np = np.array(embedding, dtype=np.float32).reshape(1, -1)

        if embedding_np.shape != (1, _dimension):
            return False, "Rule embedding shape error."

        _faiss_rules_index.add(embedding_np)
        _rules_items_list.append(rule_text)
        _rules_items_list.sort()

        if STORAGE_BACKEND == "SQLITE" and sqlite3:
            with _get_sqlite_connection() as conn:
                conn.execute("INSERT OR IGNORE INTO rules (rule_text) VALUES (?)", (rule_text,))
                conn.commit()
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
            logger.info(f"Pushing {len(_rules_items_list)} rules to HF Hub: {HF_RULES_DATASET_REPO}")
            Dataset.from_dict({"rule_text": list(_rules_items_list)}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
        
        logger.info(f"Added rule. RAM: {len(_rules_items_list)}, FAISS: {_faiss_rules_index.ntotal}")
        return True, "Rule added successfully."
    except Exception as e:
        logger.error(f"Error adding rule entry: {e}", exc_info=True)
        # A full rollback (removing the appended rule and rebuilding the FAISS
        # index from _rules_items_list) is omitted for simplicity; in-memory
        # state may be inconsistent if the failure occurred after the FAISS add.
        return False, f"Error adding rule: {e}"

def retrieve_rules_semantic(query: str, k: int = 5) -> list[str]:
    """Retrieves k most relevant rules using semantic search."""
    if not _initialized: initialize_memory_system()
    if not _embedder or not _faiss_rules_index or _faiss_rules_index.ntotal == 0:
        return []
    try:
        query_embedding = _embedder.encode([query], convert_to_tensor=False)
        query_embedding_np = np.array(query_embedding, dtype=np.float32).reshape(1, -1)
        
        if query_embedding_np.shape[1] != _dimension: return []

        distances, indices = _faiss_rules_index.search(query_embedding_np, min(k, _faiss_rules_index.ntotal))
        results = [_rules_items_list[i] for i in indices[0] if 0 <= i < len(_rules_items_list)]
        logger.debug(f"Retrieved {len(results)} rules semantically for query: '{query[:50]}...'")
        return results
    except Exception as e:
        logger.error(f"Error retrieving rules semantically: {e}", exc_info=True)
        return []

def remove_rule_entry(rule_text_to_delete: str) -> bool:
    """Removes a rule from backend and rebuilds FAISS for rules."""
    global _rules_items_list, _faiss_rules_index
    if not _initialized: initialize_memory_system()
    if not _embedder or not _faiss_rules_index: return False

    rule_text_to_delete = rule_text_to_delete.strip()
    if rule_text_to_delete not in _rules_items_list:
        return False # Not found

    try:
        _rules_items_list.remove(rule_text_to_delete)
        _rules_items_list.sort() # Maintain sorted order

        # Rebuild FAISS index for rules (simplest way to ensure consistency after removal)
        new_faiss_rules_index = faiss.IndexFlatL2(_dimension)
        if _rules_items_list:
            embeddings = _embedder.encode(_rules_items_list, convert_to_tensor=False)
            embeddings_np = np.array(embeddings, dtype=np.float32)
            if embeddings_np.ndim == 2 and embeddings_np.shape[0] == len(_rules_items_list) and embeddings_np.shape[1] == _dimension:
                 new_faiss_rules_index.add(embeddings_np)
            else: # Should not happen if list is consistent
                logger.error("Error rebuilding FAISS for rules after removal: Embedding shape error. State might be inconsistent.")
                # Attempt to revert _rules_items_list (add back the rule)
                _rules_items_list.append(rule_text_to_delete)
                _rules_items_list.sort()
                return False # Indicate failure
        _faiss_rules_index = new_faiss_rules_index
        
        # Remove from persistent storage
        if STORAGE_BACKEND == "SQLITE" and sqlite3:
            with _get_sqlite_connection() as conn:
                conn.execute("DELETE FROM rules WHERE rule_text = ?", (rule_text_to_delete,))
                conn.commit()
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
            Dataset.from_dict({"rule_text": list(_rules_items_list)}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)
        
        logger.info(f"Removed rule. RAM: {len(_rules_items_list)}, FAISS: {_faiss_rules_index.ntotal}")
        return True
    except Exception as e:
        logger.error(f"Error removing rule entry: {e}", exc_info=True)
        # Potential partial failure, state might be inconsistent.
        return False

# --- Utility functions to get all data (for UI display, etc.) ---
def get_all_rules_cached() -> list[str]:
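    """Returns a copy of the in-memory rules list."""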
    if not _initialized: initialize_memory_system()
    return list(_rules_items_list)

def get_all_memories_cached() -> list[dict]:
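    """Returns all memories parsed into dicts (malformed entries are skipped)."""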
    if not _initialized: initialize_memory_system()
    # Convert JSON strings to dicts for easier use by UI
    mem_dicts = []
    for mem_json_str in _memory_items_list:
        try: mem_dicts.append(json.loads(mem_json_str))
        except json.JSONDecodeError: pass # Skip malformed entries for display
    return mem_dicts

def clear_all_memory_data_backend() -> bool:
    """Clears all memories from backend and resets in-memory FAISS/list."""
    global _memory_items_list, _faiss_memory_index
    if not _initialized: initialize_memory_system()
    
    success = True
    try:
        if STORAGE_BACKEND == "SQLITE" and sqlite3:
            with _get_sqlite_connection() as conn: conn.execute("DELETE FROM memories"); conn.commit()
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
            # Deleting from HF usually means pushing an empty dataset
            Dataset.from_dict({"memory_json": []}).push_to_hub(HF_MEMORY_DATASET_REPO, token=HF_TOKEN, private=True)
        
        _memory_items_list = []
        if _faiss_memory_index: _faiss_memory_index.reset() # Clear FAISS index
        logger.info("All memories cleared from backend and in-memory stores.")
    except Exception as e:
        logger.error(f"Error clearing all memory data: {e}")
        success = False
    return success

def clear_all_rules_data_backend() -> bool:
    """Clears all rules from backend and resets in-memory FAISS/list."""
    global _rules_items_list, _faiss_rules_index
    if not _initialized: initialize_memory_system()
    
    success = True
    try:
        if STORAGE_BACKEND == "SQLITE" and sqlite3:
            with _get_sqlite_connection() as conn: conn.execute("DELETE FROM rules"); conn.commit()
        elif STORAGE_BACKEND == "HF_DATASET" and HF_TOKEN and Dataset:
            Dataset.from_dict({"rule_text": []}).push_to_hub(HF_RULES_DATASET_REPO, token=HF_TOKEN, private=True)

        _rules_items_list = []
        if _faiss_rules_index: _faiss_rules_index.reset()
        logger.info("All rules cleared from backend and in-memory stores.")
    except Exception as e:
        logger.error(f"Error clearing all rules data: {e}")
        success = False
    return success

# Optional: persist FAISS indices to disk (adapted from ai-learn; useful for keeping state between app runs with the RAM backend)
FAISS_MEMORY_PATH = os.path.join(os.getenv("FAISS_STORAGE_PATH", "app_data/faiss_indices"), "memory_index.faiss")
FAISS_RULES_PATH = os.path.join(os.getenv("FAISS_STORAGE_PATH", "app_data/faiss_indices"), "rules_index.faiss")

def save_faiss_indices_to_disk():
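    """Writes the non-empty memory and rules FAISS indices to disk."""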
    if not _initialized or not faiss: return
    
    faiss_dir = os.path.dirname(FAISS_MEMORY_PATH)
    if not os.path.exists(faiss_dir): os.makedirs(faiss_dir, exist_ok=True)

    if _faiss_memory_index and _faiss_memory_index.ntotal > 0:
        try:
            faiss.write_index(_faiss_memory_index, FAISS_MEMORY_PATH)
            logger.info(f"Memory FAISS index saved to disk ({_faiss_memory_index.ntotal} items).")
        except Exception as e: logger.error(f"Error saving memory FAISS index: {e}")
    
    if _faiss_rules_index and _faiss_rules_index.ntotal > 0:
        try:
            faiss.write_index(_faiss_rules_index, FAISS_RULES_PATH)
            logger.info(f"Rules FAISS index saved to disk ({_faiss_rules_index.ntotal} items).")
        except Exception as e: logger.error(f"Error saving rules FAISS index: {e}")

def load_faiss_indices_from_disk():
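    """Replaces the in-memory FAISS indices with indices previously saved to disk, if present."""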
    global _faiss_memory_index, _faiss_rules_index
    if not _initialized or not faiss: return

    if os.path.exists(FAISS_MEMORY_PATH) and _faiss_memory_index: # Check if index object exists
        try:
            logger.info(f"Loading memory FAISS index from {FAISS_MEMORY_PATH}...")
            _faiss_memory_index = faiss.read_index(FAISS_MEMORY_PATH)
            logger.info(f"Memory FAISS index loaded ({_faiss_memory_index.ntotal} items).")
            # Consistency check: FAISS ntotal vs len(_memory_items_list)
            if _faiss_memory_index.ntotal != len(_memory_items_list) and len(_memory_items_list) > 0:
                 logger.warning(f"Memory FAISS index count ({_faiss_memory_index.ntotal}) differs from loaded texts ({len(_memory_items_list)}). Consider rebuilding FAISS.")
        except Exception as e: logger.error(f"Error loading memory FAISS index: {e}. Will use fresh index.")
    
    if os.path.exists(FAISS_RULES_PATH) and _faiss_rules_index:
        try:
            logger.info(f"Loading rules FAISS index from {FAISS_RULES_PATH}...")
            _faiss_rules_index = faiss.read_index(FAISS_RULES_PATH)
            logger.info(f"Rules FAISS index loaded ({_faiss_rules_index.ntotal} items).")
            if _faiss_rules_index.ntotal != len(_rules_items_list) and len(_rules_items_list) > 0:
                 logger.warning(f"Rules FAISS index count ({_faiss_rules_index.ntotal}) differs from loaded texts ({len(_rules_items_list)}). Consider rebuilding FAISS.")
        except Exception as e: logger.error(f"Error loading rules FAISS index: {e}. Will use fresh index.")
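

# Minimal usage sketch (illustrative only, not part of the module's API).
# STORAGE_BACKEND is read at import time, so set it before launching, e.g.:
#   STORAGE_BACKEND=RAM python memory_logic.py
# Assumes sentence-transformers, faiss, and numpy are installed.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    initialize_memory_system()
    ok, msg = add_memory_entry(
        user_input="What does FAISS do?",
        metrics={"takeaway": "FAISS provides fast vector similarity search."},
        bot_response="FAISS is a library for efficient similarity search over dense vectors.",
    )
    print(f"add_memory_entry -> {ok}: {msg}")
    for mem in retrieve_memories_semantic("vector similarity search", k=1):
        print("retrieved:", mem.get("user_input"))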