"""
Database manager for the AI Knowledge Distillation Platform
"""

import logging
import shutil
import sqlite3
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any, Dict, List

logger = logging.getLogger(__name__)

class DatabaseManager:
    """
    Centralized database manager for all platform data
    """
    
    def __init__(self, db_dir: str = "database"):
        """
        Initialize database manager
        
        Args:
            db_dir: Directory for database files
        """
        self.db_dir = Path(db_dir)
        self.db_dir.mkdir(parents=True, exist_ok=True)
        
        # Database file paths
        self.tokens_db = self.db_dir / "tokens.db"
        self.training_db = self.db_dir / "training_sessions.db"
        self.performance_db = self.db_dir / "performance_metrics.db"
        self.medical_db = self.db_dir / "medical_datasets.db"
        
        # Initialize all databases
        self._init_all_databases()
        
        logger.info("Database Manager initialized")
    
    def _init_all_databases(self):
        """Initialize all database schemas"""
        self._init_tokens_database()
        self._init_training_database()
        self._init_performance_database()
        self._init_medical_database()
    
    def _init_tokens_database(self):
        """Initialize tokens database"""
        with sqlite3.connect(self.tokens_db) as conn:
            conn.execute('''
                CREATE TABLE IF NOT EXISTS tokens (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    name TEXT UNIQUE NOT NULL,
                    token_type TEXT NOT NULL,
                    encrypted_token TEXT NOT NULL,
                    is_default BOOLEAN DEFAULT FALSE,
                    description TEXT,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    last_used TIMESTAMP,
                    usage_count INTEGER DEFAULT 0,
                    is_active BOOLEAN DEFAULT TRUE
                )
            ''')
            
            conn.execute('''
                CREATE TABLE IF NOT EXISTS token_usage_log (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    token_name TEXT NOT NULL,
                    operation TEXT NOT NULL,
                    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    success BOOLEAN,
                    error_message TEXT
                )
            ''')
            
            conn.commit()
    
    def _init_training_database(self):
        """Initialize training sessions database"""
        with sqlite3.connect(self.training_db) as conn:
            conn.execute('''
                CREATE TABLE IF NOT EXISTS training_sessions (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT UNIQUE NOT NULL,
                    teacher_model TEXT NOT NULL,
                    student_model TEXT NOT NULL,
                    dataset_name TEXT,
                    training_type TEXT NOT NULL,
                    status TEXT DEFAULT 'initialized',
                    progress REAL DEFAULT 0.0,
                    current_step INTEGER DEFAULT 0,
                    total_steps INTEGER,
                    current_loss REAL,
                    best_loss REAL,
                    learning_rate REAL,
                    batch_size INTEGER,
                    temperature REAL,
                    alpha REAL,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    started_at TIMESTAMP,
                    completed_at TIMESTAMP,
                    error_message TEXT,
                    config_json TEXT
                )
            ''')
            
            conn.execute('''
                CREATE TABLE IF NOT EXISTS training_logs (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    step INTEGER NOT NULL,
                    loss REAL,
                    learning_rate REAL,
                    memory_usage_mb REAL,
                    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    additional_metrics TEXT
                )
            ''')
            
            conn.commit()
    
    def _init_performance_database(self):
        """Initialize performance metrics database"""
        with sqlite3.connect(self.performance_db) as conn:
            conn.execute('''
                CREATE TABLE IF NOT EXISTS system_metrics (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    cpu_usage_percent REAL,
                    memory_usage_mb REAL,
                    memory_usage_percent REAL,
                    available_memory_gb REAL,
                    disk_usage_percent REAL,
                    temperature_celsius REAL
                )
            ''')
            
            conn.execute('''
                CREATE TABLE IF NOT EXISTS model_performance (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    model_name TEXT NOT NULL,
                    operation TEXT NOT NULL,
                    duration_seconds REAL,
                    memory_peak_mb REAL,
                    throughput_samples_per_second REAL,
                    accuracy REAL,
                    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    additional_metrics TEXT
                )
            ''')
            
            conn.commit()
    
    def _init_medical_database(self):
        """Initialize medical datasets database"""
        with sqlite3.connect(self.medical_db) as conn:
            conn.execute('''
                CREATE TABLE IF NOT EXISTS medical_datasets (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    dataset_name TEXT UNIQUE NOT NULL,
                    repo_id TEXT NOT NULL,
                    description TEXT,
                    size_gb REAL,
                    num_samples INTEGER,
                    modalities TEXT,
                    specialties TEXT,
                    languages TEXT,
                    last_accessed TIMESTAMP,
                    access_count INTEGER DEFAULT 0,
                    is_cached BOOLEAN DEFAULT FALSE,
                    cache_path TEXT,
                    metadata_json TEXT
                )
            ''')
            
            conn.execute('''
                CREATE TABLE IF NOT EXISTS dicom_files (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    file_path TEXT UNIQUE NOT NULL,
                    patient_id TEXT,
                    study_date TEXT,
                    modality TEXT,
                    file_size_mb REAL,
                    processed BOOLEAN DEFAULT FALSE,
                    processed_at TIMESTAMP,
                    metadata_json TEXT
                )
            ''')
            
            conn.commit()
    
    def get_connection(self, db_name: str) -> sqlite3.Connection:
        """Get database connection"""
        db_map = {
            'tokens': self.tokens_db,
            'training': self.training_db,
            'performance': self.performance_db,
            'medical': self.medical_db
        }
        
        if db_name not in db_map:
            raise ValueError(f"Unknown database: {db_name}")
        
        return sqlite3.connect(db_map[db_name])
    
    def execute_query(self, db_name: str, query: str, params: tuple = ()) -> List[tuple]:
        """Execute query and return results"""
        with self.get_connection(db_name) as conn:
            cursor = conn.execute(query, params)
            return cursor.fetchall()
    
    def execute_update(self, db_name: str, query: str, params: tuple = ()) -> int:
        """Execute update query and return affected rows"""
        with self.get_connection(db_name) as conn:
            cursor = conn.execute(query, params)
            conn.commit()
            return cursor.rowcount
    
    def backup_databases(self, backup_dir: str = "backups") -> Dict[str, str]:
        """Create backup of all databases"""
        backup_path = Path(backup_dir)
        backup_path.mkdir(parents=True, exist_ok=True)
        
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        backup_files = {}
        
        db_files = {
            'tokens': self.tokens_db,
            'training': self.training_db,
            'performance': self.performance_db,
            'medical': self.medical_db
        }
        
        for db_name, db_file in db_files.items():
            if db_file.exists():
                backup_file = backup_path / f"{db_name}_{timestamp}.db"
                
                # Copy the database file to the timestamped backup location
                shutil.copy2(db_file, backup_file)
                
                backup_files[db_name] = str(backup_file)
                logger.info(f"Backed up {db_name} database to {backup_file}")
        
        return backup_files
    
    def get_database_stats(self) -> Dict[str, Any]:
        """Get statistics about all databases"""
        stats = {}
        
        db_files = {
            'tokens': self.tokens_db,
            'training': self.training_db,
            'performance': self.performance_db,
            'medical': self.medical_db
        }
        
        for db_name, db_file in db_files.items():
            if db_file.exists():
                file_size_mb = db_file.stat().st_size / (1024**2)
                
                # Get table counts
                try:
                    with self.get_connection(db_name) as conn:
                        cursor = conn.execute(
                            "SELECT name FROM sqlite_master WHERE type='table'"
                        )
                        tables = [row[0] for row in cursor.fetchall()]
                        
                        table_counts = {}
                        for table in tables:
                            # Table names come from sqlite_master; quote them defensively
                            cursor = conn.execute(f'SELECT COUNT(*) FROM "{table}"')
                            count = cursor.fetchone()[0]
                            table_counts[table] = count
                        
                        stats[db_name] = {
                            'file_size_mb': file_size_mb,
                            'tables': table_counts,
                            'total_records': sum(table_counts.values())
                        }
                except Exception as e:
                    stats[db_name] = {
                        'file_size_mb': file_size_mb,
                        'error': str(e)
                    }
            else:
                stats[db_name] = {
                    'file_size_mb': 0,
                    'status': 'not_created'
                }
        
        return stats
    
    def cleanup_old_data(self, days_to_keep: int = 30) -> Dict[str, int]:
        """Cleanup old data from databases"""
        cutoff_date = datetime.now().timestamp() - (days_to_keep * 24 * 3600)
        cleanup_stats = {}
        
        try:
            # Clean up old performance metrics
            with self.get_connection('performance') as conn:
                cursor = conn.execute(
                    "DELETE FROM system_metrics WHERE timestamp < ?",
                    (cutoff_date,)
                )
                cleanup_stats['system_metrics'] = cursor.rowcount
                conn.commit()
            
            # Clean up old training logs
            with self.get_connection('training') as conn:
                cursor = conn.execute(
                    "DELETE FROM training_logs WHERE timestamp < ?",
                    (cutoff_date,)
                )
                cleanup_stats['training_logs'] = cursor.rowcount
                conn.commit()
            
            # Clean up old token usage logs
            with self.get_connection('tokens') as conn:
                cursor = conn.execute(
                    "DELETE FROM token_usage_log WHERE timestamp < ?",
                    (cutoff_date,)
                )
                cleanup_stats['token_usage_log'] = cursor.rowcount
                conn.commit()
            
            logger.info(f"Cleaned up old data: {cleanup_stats}")
            
        except Exception as e:
            logger.error(f"Error cleaning up old data: {e}")
            cleanup_stats['error'] = str(e)
        
        return cleanup_stats
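

# --- Usage sketch (illustrative only) ---
# A minimal, hedged example of how this module's public API might be exercised.
# The token name, query values, and directory names below are hypothetical and
# exist only for demonstration; only methods defined above are called.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    manager = DatabaseManager(db_dir="database")

    # Record a hypothetical token-usage event, then read the most recent entries back.
    manager.execute_update(
        "tokens",
        "INSERT INTO token_usage_log (token_name, operation, success) VALUES (?, ?, ?)",
        ("example_token", "validate", True),
    )
    recent = manager.execute_query(
        "tokens",
        "SELECT token_name, operation, success FROM token_usage_log "
        "ORDER BY id DESC LIMIT 5",
    )
    print(recent)

    # Inspect per-database file sizes and row counts, then back everything up.
    print(manager.get_database_stats())
    print(manager.backup_databases(backup_dir="backups"))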