File size: 13,905 Bytes
76a5df7
 
 
 
 
 
 
 
 
 
 
64f4912
76a5df7
 
 
 
 
 
 
 
 
f20194e
76a5df7
 
 
 
 
 
 
 
f20194e
76a5df7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
64f4912
f20194e
 
1c7f676
76a5df7
 
 
 
 
 
 
 
 
 
 
 
64f4912
 
 
76a5df7
 
 
 
 
 
 
 
 
 
f20194e
76a5df7
 
 
 
 
 
 
 
 
 
 
f20194e
76a5df7
 
 
 
 
 
 
 
 
 
 
 
 
 
f20194e
226b931
76a5df7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f20194e
226b931
76a5df7
 
 
 
 
 
 
 
 
 
f20194e
76a5df7
226b931
 
76a5df7
f20194e
76a5df7
f20194e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
76a5df7
 
 
 
 
 
 
 
f20194e
 
76a5df7
 
 
f20194e
 
76a5df7
 
 
f20194e
 
76a5df7
 
f20194e
 
76a5df7
 
 
f20194e
 
76a5df7
 
 
 
 
 
 
 
 
 
 
 
 
b77c84c
76a5df7
 
f20194e
 
 
 
 
76a5df7
f20194e
 
b77c84c
76a5df7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f20194e
76a5df7
f20194e
 
76a5df7
 
 
 
 
f20194e
 
76a5df7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f20194e
76a5df7
 
 
 
 
 
 
 
 
 
 
 
f20194e
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
#!/usr/bin/env python3
"""
ResearchMate Settings
Centralized configuration management for ResearchMate
"""

import os
import json
import logging
from pathlib import Path
from typing import Dict, Any, Optional, List
from dataclasses import dataclass, asdict, field
from dotenv import load_dotenv

# Load environment variables
load_dotenv()

@dataclass
class ServerConfig:
    """Server configuration settings"""
    host: str = "0.0.0.0"  # bind all interfaces (needed inside containers)
    port: int = 7860  # HF Spaces default
    debug: bool = False  # enable framework debug mode
    reload: bool = False  # auto-reload on code change (development only)
    workers: int = 1  # worker process count
    log_level: str = "info"  # server log level (lowercase, ASGI-server style)

@dataclass
class DatabaseConfig:
    """Database configuration settings"""
    chroma_persist_dir: str = "/tmp/researchmate/chroma_persist"  # writable on HF Spaces
    collection_name: str = "research_documents"  # Chroma collection to store documents in
    similarity_threshold: float = 0.7  # minimum similarity score for a match (0.0-1.0)
    max_results: int = 10  # cap on results returned per query
    embedding_model: str = "all-MiniLM-L6-v2"  # embedding model name — presumably sentence-transformers; verify

@dataclass
class AIModelConfig:
    """AI model configuration settings"""
    model_name: str = "llama-3.3-70b-versatile"  # chat model identifier (Groq-hosted, per GROQ_API_KEY usage)
    temperature: float = 0.7  # sampling temperature; validated to [0.0, 2.0]
    max_tokens: int = 4096  # response token cap; validated to [1, 32768]
    top_p: float = 0.9  # nucleus-sampling cutoff
    frequency_penalty: float = 0.0  # no repetition penalty by default
    presence_penalty: float = 0.0  # no topic-novelty penalty by default
    timeout: int = 30  # request timeout in seconds

@dataclass
class UploadConfig:
    """File upload configuration settings"""
    max_file_size: int = 50 * 1024 * 1024  # 50MB
    # default_factory avoids the shared-mutable-default pitfall
    allowed_extensions: List[str] = field(default_factory=lambda: [".pdf", ".txt", ".md", ".docx", ".doc"])
    upload_directory: str = "/tmp/researchmate/uploads"  # writable on HF Spaces
    temp_directory: str = "/tmp/researchmate/tmp"  # scratch space for processing

@dataclass
class SearchConfig:
    """Search configuration settings"""
    max_results: int = 10  # cap on search hits returned
    similarity_threshold: float = 0.7  # minimum score for a hit (0.0-1.0)
    enable_reranking: bool = True  # re-rank retrieved chunks before returning
    chunk_size: int = 1000  # characters (presumably) per document chunk — confirm unit
    chunk_overlap: int = 200  # overlap between consecutive chunks

@dataclass
class SecurityConfig:
    """Security configuration settings"""
    # NOTE(review): wildcard CORS is wide open — acceptable for a public demo
    # Space, but tighten for any deployment handling credentials.
    cors_origins: List[str] = field(default_factory=lambda: ["*"])
    cors_methods: List[str] = field(default_factory=lambda: ["*"])
    cors_headers: List[str] = field(default_factory=lambda: ["*"])
    rate_limit_enabled: bool = True  # toggle request rate limiting
    rate_limit_requests: int = 100  # allowed requests per period
    rate_limit_period: int = 60  # seconds

@dataclass
class LoggingConfig:
    """Logging configuration settings"""
    level: str = "INFO"  # root log level name
    format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"  # stdlib logging format
    file_enabled: bool = True  # also write logs to file_path
    file_path: str = "/tmp/researchmate/logs/app.log"  # writable on HF Spaces
    max_file_size: int = 10 * 1024 * 1024  # 10MB
    backup_count: int = 5  # rotated log files to keep
    console_enabled: bool = True  # mirror logs to stdout/stderr

class Settings:
    """Main settings class for ResearchMate.

    Aggregates every configuration section (server, database, AI model,
    upload, search, security, logging) and resolves values with the
    precedence: dataclass defaults < JSON config file < environment
    variables.  All filesystem paths are forced under /tmp so the app can
    run on hosts with a read-only application directory (e.g. HF Spaces).
    """

    def __init__(self, config_file: Optional[str] = None):
        """Load and validate configuration.

        Args:
            config_file: Path to a JSON settings file.  Defaults to
                ``$CONFIG_DIR/settings.json`` (CONFIG_DIR itself falls
                back to /tmp/researchmate/config).
        """
        self.config_file = config_file or self._get_default_config_file()
        # NOTE(review): assumes this module sits one directory below the
        # project root — confirm against the actual package layout.
        self.project_root = Path(__file__).parent.parent

        # Initialize configuration objects with HF Spaces-safe defaults
        self.server = ServerConfig()
        self.database = DatabaseConfig()
        self.ai_model = AIModelConfig()
        self.upload = UploadConfig()
        self.search = SearchConfig()
        self.security = SecurityConfig()
        self.logging = LoggingConfig()

        # Load configuration (file first, then environment overrides)
        self._load_config()
        self._validate_config()

    def _get_default_config_file(self) -> str:
        """Get default configuration file path"""
        # Always use writable config directory for HF Spaces
        config_dir = os.environ.get('CONFIG_DIR', '/tmp/researchmate/config')
        return str(Path(config_dir) / "settings.json")

    @staticmethod
    def _env_int(name: str, default: int) -> int:
        """Read an integer env var; fall back to *default* (with a warning)
        when the variable is unset or malformed instead of crashing."""
        raw = os.getenv(name)
        if raw is None:
            return default
        try:
            return int(raw)
        except ValueError:
            print(f"⚠ Warning: Invalid integer for {name}: {raw!r}, using {default}")
            return default

    @staticmethod
    def _env_float(name: str, default: float) -> float:
        """Read a float env var; fall back to *default* (with a warning)
        when the variable is unset or malformed instead of crashing."""
        raw = os.getenv(name)
        if raw is None:
            return default
        try:
            return float(raw)
        except ValueError:
            print(f"⚠ Warning: Invalid number for {name}: {raw!r}, using {default}")
            return default

    def _load_config(self):
        """Load configuration from file and environment variables"""
        # Load from file if exists; a broken file only warns — defaults survive
        config_path = Path(self.config_file)
        if config_path.exists():
            try:
                with open(config_path, 'r') as f:
                    config_data = json.load(f)
                self._apply_config_data(config_data)
            except Exception as e:
                logging.warning(f"Failed to load config file: {e}")

        # Override with environment variables (highest precedence)
        self._load_from_env()

    def _apply_config_data(self, config_data: Dict[str, Any]):
        """Apply configuration data to settings objects.

        Unknown sections and unknown keys are silently ignored, so stale
        config files cannot inject arbitrary attributes.
        """
        for section, data in config_data.items():
            if hasattr(self, section):
                section_obj = getattr(self, section)
                for key, value in data.items():
                    if hasattr(section_obj, key):
                        setattr(section_obj, key, value)

    def _load_from_env(self):
        """Load configuration from environment variables.

        Numeric variables are parsed defensively via _env_int/_env_float so
        a malformed value (e.g. PORT="abc") degrades to the current default
        with a warning instead of raising ValueError at import time.
        """
        # Server configuration
        self.server.host = os.getenv("HOST", self.server.host)
        self.server.port = self._env_int("PORT", self.server.port)
        self.server.debug = os.getenv("DEBUG", "false").lower() == "true"
        self.server.reload = os.getenv("RELOAD", "false").lower() == "true"
        self.server.workers = self._env_int("WORKERS", self.server.workers)
        self.server.log_level = os.getenv("LOG_LEVEL", self.server.log_level)

        # Database configuration - ALWAYS use writable tmp paths
        self.database.chroma_persist_dir = os.getenv("CHROMA_DIR", "/tmp/researchmate/chroma_persist")
        self.database.collection_name = os.getenv("COLLECTION_NAME", self.database.collection_name)
        self.database.similarity_threshold = self._env_float("SIMILARITY_THRESHOLD", self.database.similarity_threshold)
        self.database.max_results = self._env_int("MAX_RESULTS", self.database.max_results)

        # AI model configuration
        self.ai_model.model_name = os.getenv("MODEL_NAME", self.ai_model.model_name)
        self.ai_model.temperature = self._env_float("TEMPERATURE", self.ai_model.temperature)
        self.ai_model.max_tokens = self._env_int("MAX_TOKENS", self.ai_model.max_tokens)
        self.ai_model.timeout = self._env_int("MODEL_TIMEOUT", self.ai_model.timeout)

        # Upload configuration - ALWAYS use writable tmp paths
        self.upload.max_file_size = self._env_int("MAX_FILE_SIZE", self.upload.max_file_size)
        self.upload.upload_directory = os.getenv("UPLOADS_DIR", "/tmp/researchmate/uploads")
        self.upload.temp_directory = os.getenv("TEMP_DIR", "/tmp/researchmate/tmp")

        # Logging configuration - ALWAYS use writable tmp paths
        self.logging.level = os.getenv("LOG_LEVEL", self.logging.level)
        self.logging.file_path = os.getenv("LOG_FILE", "/tmp/researchmate/logs/app.log")

        # Ensure no hardcoded /data paths slip through
        self._sanitize_paths()

    def _sanitize_paths(self):
        """Ensure no paths point to non-writable locations.

        Any storage path not under /tmp/ (this subsumes /data, ./data and
        /app/data, which the env overrides could have injected) is reset to
        its known-writable fallback, with a warning.
        """
        # (attribute path, writable fallback) pairs to enforce
        writable_paths = [
            ('database.chroma_persist_dir', '/tmp/researchmate/chroma_persist'),
            ('upload.upload_directory', '/tmp/researchmate/uploads'),
            ('upload.temp_directory', '/tmp/researchmate/tmp'),
            ('logging.file_path', '/tmp/researchmate/logs/app.log'),
        ]

        for path_attr, fallback in writable_paths:
            obj, attr = path_attr.split('.')
            current_path = getattr(getattr(self, obj), attr)

            # Anything outside /tmp/ is treated as potentially read-only
            if not current_path.startswith('/tmp/'):
                print(f"⚠ Warning: Changing {path_attr} from {current_path} to {fallback}")
                setattr(getattr(self, obj), attr, fallback)

    def _validate_config(self):
        """Validate configuration settings.

        Out-of-range values are clamped back to safe defaults with a
        warning; nothing here raises, so startup always proceeds.
        """
        # Validate required environment variables
        required_env_vars = ["GROQ_API_KEY"]
        missing_vars = [var for var in required_env_vars if not os.getenv(var)]

        if missing_vars:
            print(f"⚠ Warning: Missing environment variables: {', '.join(missing_vars)}")
            print("Some features may not work without these variables")

        # Validate server configuration
        if not (1 <= self.server.port <= 65535):
            print(f"⚠ Warning: Invalid port {self.server.port}, using 7860")
            self.server.port = 7860

        # Validate AI model configuration
        if not (0.0 <= self.ai_model.temperature <= 2.0):
            print(f"⚠ Warning: Invalid temperature {self.ai_model.temperature}, using 0.7")
            self.ai_model.temperature = 0.7

        if not (1 <= self.ai_model.max_tokens <= 32768):
            print(f"⚠ Warning: Invalid max_tokens {self.ai_model.max_tokens}, using 4096")
            self.ai_model.max_tokens = 4096

        # Validate database configuration
        if not (0.0 <= self.database.similarity_threshold <= 1.0):
            print(f"⚠ Warning: Invalid similarity_threshold {self.database.similarity_threshold}, using 0.7")
            self.database.similarity_threshold = 0.7

        # Create directories if they don't exist
        self._create_directories()

    def _create_directories(self):
        """Create necessary directories.

        Failures are reported but never raised — the app should still start
        even when a directory cannot be created.
        """
        directories = [
            self.database.chroma_persist_dir,
            self.upload.upload_directory,
            self.upload.temp_directory,
            Path(self.logging.file_path).parent,
            Path(self.config_file).parent
        ]

        for directory in directories:
            try:
                path = Path(directory)
                path.mkdir(parents=True, exist_ok=True)
                # Ensure write permissions
                # NOTE(review): 0o777 (world-writable) looks deliberate for
                # shared-tmp hosting, but is overly permissive elsewhere —
                # consider 0o755 or the process umask for other deployments.
                path.chmod(0o777)
                print(f"βœ“ Created/verified directory: {directory}")
            except Exception as e:
                print(f"⚠ Warning: Could not create directory {directory}: {e}")
                # Continue without raising error

    def save_config(self):
        """Save current configuration to file (best-effort; never raises)."""
        config_data = {
            "server": asdict(self.server),
            "database": asdict(self.database),
            "ai_model": asdict(self.ai_model),
            "upload": asdict(self.upload),
            "search": asdict(self.search),
            "security": asdict(self.security),
            "logging": asdict(self.logging)
        }

        config_path = Path(self.config_file)
        try:
            config_path.parent.mkdir(parents=True, exist_ok=True)

            with open(config_path, 'w') as f:
                json.dump(config_data, f, indent=2)
            print(f"βœ“ Configuration saved to: {config_path}")
        except Exception as e:
            print(f"⚠ Warning: Could not save config file: {e}")
            # Don't raise the error for config saving

    def get_groq_api_key(self) -> str:
        """Get Groq API key from environment.

        Returns a placeholder ("dummy_key") when unset so downstream client
        construction does not crash; API calls will still fail with it.
        """
        api_key = os.getenv("GROQ_API_KEY")
        if not api_key:
            print("⚠ Warning: GROQ_API_KEY environment variable is not set")
            return "dummy_key"  # Return dummy key to prevent crashes
        return api_key

    def get_database_url(self) -> str:
        """Get database connection URL (SQLite file inside the Chroma dir)."""
        return f"sqlite:///{self.database.chroma_persist_dir}/chroma.db"

    def get_static_url(self) -> str:
        """Get static files URL"""
        return "/static"

    def get_templates_dir(self) -> str:
        """Get templates directory (under the project root, not /tmp)."""
        return str(self.project_root / "src" / "templates")

    def get_static_dir(self) -> str:
        """Get static files directory (under the project root, not /tmp)."""
        return str(self.project_root / "src" / "static")

    def get_upload_dir(self) -> str:
        """Get upload directory"""
        return self.upload.upload_directory

    def is_development(self) -> bool:
        """Check if running in development mode (ENVIRONMENT=development)."""
        return os.getenv("ENVIRONMENT", "production").lower() == "development"

    def is_production(self) -> bool:
        """Check if running in production mode (the default)."""
        return not self.is_development()

    def __str__(self) -> str:
        """String representation of settings"""
        return f"ResearchMate Settings (Config: {self.config_file})"

    def __repr__(self) -> str:
        """Detailed representation of settings"""
        return f"Settings(config_file='{self.config_file}')"

# Global settings instance
settings = Settings()

# Convenience functions
def get_settings() -> Settings:
    """Return the shared module-level Settings singleton."""
    return settings

def reload_settings():
    """Rebuild the global Settings object, re-reading its config file."""
    global settings
    settings = Settings(settings.config_file)

def create_default_config():
    """Write a configuration file with all defaults and return its path."""
    fresh = Settings()
    fresh.save_config()
    return fresh.config_file

if __name__ == "__main__":
    # Manual smoke test: load the settings, print a summary, persist them.
    print("ResearchMate Settings Test")
    print("=" * 40)

    try:
        cfg = get_settings()
        print(f"βœ“ Settings loaded successfully")
        print(f"Config file: {cfg.config_file}")
        print(f"Server: {cfg.server.host}:{cfg.server.port}")
        print(f"AI Model: {cfg.ai_model.model_name}")
        print(f"Database: {cfg.database.chroma_persist_dir}")
        print(f"Upload dir: {cfg.get_upload_dir()}")
        print(f"Groq API Key: {'Set' if cfg.get_groq_api_key() else 'Not set'}")
        print(f"Environment: {'Development' if cfg.is_development() else 'Production'}")

        # Round-trip the configuration back to disk
        cfg.save_config()

    except Exception as exc:
        print(f"❌ Error: {exc}")
        import traceback
        traceback.print_exc()