# Configuration constants for the TF-IDF + LightGBM text classification
# pipeline: file paths, text and label columns, vectorizer settings, and
# the train/test split.
import os

# --- Paths ---
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
DATA_PATH = os.path.join(BASE_DIR, "data", "synthetic_transactions_samples_5000.csv")

MODEL_SAVE_DIR = os.path.join(BASE_DIR, "models")
LABEL_ENCODERS_PATH = os.path.join(MODEL_SAVE_DIR, "label_encoders.pkl")
TFIDF_VECTORIZER_PATH = os.path.join(MODEL_SAVE_DIR, "tfidf_vectorizer.pkl")
MODEL_PATH = os.path.join(MODEL_SAVE_DIR, "lgbm_model.pkl")
PREDICTIONS_SAVE_DIR = os.path.join(BASE_DIR, "predictions")

# --- Columns ---
TEXT_COLUMN = "Sanction_Context"
LABEL_COLUMNS = [
    "Red_Flag_Reason",
    "Maker_Action",
    "Escalation_Level",
    "Risk_Category",
    "Risk_Drivers",
    "Investigation_Outcome"
]

# --- TF-IDF Settings ---
TFIDF_MAX_FEATURES = 5000
NGRAM_RANGE = (1, 2)
USE_STOPWORDS = True

# --- Train/Test Split ---
RANDOM_STATE = 42
TEST_SIZE = 0.2
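

# --- Usage sketch (illustrative only) ---
# A minimal example of how a training script might consume the settings
# above. It assumes pandas and scikit-learn are installed and that the CSV
# at DATA_PATH contains the TEXT_COLUMN and LABEL_COLUMNS listed here; it
# only runs when this module is executed directly.
if __name__ == "__main__":
    import pandas as pd
    from sklearn.feature_extraction.text import TfidfVectorizer
    from sklearn.model_selection import train_test_split

    df = pd.read_csv(DATA_PATH)

    # Vectorize the free-text context field with the TF-IDF settings above.
    vectorizer = TfidfVectorizer(
        max_features=TFIDF_MAX_FEATURES,
        ngram_range=NGRAM_RANGE,
        stop_words="english" if USE_STOPWORDS else None,
    )
    X = vectorizer.fit_transform(df[TEXT_COLUMN].fillna(""))

    # Hold out a test split per label column using the shared seed.
    for label in LABEL_COLUMNS:
        X_train, X_test, y_train, y_test = train_test_split(
            X, df[label], test_size=TEST_SIZE, random_state=RANDOM_STATE
        )
        print(label, X_train.shape, X_test.shape)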