model:
  target: models.Tiffusion.tiffusion.Tiffusion
  params:
    seq_length: 240
    feature_size: 3
    n_layer_enc: 6
    n_layer_dec: 4
    d_model: 128  # 8 X 16
    timesteps: 500  # diffusion timesteps
    sampling_timesteps: 500
    loss_type: 'l1'
    beta_schedule: 'cosine'
    n_heads: 8
    mlp_hidden_times: 4
    attn_pd: 0.0
    resid_pd: 0.0
    kernel_size: 1
    padding_size: 0
    control_signal: []
    # - classifier-based-sum-control
    # - classifier-free-sum-control
    # - range-wise-peak-control

solver:
  base_lr: 2.0e-5
  max_epochs: 2230  # 11150
  results_folder: ../../../data/ckpt_baseline
  gradient_accumulate_every: 2
  save_cycle: 223  # 1115  # max_epochs // 5
  ema:
    decay: 0.995
    update_interval: 10

  scheduler:
    target: engine.lr_sch.ReduceLROnPlateauWithWarmup
    params:
      factor: 0.65
      patience: 200
      min_lr: 1.0e-5
      threshold: 1.0e-1
      threshold_mode: rel
      warmup_lr: 8.0e-4
      warmup: 100
      verbose: False

dataloader:
  train_dataset:
    target: utils.data_utils.real_datasets.RevenueDataset
    params:
      name: revenue
      proportion: 0.8  # set to a rate < 1 if training conditional generation
      # data_root: ./Data/datasets/stock_data.csv
      data_root: ../../../data/daily.csv
      window: 240  # seq_length
      save2npy: True
      neg_one_to_one: True
      seed: 2024
      period: train

  test_dataset:
    target: utils.data_utils.real_datasets.RevenueDataset
    params:
      name: revenue
      proportion: 0.8  # rate
      data_root: ../../../data/daily.csv
      window: 240  # seq_length
      save2npy: True
      neg_one_to_one: True
      seed: 2024
      period: test
      style: separate
      # distribution: geometric
      distribution: uniform
      missing_ratio: 0.5
    coefficient: 1.0e-2
    step_size: 5.0e-2
    sampling_steps: 500

  batch_size: 64
  sample_size: 256
  shuffle: True
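
# The `target` / `params` pairs above follow the usual "instantiate from config"
# pattern; the actual loader lives in the training code, not in this file. A minimal,
# hypothetical Python sketch of how such a section could be resolved (module path,
# class name, and keyword arguments) is given below for orientation only. The helper
# name `instantiate_from_config` and the file name `revenue_baseline.yaml` are
# illustrative assumptions, not part of this config.
#
#   import importlib
#   import yaml
#
#   def instantiate_from_config(cfg):
#       """Import cfg['target'] as module.Class and build it with cfg['params']."""
#       module_path, cls_name = cfg["target"].rsplit(".", 1)
#       cls = getattr(importlib.import_module(module_path), cls_name)
#       return cls(**cfg.get("params", {}))
#
#   with open("revenue_baseline.yaml") as f:   # hypothetical name for this file
#       config = yaml.safe_load(f)
#   model = instantiate_from_config(config["model"])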