{ "report_every": 100, "tensorboard": true, "tgt_vocab_size": 20000, "tensorboard_log_dir": "tensorboard", "tensorboard_log_dir_dated": "tensorboard/Mar-16_23-09-35", "tgt_vocab": "en.eole.vocab", "valid_metrics": [ "BLEU" ], "vocab_size_multiple": 8, "src_vocab": "de.eole.vocab", "src_vocab_size": 20000, "transforms": [ "sentencepiece", "filtertoolong" ], "save_data": "de-en/data", "share_vocab": false, "n_sample": 0, "overwrite": true, "seed": 1234, "training": { "num_workers": 0, "normalization": "tokens", "learning_rate": 2.0, "bucket_size": 128000, "train_steps": 200000, "world_size": 1, "accum_count": [ 16 ], "param_init_method": "xavier_uniform", "max_grad_norm": 2.0, "optim": "pagedadamw8bit", "decay_method": "noam", "batch_size_multiple": 8, "gpu_ranks": [ 0 ], "label_smoothing": 0.1, "warmup_steps": 5000, "adam_beta2": 0.998, "batch_type": "tokens", "dropout": [ 0.1 ], "accum_steps": [ 0 ], "prefetch_factor": 32, "batch_size": 8192, "average_decay": 0.0001, "save_checkpoint_steps": 1000, "valid_batch_size": 8192, "model_path": "model", "valid_steps": 1000, "train_from": "model", "attention_dropout": [ 0.0 ], "dropout_steps": [ 0 ], "keep_checkpoint": 4, "compute_dtype": "torch.float16" }, "model": { "add_estimator": false, "mlp_activation_fn": "gelu", "share_embeddings": false, "norm_eps": 1e-06, "transformer_ff": 4096, "heads": 8, "share_decoder_embeddings": true, "layer_norm": "standard", "add_qkvbias": false, "hidden_size": 1024, "add_ffnbias": true, "architecture": "transformer", "position_encoding_type": "SinusoidalInterleaved", "embeddings": { "word_vec_size": 1024, "tgt_word_vec_size": 1024, "position_encoding_type": "SinusoidalInterleaved", "src_word_vec_size": 1024 }, "encoder": { "encoder_type": "transformer", "heads": 8, "layer_norm": "standard", "hidden_size": 1024, "add_qkvbias": false, "mlp_activation_fn": "gelu", "add_ffnbias": true, "n_positions": null, "norm_eps": 1e-06, "layers": 8, "src_word_vec_size": 1024, "transformer_ff": 4096, "position_encoding_type": "SinusoidalInterleaved" }, "decoder": { "layer_norm": "standard", "heads": 8, "add_qkvbias": false, "hidden_size": 1024, "decoder_type": "transformer", "mlp_activation_fn": "gelu", "tgt_word_vec_size": 1024, "add_ffnbias": true, "n_positions": null, "norm_eps": 1e-06, "layers": 2, "transformer_ff": 4096, "position_encoding_type": "SinusoidalInterleaved" } }, "transforms_configs": { "sentencepiece": { "src_subword_model": "${MODEL_PATH}/de.spm.model", "tgt_subword_model": "${MODEL_PATH}/en.spm.model" }, "filtertoolong": { "src_seq_length": 256, "tgt_seq_length": 256 } }, "data": { "corpus_1": { "transforms": [ "sentencepiece", "filtertoolong" ], "path_tgt": "hf://quickmt/quickmt-train.de-en/en", "path_src": "hf://quickmt/quickmt-train.de-en/de", "path_sco": "hf://quickmt/quickmt-train.de-en/sco", "path_align": null }, "valid": { "transforms": [ "sentencepiece", "filtertoolong" ], "path_src": "dev.de", "path_tgt": "dev.en", "path_align": null } } }