---
# Model deployment configuration.
# NOTE(review): the original file had lost all line breaks/indentation; the
# section nesting below (training/inference/deployment as top-level siblings
# of `model`) is the conventional layout but should be confirmed against the
# consumer of this config.

model:
  # NOTE(review): "OrcaleSeek" may be a typo for "OracleSeek" — confirm the
  # intended product name before changing; preserved as-is here.
  name: "OrcaleSeek"
  version: "1.0.0"
  type: "text-classification"
  architecture: "transformer"

training:
  # Written with an explicit decimal point: bare `2e-5` is parsed as a
  # *string* by YAML 1.1 parsers (e.g. PyYAML's default loader), whose float
  # pattern requires a dot in the mantissa. `2.0e-5` is a float everywhere.
  learning_rate: 2.0e-5
  batch_size: 16
  max_sequence_length: 512
  num_epochs: 5

inference:
  max_length: 128
  temperature: 0.7
  top_p: 0.9
  do_sample: true

deployment:
  # Flow style is fine here: a short, atomic leaf-level list.
  supported_frameworks: ["pytorch", "onnx"]
  min_memory_gb: 4