{
    "module": "keras_hub.src.models.gemma.gemma_causal_lm",
    "class_name": "GemmaCausalLM",
    "config": {
        "backbone": {
            "module": "keras_hub.src.models.gemma.gemma_backbone",
            "class_name": "GemmaBackbone",
            "config": {
                "name": "gemma_backbone",
                "trainable": true,
                "vocabulary_size": 256000,
                "num_layers": 26,
                "num_query_heads": 8,
                "num_key_value_heads": 4,
                "hidden_dim": 2304,
                "intermediate_dim": 18432,
                "head_dim": 256,
                "layer_norm_epsilon": 1e-06,
                "dropout": 0,
                "query_head_dim_normalize": true,
                "use_post_ffw_norm": true,
                "use_post_attention_norm": true,
                "final_logit_soft_cap": 30.0,
                "attention_logit_soft_cap": 50.0,
                "sliding_window_size": 4096,
                "use_sliding_window_attention": true
            },
            "registered_name": "keras_hub>GemmaBackbone"
        },
        "preprocessor": {
            "module": "keras_hub.src.models.gemma.gemma_causal_lm_preprocessor",
            "class_name": "GemmaCausalLMPreprocessor",
            "config": {
                "name": "gemma_causal_lm_preprocessor",
                "trainable": true,
                "dtype": {
                    "module": "keras",
                    "class_name": "DTypePolicy",
                    "config": {
                        "name": "float32"
                    },
                    "registered_name": null
                },
                "tokenizer": {
                    "module": "keras_hub.src.models.gemma.gemma_tokenizer",
                    "class_name": "GemmaTokenizer",
                    "config": {
                        "name": "gemma_tokenizer",
                        "trainable": true,
                        "dtype": {
                            "module": "keras",
                            "class_name": "DTypePolicy",
                            "config": {
                                "name": "int32"
                            },
                            "registered_name": null
                        },
                        "config_file": "tokenizer.json",
                        "proto": null,
                        "sequence_length": null,
                        "add_bos": false,
                        "add_eos": false
                    },
                    "registered_name": "keras_hub>GemmaTokenizer"
                },
                "config_file": "preprocessor.json",
                "sequence_length": 64,
                "add_start_token": true,
                "add_end_token": true
            },
            "registered_name": "keras_hub>GemmaCausalLMPreprocessor"
        },
        "name": "gemma_causal_lm"
    },
    "registered_name": "keras_hub>GemmaCausalLM"
}