Upload config.json with huggingface_hub
config.json  CHANGED  (+3 -2)
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "gpt_small_config.json",
   "activation_function": "gelu",
   "architectures": [
     "GPT2LMHeadModel"
@@ -30,5 +30,6 @@
   "torch_dtype": "float32",
   "transformers_version": "4.18.0",
   "use_cache": true,
-  "vocab_size": 51200
+  "vocab_size": 51200,
+  "prefix": "[CLS]"
 }
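For reference, a commit with this title is what the huggingface_hub client produces when a file is pushed through its upload_file API. The sketch below shows how such an upload could be reproduced; the repo id "username/gpt-small" and the local file path are placeholder assumptions, not values taken from this commit page.

```python
# Minimal sketch of uploading a config.json to the Hub with huggingface_hub.
# Assumes you are already authenticated (e.g. via `huggingface-cli login`).
from huggingface_hub import HfApi

api = HfApi()

api.upload_file(
    path_or_fileobj="config.json",        # local file to upload
    path_in_repo="config.json",           # destination path inside the repo
    repo_id="username/gpt-small",         # placeholder repo id
    repo_type="model",
    commit_message="Upload config.json with huggingface_hub",
)
```

When commit_message is omitted, upload_file falls back to a default of the form "Upload <path_in_repo> with huggingface_hub", which is where the commit title above comes from.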