Upload tokenizer
Files changed:
- special_tokens_map.json (+1 -7)
- tokenizer.json (+1 -1)
- tokenizer_config.json (+1 -1)
    	
special_tokens_map.json
CHANGED

@@ -13,13 +13,7 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "<|placeholder6|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": true,
-    "single_word": false
-  },
+  "pad_token": "<|placeholder6|>",
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
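This hunk collapses `pad_token` from a full token object to a bare string; the two hunks below flip the same token's `rstrip` flag from `true` to `false` in `tokenizer.json` and `tokenizer_config.json`, which is where the per-token flags now live. A minimal sketch of how the loaded result can be checked with `transformers`, assuming a recent version that exposes `added_tokens_decoder`; the checkout path is a placeholder, not something recorded in this commit:

```python
from transformers import AutoTokenizer

# Load the tokenizer from a local clone of this repo
# ("./checkout" is a placeholder path).
tokenizer = AutoTokenizer.from_pretrained("./checkout")

# The pad token content is unchanged by this commit.
print(tokenizer.pad_token)   # <|placeholder6|>

# The per-token flags are exposed as AddedToken objects;
# after this commit, rstrip should read False.
pad = tokenizer.added_tokens_decoder[tokenizer.pad_token_id]
print(pad.rstrip)            # False
```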
    	
tokenizer.json
CHANGED

@@ -116,7 +116,7 @@
       "content": "<|placeholder6|>",
       "single_word": false,
       "lstrip": false,
-      "rstrip": true,
+      "rstrip": false,
       "normalized": false,
       "special": true
     },
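Because `tokenizer.json` stores its `added_tokens` entries as plain JSON, the flag flip can also be confirmed without `transformers` at all. A small sketch, assuming the committed file sits in the working directory:

```python
import json

# Read the committed tokenizer.json directly; the relative path
# is an assumption about where the repo was cloned.
with open("tokenizer.json") as f:
    data = json.load(f)

# Locate the <|placeholder6|> entry among the added tokens and
# confirm the flag this commit touches.
pad = next(t for t in data["added_tokens"]
           if t["content"] == "<|placeholder6|>")
print(pad["rstrip"])  # False after this commit, true before
```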
    	
tokenizer_config.json
CHANGED

@@ -103,7 +103,7 @@
       "content": "<|placeholder6|>",
       "lstrip": false,
       "normalized": false,
-      "rstrip": true,
+      "rstrip": false,
       "single_word": false,
       "special": true
     },
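For completeness, a hedged sketch of how a change like this can be produced and re-saved; the base checkpoint path is a placeholder, since the diff does not record where the tokenizer was loaded from, and the exact files `save_pretrained` rewrites depend on the `transformers` version:

```python
from transformers import AddedToken, AutoTokenizer

# Placeholder source checkpoint; not taken from this commit.
tokenizer = AutoTokenizer.from_pretrained("path/to/base-model")

# Re-register the pad token with the flags shown on the "+" side
# of the hunks above (rstrip now False).
tokenizer.pad_token = AddedToken(
    "<|placeholder6|>",
    lstrip=False, rstrip=False,
    normalized=False, single_word=False,
    special=True,
)

# save_pretrained rewrites special_tokens_map.json, tokenizer.json
# and tokenizer_config.json, the three files touched here.
tokenizer.save_pretrained("out/")
```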

