{ "tokenizer_class": "GPT2Tokenizer", "bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "unk_token": "<|endoftext|>", "pad_token": null, "model_max_length": 1024 }