{
  "backend": "tokenizers",
  "clean_up_tokenization_spaces": false,
  "do_lower_case": false,
  "eos_token": "<|endoftext|>",
  "is_local": false,
  "model_max_length": 128000,
  "pad_token": "<|endoftext|>",
  "padding_side": "left",
  "remove_space": false,
  "tokenizer_class": "TokenizersBackend"
}