Update tokenizer_config.json
tokenizer_config.json: +2 -1
tokenizer_config.json
CHANGED
@@ -852,5 +852,6 @@
   "clean_up_tokenization_spaces": false,
   "extra_special_tokens": {},
   "model_max_length": 1000000000000000019884624838656,
-  "tokenizer_class": "PreTrainedTokenizerFast"
+  "tokenizer_class": "PreTrainedTokenizerFast",
+  "model_input_names": ["input_ids", "attention_mask"]
 }