    value = self.tokenizer.convert_tokens_to_string(lex_tokens)
  File "/opt/conda/envs/mttod/lib/python3.10/site-packages/transformers/models/t5/tokenization_t5.py", line 425, in convert_tokens_to_string
    tokens[0] = tokens[0].lstrip(SPIECE_UNDERLINE)
...