def convert_example(example, tokenizer):
    # Tokenize the raw text and attach the label so each record is ready for training.
    tokenized = tokenizer(example["text"])
    tokenized["label"] = [example["label"]]
    return tokenized
from functools import partial

# Bind the tokenizer by keyword so that `example` stays free as the first positional argument.
trans_func = partial(convert_example, tokenizer=tokenizer)
train_ds.map(trans_func)
train_ds[0]
Note: trans_func = partial(convert_example, tokenizer) would bind the tokenizer to the first parameter, example, which is the argument that map fills in; it has to be rewritten as trans_func = partial(convert_example, tokenizer=tokenizer), as done above.
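To make the difference concrete, here is a minimal, self-contained sketch of how functools.partial binds its arguments; fake_tokenizer is a hypothetical stand-in for the real tokenizer, used only for illustration and not part of the original code.

from functools import partial

def convert_example(example, tokenizer):
    tokenized = tokenizer(example["text"])
    tokenized["label"] = [example["label"]]
    return tokenized

def fake_tokenizer(text):
    # Hypothetical stand-in for a real tokenizer: just splits the text on whitespace.
    return {"input_ids": text.split()}

example = {"text": "hello world", "label": 1}

# Positional binding puts the tokenizer into the first slot (example), so the
# dict passed by map ends up in the tokenizer slot and the call fails.
broken = partial(convert_example, fake_tokenizer)
try:
    broken(example)
except TypeError as err:
    print("positional binding fails:", err)

# Keyword binding leaves the first positional slot free for the example.
fixed = partial(convert_example, tokenizer=fake_tokenizer)
print(fixed(example))  # {'input_ids': ['hello', 'world'], 'label': [1]}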