```python
from transformers import AutoTokenizer, AutoModelForMaskedLM

# bert-base-chinese tokenizes Chinese text character by character,
# so each character becomes its own token.
tokenizer = AutoTokenizer.from_pretrained("bert-base-chinese")
tokens = tokenizer.tokenize("明天搜索")  # "tomorrow search"
print(tokens)  # ['明', '天', '搜', '索']
```
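Since the snippet imports `AutoModelForMaskedLM` without using it, a natural next step is masked-token prediction with the same checkpoint. The following is a minimal sketch, not part of the original snippet; the masked position and the top-5 cutoff are illustrative choices.

```python
import torch
from transformers import AutoTokenizer, AutoModelForMaskedLM

# Load the same bert-base-chinese checkpoint for fill-mask prediction.
tokenizer = AutoTokenizer.from_pretrained("bert-base-chinese")
model = AutoModelForMaskedLM.from_pretrained("bert-base-chinese")

# Mask one character of the phrase and ask the model to predict it.
inputs = tokenizer("明天[MASK]索", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# Locate the [MASK] position in the input sequence.
mask_index = (inputs["input_ids"] == tokenizer.mask_token_id).nonzero(as_tuple=True)[1]

# Print the five most likely tokens for the masked position.
top_ids = logits[0, mask_index].topk(5).indices[0]
print(tokenizer.convert_ids_to_tokens(top_ids.tolist()))
```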