import os

# Use the pure-Python protobuf implementation; set this before transformers pulls in protobuf.
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"

from transformers import AutoTokenizer, T5Tokenizer

# Load the ChatYuan-large-v2 tokenizer from a directory located next to this script.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
TOKENIZER_DIR = os.path.join(CURRENT_DIR, "ChatYuan-large-v2")
# tokenizer = AutoTokenizer.from_pretrained(TOKENIZER_DIR)
tokenizer = T5Tokenizer.from_pretrained(TOKENIZER_DIR)
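
# Minimal smoke test (illustrative sketch, not part of the original file): the sample
# string below is an assumption; it just checks that the loaded tokenizer can encode
# text to ids and decode them back.
sample = "你好，世界"
ids = tokenizer.encode(sample)
print(ids)
print(tokenizer.decode(ids, skip_special_tokens=True))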