testFastAPI / src /hf2.py
KhairulAmirinUM's picture
i hate hf
ae4c85e
raw
history blame contribute delete
408 Bytes
"""Fetch the base TinyBERT tokenizer from the Hugging Face hub and save a
copy of its files into the fine-tuned checkpoint directory.

Side effects: network I/O on first run (hub download, cached afterwards)
and writes tokenizer files (vocab, tokenizer config) under ``model_path``.
"""
from transformers import BertTokenizer, BertForSequenceClassification,TextClassificationPipeline, AutoModelForSequenceClassification
# NOTE(review): only the tokenizer is handled below — no model is loaded or
# saved here. BertForSequenceClassification, TextClassificationPipeline and
# AutoModelForSequenceClassification are imported but unused in this file;
# presumably leftovers from a larger pipeline — confirm before removing.
# Directory of the fine-tuned checkpoint the tokenizer files are written into.
model_path = 'intent_classification/TinyBERT_106_V2' # can try other checkpoints
# Download the generic TinyBERT tokenizer from the hub (not from model_path) ...
tokenizer = BertTokenizer.from_pretrained('huawei-noah/TinyBERT_General_4L_312D')
# ... and persist it alongside the fine-tuned weights, presumably so the
# checkpoint directory is self-contained (model + tokenizer) — verify intent.
tokenizer.save_pretrained(model_path)
# Progress marker for interactive runs.
print('finish')