# BERT 模型 — TensorFlow Hub / Hugging Face transformers 用法示例
# --- BERT via TensorFlow Hub ---
# NOTE(review): run `pip install tensorflow_text` first; the original
# `!pip install ...` notebook magic was removed because it is not valid
# Python in a plain .py file.
import tensorflow_hub as hub
import tensorflow as tf
import tensorflow_text as text  # imported for its side effect: registers the custom TF ops the preprocess model needs

# Chinese BERT encoder (alternative checkpoint):
# bert_model_hub = 'https://hub.tensorflow.google.cn/tensorflow/bert_zh_L-12_H-768_A-12/2'
bert_model_hub = 'https://hub.tensorflow.google.cn/google/experts/bert/wiki_books/mnli/2'
max_length = 100
text_test = ['这个电影不是很好看']

# NOTE(review): this preprocessor belongs to *English uncased* BERT, but the
# test sentence above is Chinese, so most tokens will map to [UNK]. For
# Chinese input, pair the bert_zh encoder (commented above) with its matching
# zh preprocessor — confirm which language is intended.
bert_preprocess_model = 'https://tfhub.dev/tensorflow/bert_en_uncased_preprocess/1'
bert_preprocess_model = hub.KerasLayer(bert_preprocess_model)
text_preprocessed = bert_preprocess_model(text_test)

bert_model = hub.KerasLayer(bert_model_hub)
encoder = bert_model(text_preprocessed)
# `encoder` is a dict of outputs (sequence output and pooled output, per the hub model's signature).
# transformers 在 PyTorch 中的用法
# --- Hugging Face transformers, PyTorch backend ---
from transformers import AutoTokenizer, AutoModel

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
model = AutoModel.from_pretrained("bert-base-uncased")

# Tokenize one sentence; return_tensors="pt" yields PyTorch tensors.
encoded = tokenizer("Hello world!", return_tensors="pt")
result = model(**encoded)

print(encoded)
print(result.keys())

# Pooled [CLS] representation: one summary vector per input sentence.
pooled = result['pooler_output']
print(pooled.shape)
# transformers 在 TensorFlow 中的用法
# --- Hugging Face transformers, TensorFlow backend ---
from transformers import BertTokenizer, TFBertModel

# FIX: the demo sentence below is Chinese, but the original code loaded the
# English 'bert-base-uncased' checkpoint, whose tokenizer maps Chinese text
# almost entirely to [UNK]. Load the matching Chinese checkpoint instead.
tokenizer = BertTokenizer.from_pretrained('bert-base-chinese')
model = TFBertModel.from_pretrained("bert-base-chinese")

text = "我喜欢你"
encoded_input = tokenizer(text, return_tensors='tf')  # 'tf' -> TensorFlow tensors
output = model(encoded_input)