Update for the latest transformers: insert the following at modeling_bert.py, line 116:
if name in ['output_weights', 'output_bias']:
name = 'classifier/' + name
and in convert_bert_original_tf_checkpoint_to_pytorch.py, set:
config.num_labels = 1089  # number of classification labels, determined by your own fine-tuned BERT
print(f"Building PyTorch model from configuration: {config}")
model = BertForSequenceClassification(config)