Nezha中文预训练模型 (NEZHA Chinese pre-trained language model)

from transformers import AutoTokenizer, AutoModel, AutoModelForMaskedLM

# Demo: load the NEZHA Chinese base checkpoint and inspect its outputs.
# Three objects are loaded from the same checkpoint:
#   - tokenizer: converts Chinese text to input ids
#   - nezha:     the bare encoder (returns hidden states, no task head)
#   - model:     the encoder with a masked-LM head (returns vocab logits)
tokenizer = AutoTokenizer.from_pretrained("peterchou/nezha-chinese-base")
nezha = AutoModel.from_pretrained("peterchou/nezha-chinese-base")
model = AutoModelForMaskedLM.from_pretrained("peterchou/nezha-chinese-base")

text = '知识能够命运,知识能够创造财富'
tokens = tokenizer.tokenize(text)
print(tokens)

# return_tensors="pt" yields a dict of PyTorch tensors:
# input_ids, token_type_ids, attention_mask.
inputs = tokenizer(text, return_tensors="pt")
print(inputs)

# BUG FIX: the original called
#   model(inputs['input_ids'], inputs['token_type_ids'], inputs['attention_mask'])
# but the HF forward signature is
#   forward(input_ids, attention_mask=None, token_type_ids=None, ...)
# so token_type_ids was silently used as the attention mask and vice versa.
# Unpacking the tokenizer dict binds every tensor to its correct keyword.
out = model(**inputs)          # MaskedLMOutput: logits over the vocabulary
print(out)

out2 = nezha(**inputs)         # BaseModelOutputWithPoolingAndCrossAttentions
print(out2)
print(out2.keys())
print(type(out2['last_hidden_state']))
# Shape is (batch, sequence_length, hidden_size) = (1, 17, 768) for this text.
print(out2['last_hidden_state'].shape)
Some weights of BertForMaskedLM were not initialized from the model checkpoint at peterchou/nezha-chinese-base and are newly initialized: ['bert.embeddings.position_embeddings.weight']
You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.
['知', '识', '能', '够', '命', '运', ',', '知', '识', '能', '够', '创', '造', '财', '富']
{'input_ids': tensor([[ 101, 4761, 6399, 5543, 1916, 1462, 6817, 8024, 4761, 6399, 5543, 1916,
         1158, 6863, 6568, 2168,  102]]), 'token_type_ids': tensor([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])}
MaskedLMOutput(loss=None, logits=tensor([[[-1.5926, -1.6417, -2.1210,  ..., -1.5677, -1.1762, -1.6904],
         [-1.5646, -1.6443, -2.0643,  ..., -1.5692, -1.1416, -1.6427],
         [-1.5799, -1.6389, -2.0780,  ..., -1.5667, -1.1474, -1.6620],
         ...,
         [-1.5501, -1.6078, -2.0719,  ..., -1.5519, -1.1284, -1.6379],
         [-1.5677, -1.6274, -2.0926,  ..., -1.5630, -1.1349, -1.6565],
         [-1.5882, -1.6440, -2.1055,  ..., -1.5621, -1.1654, -1.6847]]],
       grad_fn=<AddBackward0>), hidden_states=None, attentions=None)
BaseModelOutputWithPoolingAndCrossAttentions(last_hidden_state=tensor([[[-0.1913, -0.0510,  0.0123,  ..., -0.1211, -0.3183,  0.0061],
         [-0.2017, -0.0358,  0.0159,  ..., -0.1151, -0.3159,  0.0125],
         [-0.1941, -0.0410,  0.0204,  ..., -0.1216, -0.3184,  0.0205],
         ...,
         [-0.1967, -0.0466,  0.0125,  ..., -0.1130, -0.3168,  0.0060],
         [-0.1984, -0.0444,  0.0142,  ..., -0.1152, -0.3206,  0.0107],
         [-0.1933, -0.0509,  0.0115,  ..., -0.1216, -0.3160,  0.0046]]],
       grad_fn=<NativeLayerNormBackward0>), pooler_output=tensor([[-5.1029e-02, -1.6966e-01,  6.4870e-03, -7.7081e-03, -1.0803e-01,
          7.2312e-04,  3.3482e-01, -9.7116e-04,  8.8355e-03,  4.1585e-01,
         -3.0439e-02, -1.5613e-03, -1.1530e-02, -5.9002e-02,  4.3594e-04,
          5.4433e-04,  2.4863e-01, -1.5774e-03,  4.8588e-04,  5.9784e-03,
          7.9474e-01,  1.6567e-02,  1.0524e-01,  9.3665e-03, -2.7747e-01,
         -3.5255e-01, -2.3339e-01,  6.2010e-04,  3.8888e-01, -2.3269e-01,
         -7.0222e-04, -1.1843e-02,  1.1737e-01, -4.6992e-03,  1.2872e-01,
         -3.0624e-04, -4.9215e-03,  1.6829e-03, -4.0819e-04, -6.6047e-03,
          3.0268e-02,  8.3559e-01, -2.1231e-01, -3.5420e-01, -7.0375e-04,
          1.0399e-02,  6.7893e-02,  2.9910e-01, -4.1513e-04,  5.9959e-01,
         -8.3277e-04,  2.6870e-01,  6.2522e-04, -6.0011e-01, -3.1923e-03,
         -4.3258e-02, -8.3354e-04, -3.0177e-03,  6.2084e-03,  8.4210e-04,
         -1.5885e-01,  6.2507e-04, -6.9841e-04, -2.6274e-04, -1.1520e-01,
          8.3163e-03,  8.3690e-04, -1.2227e-02,  5.0720e-04, -3.2884e-04,
         -1.2752e-03, -1.1555e-02, -1.1233e-01,  2.2465e-01,  3.8176e-03,
         -2.1710e-03,  6.2145e-04,  1.1477e-04,  1.0075e-02,  6.7730e-03,
          9.3994e-03, -4.2671e-04,  4.6922e-01, -4.3835e-04, -1.2702e-02,
         -6.0991e-03, -5.4163e-03,  7.8747e-03,  6.0260e-04, -3.7140e-03,
          8.9774e-03,  1.1897e-02,  3.9476e-01,  1.4787e-03,  3.9973e-01,
          1.1530e-03, -4.7883e-01, -8.1434e-02, -1.3192e-01,  7.7904e-01,
          2.2454e-03, -1.3378e-01, -2.9282e-04, -2.3348e-04,  1.7897e-01,
          2.2990e-05, -5.3145e-01, -2.7983e-02, -4.3387e-03, -5.6722e-01,
         -3.3405e-01,  5.6651e-04,  1.1957e-02,  2.9677e-04, -4.3710e-01,
         -3.3572e-01, -1.4837e-01,  8.2434e-03, -1.2545e-02,  5.2055e-04,
          8.8633e-03,  8.4512e-05, -9.8315e-02,  1.0240e-03, -4.7940e-03,
         -8.1524e-03,  2.5969e-04,  7.0512e-03,  1.0873e-02, -1.8317e-01,
          7.8574e-03, -3.6337e-02,  2.4413e-01, -8.8991e-03, -3.0783e-02,
          9.8937e-04,  1.9211e-03,  8.8328e-04, -6.2195e-03,  1.1501e-02,
         -1.7287e-03,  3.8281e-04, -4.1748e-01,  1.4222e-02,  6.6453e-01,
          4.3714e-02,  4.9946e-01, -3.4545e-01,  4.0930e-03, -1.3628e-01,
          1.7160e-03,  8.9560e-03,  9.8256e-03, -6.4217e-03, -4.4301e-02,
          3.4593e-02,  1.2045e-02,  4.4183e-01, -9.0079e-03, -5.7060e-03,
          3.4323e-01,  3.7338e-02, -2.4785e-01, -8.4519e-02,  8.2782e-03,
         -1.0620e-02, -2.8561e-01, -8.4561e-03,  3.1417e-02, -2.8274e-01,
         -1.4464e-03, -5.4514e-05, -6.5956e-03, -8.5699e-01,  4.4661e-03,
         -1.1191e-02,  3.1446e-01,  9.5307e-04, -2.8998e-01, -6.7839e-03,
         -8.9906e-03,  1.7238e-03,  9.1010e-03,  6.3538e-04, -4.4476e-01,
          4.4514e-04,  5.2234e-01, -1.7166e-02,  3.1687e-01,  1.6149e-01,
          9.3662e-03,  8.0253e-04, -1.2632e-01, -1.3489e-03,  2.3852e-01,
         -2.3332e-04, -3.9004e-01, -1.4654e-03, -4.0551e-01, -4.9734e-05,
         -8.6607e-03, -9.8365e-03,  5.6043e-05, -2.4450e-04,  8.8161e-04,
          2.1533e-04,  2.6926e-01,  1.1386e-02,  9.1041e-04, -7.6836e-03,
         -6.3194e-04, -6.8669e-03,  7.4586e-03, -3.3885e-01, -4.4196e-01,
          8.1551e-03,  2.8300e-01, -3.8932e-03, -1.9660e-01, -1.7340e-01,
         -9.4308e-02, -5.9331e-04, -6.1451e-04, -3.6296e-02, -2.1179e-01,
          2.7499e-03, -8.7936e-03,  2.3282e-01, -4.1670e-01, -1.1626e-03,
          8.2756e-03,  3.6571e-06,  3.3993e-02, -3.7473e-03, -1.5424e-01,
          2.7936e-03,  8.8693e-04,  9.1645e-03,  6.1739e-04, -5.5687e-02,
          6.9225e-04, -8.7579e-03,  2.8067e-01, -3.1655e-03,  1.7089e-01,
          6.1631e-04,  7.5193e-05,  5.2853e-04, -1.9561e-03,  2.3191e-01,
         -4.0998e-01,  5.7983e-03,  8.7699e-04,  3.6149e-03, -3.4781e-04,
          5.0373e-01, -3.2783e-01, -2.1776e-01, -2.0652e-01, -2.8977e-03,
         -4.2094e-01, -7.0811e-01, -8.3726e-04,  4.8477e-04, -2.3455e-03,
         -4.5564e-03,  7.7907e-02, -6.3200e-03,  7.4007e-04, -5.5619e-03,
          5.6123e-03,  9.6769e-05, -2.8887e-03, -9.6143e-02,  3.4101e-01,
         -1.1187e-02,  4.5737e-01,  6.1995e-04,  3.4062e-04,  2.5164e-04,
          1.0440e-02, -3.9488e-01,  1.2606e-02,  3.1740e-02,  1.0401e-02,
          2.0311e-04, -7.7223e-05, -9.9816e-03,  9.0558e-03, -3.1412e-02,
          4.4180e-04,  4.8466e-02, -5.6224e-01,  4.9986e-04, -1.8775e-02,
          8.4066e-03, -2.0421e-01,  8.1857e-03,  9.4255e-02,  1.3565e-02,
         -2.3419e-04,  6.1787e-03,  9.1697e-04,  3.5826e-02,  9.4323e-04,
         -7.3501e-03, -4.6027e-02, -9.9166e-05, -9.6815e-03, -1.6210e-02,
         -1.0375e-02,  1.4454e-03,  1.2649e-01,  6.4321e-03, -1.8174e-01,
         -2.1774e-01, -3.2312e-01,  9.1127e-04, -9.4597e-02,  4.2153e-02,
          2.1828e-01,  1.1447e-03,  2.1295e-01,  4.9865e-01,  1.7826e-04,
         -1.2723e-03, -4.1016e-01,  1.0868e-03,  4.8855e-04,  3.4450e-01,
          7.0503e-04,  1.4067e-02, -1.0058e-03,  8.6671e-03, -4.3863e-03,
         -1.8769e-01,  3.0665e-01, -6.7084e-03,  2.5558e-05,  1.4885e-02,
         -4.1675e-04, -1.9737e-02, -8.2833e-03, -4.8724e-01, -1.1312e-03,
          3.1511e-03,  7.9780e-03, -3.8805e-02, -2.1308e-01, -3.4526e-03,
          3.4465e-01, -9.8737e-03,  3.1312e-01, -5.7533e-01, -2.2543e-03,
         -1.6487e-03, -4.3220e-04,  1.0621e-02,  2.4811e-02, -6.3967e-04,
          7.3052e-02, -3.2375e-01,  7.8260e-02,  6.5623e-04,  8.3054e-04,
          4.4566e-02, -2.9160e-04, -1.7982e-03,  4.7913e-03,  2.8718e-04,
          6.9763e-04, -6.0926e-04, -9.8733e-03, -7.6052e-04, -1.3002e-01,
         -2.3664e-01, -1.2027e-01,  1.1355e-04,  9.3792e-04, -1.5537e-03,
          1.2176e-02, -2.1243e-02, -3.7451e-01,  7.6080e-03,  2.5256e-03,
          3.5139e-01, -2.8592e-01,  1.6265e-03, -3.6388e-03, -6.7288e-03,
          3.4583e-01,  3.9162e-04, -1.9681e-03, -1.3231e-02, -2.4712e-02,
         -8.4445e-02,  1.0328e-02, -1.4612e-04,  1.6121e-01, -2.2219e-03,
         -7.9241e-03, -4.7988e-01,  8.8807e-03,  1.8361e-02, -1.9620e-01,
         -9.1161e-04, -2.6175e-02,  5.9223e-01, -1.0952e-01,  6.8576e-03,
          4.1844e-01, -1.2716e-01,  1.7043e-03,  9.3017e-03,  9.4992e-05,
         -4.5937e-02,  4.5851e-02,  5.1522e-01,  9.9027e-03,  1.2249e-01,
         -6.3370e-04,  7.2479e-04,  4.9273e-04, -1.0282e-02,  8.1039e-03,
          3.4553e-01, -6.9342e-02,  1.4222e-02,  1.1507e-01, -4.1564e-04,
         -1.7145e-01,  1.1483e-03,  7.0782e-01, -5.4101e-04, -1.2003e-02,
         -3.3010e-01, -1.5098e-04, -1.8980e-03, -2.2647e-01, -2.7264e-04,
         -3.4296e-01, -1.0119e-02, -5.9058e-01,  9.6881e-03, -1.5814e-04,
          4.0720e-01,  2.2149e-02,  3.0984e-04, -1.1243e-02, -1.1986e-02,
         -2.9267e-02,  4.9154e-04,  6.2062e-03,  1.0542e-03, -1.8985e-02,
          9.7887e-03,  2.0775e-03, -4.8728e-04, -3.6318e-01, -2.3336e-01,
          4.4117e-03,  2.8083e-02, -3.4356e-01, -6.0119e-04,  9.6520e-02,
         -1.3653e-02,  6.4200e-04,  1.8982e-01, -3.8512e-01,  4.3891e-04,
         -9.5859e-03,  3.8321e-03, -1.1774e-02, -6.1084e-02, -3.4491e-02,
         -5.9879e-02, -4.4396e-03,  4.3777e-01, -2.4149e-02,  6.2134e-04,
         -1.1332e-02, -1.0974e-02, -6.9315e-04, -2.5751e-03,  5.0493e-01,
          4.5255e-01, -5.9964e-04,  3.5678e-02, -1.0604e-02,  4.1083e-01,
          4.7627e-03,  1.3406e-01,  3.5403e-01,  2.5516e-03, -8.9925e-04,
         -7.1017e-02, -1.1486e-02,  1.2188e-02,  3.9377e-01,  2.6515e-04,
         -4.6200e-02, -1.6250e-03,  3.2648e-04, -9.6110e-03,  1.7935e-02,
          8.5781e-03,  1.2467e-04,  3.4929e-01, -1.1930e-03,  7.9480e-03,
         -6.8504e-03,  7.2814e-04, -9.8495e-02, -8.1816e-03,  1.0369e-02,
         -6.4301e-02,  6.0406e-04, -1.0740e-02,  3.1351e-01, -8.1188e-04,
          6.4491e-04,  8.2162e-04,  3.1652e-01,  1.9464e-01, -3.7527e-03,
          1.0603e-02, -5.0650e-03, -2.0726e-04, -1.6830e-03,  6.2950e-04,
          3.6911e-01, -3.7772e-03,  3.7588e-01, -1.2122e-03,  1.3955e-02,
          8.9212e-04, -1.6115e-02,  5.9378e-04,  5.4866e-01, -7.6594e-04,
          1.4055e-01, -1.7929e-02,  5.0845e-01, -1.4716e-03, -2.8486e-03,
          5.3367e-02,  4.5579e-03,  1.2195e-03, -4.5206e-03, -1.1514e-03,
          6.2013e-01,  8.9717e-03, -1.9220e-03, -1.0324e-02,  1.6261e-01,
          5.0904e-01,  4.7320e-04,  5.0617e-01,  3.8987e-03,  3.7471e-04,
         -7.7339e-03, -3.2243e-02,  2.6857e-01, -1.7030e-01, -1.3846e-01,
          7.7617e-04,  1.1035e-02,  2.7923e-01,  1.0507e-03, -8.9209e-03,
         -1.1088e-02,  8.4190e-03,  6.6923e-01, -4.6453e-04, -1.1021e-02,
          1.9103e-02, -1.0246e-04, -9.7585e-03,  4.7209e-01, -8.0939e-01,
          8.3452e-03, -3.7610e-04,  5.6465e-03, -1.0598e-02,  3.2162e-01,
         -2.1630e-04,  1.1493e-03,  2.3755e-05,  2.7947e-04,  7.6902e-03,
          1.4392e-02, -4.2042e-04, -1.3504e-02,  2.4362e-01,  7.8663e-03,
          1.6250e-01, -1.1735e-03,  2.5068e-03, -3.4205e-01,  1.0518e-01,
         -1.4692e-02, -1.5355e-03,  9.0433e-03,  7.0169e-04,  1.9775e-02,
         -4.9771e-02, -5.0980e-02, -4.7841e-01, -1.1275e-02, -2.1160e-01,
          2.9870e-01,  1.3427e-02, -3.7314e-02,  1.4522e-02, -4.3206e-02,
          1.3477e-02,  4.1396e-04, -9.5216e-03,  8.3291e-02,  2.3300e-04,
          5.0016e-01, -9.7559e-02, -4.1975e-01,  1.5092e-01,  1.6560e-03,
         -4.2606e-01, -8.1320e-03, -4.4904e-01, -6.2968e-05, -1.6768e-03,
         -4.9189e-02, -4.0009e-01, -1.1999e-04,  5.6322e-05,  4.2896e-01,
          1.7864e-01,  1.0448e-01, -1.0171e-04, -8.9524e-03,  1.2068e-02,
         -3.4177e-03,  1.3030e-03, -6.7867e-04, -3.6601e-03, -2.7148e-04,
         -1.3227e-02, -1.5156e-04,  1.1793e-03,  1.4107e-02,  3.1849e-02,
          1.9121e-03, -1.7778e-01,  3.0174e-04, -2.2365e-01, -5.2552e-01,
          2.5956e-01, -1.7626e-01, -4.5957e-02,  6.0495e-03, -3.9370e-04,
          7.8750e-04, -1.8855e-01, -5.1356e-04,  6.2647e-02,  1.0776e-01,
          9.1292e-03, -1.0974e-01,  5.8058e-04,  6.6234e-03, -1.8747e-01,
         -3.8403e-01,  3.6653e-01, -1.2749e-01,  8.3660e-03,  8.3055e-03,
         -3.0425e-01,  6.4011e-03,  1.1528e-02,  3.2256e-03, -2.6079e-03,
          9.1212e-04, -4.2959e-01,  3.2193e-01, -1.7376e-02, -1.2714e-02,
         -1.1279e-02,  9.0377e-02,  3.9651e-02,  5.6007e-03, -1.0877e-02,
          3.3484e-01, -4.8387e-03,  4.7019e-01,  2.7866e-01,  7.2845e-04,
         -7.6336e-01,  4.7871e-02,  6.4731e-04, -4.1836e-01, -3.6133e-03,
         -3.8399e-01,  1.5357e-01,  3.5179e-04, -9.3747e-03, -4.4888e-02,
          3.0433e-01,  2.4880e-03,  6.5122e-04, -1.0332e-02, -4.6579e-04,
          1.4631e-01,  8.8613e-03, -2.0261e-03, -1.1427e-03, -5.6856e-02,
         -2.3220e-04,  1.3819e-02,  1.1516e-02,  5.3917e-04,  8.7263e-03,
          1.4934e-01,  3.2466e-01, -3.4746e-01, -1.6781e-04, -3.1168e-01,
          3.8333e-03, -1.6643e-03,  5.9037e-03, -2.7703e-01,  1.2811e-03,
          8.7075e-02,  8.7331e-01, -1.8981e-01, -5.6090e-03, -4.5516e-03,
          9.7162e-04, -3.7087e-02,  1.5527e-01,  2.3867e-03,  2.4515e-06,
         -2.9242e-04, -5.5415e-04,  7.9066e-03, -1.4177e-03, -3.7298e-03,
         -8.2095e-02, -1.2691e-03, -1.7151e-01, -2.4832e-03, -6.1433e-01,
          2.4151e-03, -4.5647e-01,  8.9716e-03, -5.4747e-04, -1.2314e-02,
          8.5324e-03,  1.5137e-02, -3.1977e-01, -3.8588e-01,  5.5873e-05,
         -1.0873e-02, -2.6256e-01,  1.4627e-01, -8.8445e-03,  2.9819e-01,
         -1.2051e-01,  3.2118e-04, -9.7171e-04]], grad_fn=<TanhBackward0>), hidden_states=None, past_key_values=None, attentions=None, cross_attentions=None)
odict_keys(['last_hidden_state', 'pooler_output'])
<class 'torch.Tensor'>
torch.Size([1, 17, 768])
  • 1
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

YingJingh

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值