PyTorch: feeding an input tensor and viewing the corresponding output values

Feeding specific values into a PyTorch model and inspecting the corresponding output works much the same way as in TensorFlow. Below is a PyTorch walkthrough of computing the output for a given input, using the following code:

# coding:utf-8
import os
import pickle

import torch
import random
import warnings
import numpy as np
import pandas as pd
from tqdm import tqdm
from typing import List, Tuple, Dict
from collections import defaultdict
from torch.utils.data import Dataset

from transformers import (
    BertTokenizer,
    DataCollatorForLanguageModeling,
    DataCollatorForWholeWordMask,
    PreTrainedTokenizer, BertConfig
)
from transformers.utils import logging

from modeling.modeling_nezha.modeling import NeZhaForMaskedLM, NeZhaModel
from modeling.modeling_nezha.configuration import NeZhaConfig
from simple_trainer import Trainer
from pretrain_args import TrainingArguments

warnings.filterwarnings('ignore')
logger = logging.get_logger(__name__)


def seed_everything(seed):
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    return seed

def main():
    """
    download the pretrained model from https://github.com/lonePatient/NeZha_Chinese_PyTorch;
    only the pretrained models nezha-cn-base and nezha-base-wwm are used
    """
    # configuration kept from the original pretraining script;
    # not all of these values are used in this minimal forward-pass demo
    config = {
        'pretrain_type': 'dynamic_mask',  # dynamic_mask, whole_word_mask
        'data_cache_path': '',
        'train_data_path': '/home/xiaoguzai/数据/data/train.txt',
        'test_data_path': '/home/xiaoguzai/数据/data/test.txt',
    }

    mlm_probability = 0.15
    num_train_epochs = 1
    seq_length = 90
    batch_size = 32
    learning_rate = 6e-5
    save_steps = 5000
    seed = 2021

    config['data_cache_path'] = '../user_data/pretrain/'+config['pretrain_type']+'/data.pkl'

    model_path = '/home/xiaoguzai/数据/nezha-chinese-base/pytorch_model.bin'
    config_path = '/home/xiaoguzai/数据/nezha-chinese-base/config.json'

    vocab_file = '/home/xiaoguzai/数据/nezha-chinese-base/vocab.txt'
    tokenizer = BertTokenizer.from_pretrained(vocab_file)
    model_config = NeZhaConfig.from_pretrained(config_path)
    # build the model from the config, then feed an input tensor directly
    nezha = NeZhaModel(config=model_config)
    input_ids = torch.tensor([[1, 2], [3, 4]])
    output = nezha(input_ids)
    print('output = ')
    print(output)

if __name__ == '__main__':
    main()
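To go beyond the raw printout, you can also check the shapes of what comes back. This is a small extra sketch (not part of the original script), assuming this NeZha implementation returns a BERT-style tuple of (sequence_output, pooled_output); adjust the indexing if your version returns a different structure:

# assumption: the model returns a BERT-style tuple (sequence_output, pooled_output)
sequence_output = output[0]  # (batch_size, seq_len, hidden_size), here (2, 2, hidden_size)
pooled_output = output[1]    # (batch_size, hidden_size), here (2, hidden_size)
print(sequence_output.shape)
print(pooled_output.shape)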

Key code:

nezha = NeZhaModel(config=model_config)
input_ids = torch.tensor([[1, 2], [3, 4]])
output = nezha(input_ids)

This snippet shows that the computation process in PyTorch is similar to that in TensorFlow: once the model is defined, you pass the input values to it directly and get the corresponding output values back.
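To see that this define-the-model-then-call-it pattern is not specific to NeZha, here is a minimal self-contained sketch using a plain torch.nn.Linear layer (an illustrative example of my own, not taken from the code above):

import torch
import torch.nn as nn

# define the model first, just like NeZhaModel above
model = nn.Linear(4, 2)

# then call it directly on an input tensor to get the output
x = torch.randn(3, 4)   # a batch of 3 samples with 4 features each
y = model(x)            # output shape: (3, 2)
print('y = ')
print(y)
print(y.shape)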
