import torch
from torch import nn
from torch import optim as Optimizer
class modelName(nn.Module):
    """Template network showing the layers most commonly used in these notes:
    Embedding -> RNN -> Dropout -> Linear -> BatchNorm1d.

    The notes called every layer with no arguments; dimensions are now
    parameters with small defaults so the template actually runs — adjust
    them for a real task.
    """

    def __init__(self, vocab_size=100, embed_dim=16, hidden_dim=32, num_classes=2):
        super().__init__()  # required so nn.Module bookkeeping is initialized
        self.embedding = nn.Embedding(vocab_size, embed_dim)
        # batch_first=True: inputs are (batch, seq_len)
        self.rnn = nn.RNN(embed_dim, hidden_dim, batch_first=True)
        self.dropout = nn.Dropout(p=0.5)
        # notes said nn.linear() — the class is nn.Linear
        self.linear = nn.Linear(hidden_dim, num_classes)
        self.bn = nn.BatchNorm1d(num_classes)

    def forward(self, inputs):  # notes had the typo "forword"; nn.Module dispatches to "forward"
        """One forward pass: (batch, seq_len) int tensor -> (batch, num_classes) logits."""
        embedded = self.dropout(self.embedding(inputs))
        outputs, _hidden = self.rnn(embedded)
        last_step = outputs[:, -1, :]  # final time step as the sequence summary
        return self.bn(self.linear(last_step))


def trainging_common_sentences():
    """Common training-step boilerplate (misspelled name kept from the notes
    for compatibility).

    Fixes vs. the notes: gradients come from ``loss.backward()`` —
    ``optimizer.backward(loss)`` does not exist on torch optimizers.
    """
    model = modelName()
    # notes said Optimizer.XXX(); any concrete optimizer over model.parameters() works
    optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
    loss_fn = nn.CrossEntropyLoss()
    inputs = torch.randint(0, 100, (8, 5))  # placeholder data; long dtype for Embedding
    target = torch.randint(0, 2, (8,))
    optimizer.zero_grad()   # clear stale gradients before this step
    output = model(inputs)
    loss = loss_fn(output, target)
    loss.backward()         # compute gradients from the loss
    optimizer.step()        # apply the parameter update
# ---------- Keras ----------
from keras.layers import Input,Embedding,Dense
from keras.models import Model
import keras.backend as K
from keras.optimizers import Adam
def create_model():
    """Build a minimal Keras model: token ids -> embedding -> sigmoid score.

    ``dim_1``, ``vocab_size`` (was ``input_dim``) and ``out_dim`` are
    placeholders from the notes — define them before calling.
    """
    inputs = Input((dim_1,))  # the notes shadowed the builtin `input`; renamed
    # Embedding's signature is (input_dim=vocab_size, output_dim=embed_dim);
    # the notes passed the arguments in reversed order.
    embed = Embedding(input_dim, out_dim)(inputs)
    # The notes passed an undefined `output` to Model; give it a real head.
    output = Dense(1, activation="sigmoid")(embed)
    model = Model(inputs, output)
    return model  # NOTE(review): notes returned nothing; returning so train_* can use it


def train_common_sentences():
    """Template for compiling and fitting the model with an add_loss-style loss.

    ``y``, ``x_train`` and ``y_train`` are placeholders from the notes —
    supply real tensors/arrays before running.
    """
    model = create_model()
    output = model.output
    loss = K.binary_crossentropy(y, output)
    model.add_loss(loss)              # loss attached here, so compile needs no loss=
    model.compile(optimizer="sgd")
    model.fit(x_train, y_train, epochs=5, batch_size=32)