import paddle
import paddle.nn.functional as F
from paddle.vision.transforms import ToTensor
import numpy as np
import matplotlib.pyplot as plt
# Log the framework version so recorded runs are attributable.
print(paddle.__version__)

# ToTensor converts HWC uint8 images into CHW float tensors scaled to [0, 1].
transform = ToTensor()

# CIFAR-10 train/test splits (32x32 RGB images, 10 classes); downloaded on first use.
cifar10_train = paddle.vision.datasets.Cifar10(mode='train', transform=transform)
cifar10_test = paddle.vision.datasets.Cifar10(mode='test', transform=transform)
class MyNet(paddle.nn.Layer):
    """Small convolutional classifier for 32x32 RGB inputs.

    Architecture: three 3x3 conv layers (32, 64, 64 channels) with ReLU,
    2x2 max-pooling after the first two convs, then a 1024->64->num_classes
    fully-connected head.  Returns raw logits (no softmax).
    """

    def __init__(self, num_classes=1):
        super(MyNet, self).__init__()
        self.conv1 = paddle.nn.Conv2D(in_channels=3, out_channels=32, kernel_size=(3, 3))
        self.pool1 = paddle.nn.MaxPool2D(kernel_size=2, stride=2)
        self.conv2 = paddle.nn.Conv2D(in_channels=32, out_channels=64, kernel_size=(3, 3))
        self.pool2 = paddle.nn.MaxPool2D(kernel_size=2, stride=2)
        self.conv3 = paddle.nn.Conv2D(in_channels=64, out_channels=64, kernel_size=(3, 3))
        self.flatten = paddle.nn.Flatten()
        # For 32x32 input the conv/pool stack yields 64 x 4 x 4 = 1024 features.
        self.linear1 = paddle.nn.Linear(in_features=1024, out_features=64)
        self.linear2 = paddle.nn.Linear(in_features=64, out_features=num_classes)

    def forward(self, x):
        """Compute class logits for a batch of images shaped (N, 3, 32, 32)."""
        out = self.pool1(F.relu(self.conv1(x)))
        out = self.pool2(F.relu(self.conv2(out)))
        out = F.relu(self.conv3(out))
        out = self.flatten(out)
        out = F.relu(self.linear1(out))
        return self.linear2(out)
# Training hyper-parameters.
epoch_num = 10          # full passes over the training set
batch_size = 32         # samples per optimizer step
learning_rate = 0.001   # Adam learning rate
# Per-epoch validation metrics, appended to by train().
val_acc_history = []
val_loss_history = []
def _evaluate(model, valid_loader):
    """Run one validation pass over *valid_loader*.

    Switches the model to eval mode, computes cross-entropy loss and accuracy
    per batch under ``paddle.no_grad()`` (no autograd graph is needed for
    evaluation), then restores train mode.

    Returns:
        tuple: (mean accuracy, mean loss) over all validation batches.
    """
    model.eval()
    accuracies = []
    losses = []
    # no_grad avoids building the autograd graph during evaluation,
    # saving memory and time; the original code tracked gradients here.
    with paddle.no_grad():
        for batch_id, data in enumerate(valid_loader()):
            x_data = data[0]
            # Labels arrive as shape (N,); cross_entropy/accuracy expect (N, 1).
            y_data = paddle.unsqueeze(paddle.to_tensor(data[1]), 1)
            logits = model(x_data)
            loss = F.cross_entropy(logits, y_data)
            acc = paddle.metric.accuracy(logits, y_data)
            accuracies.append(acc.numpy())
            losses.append(loss.numpy())
    model.train()
    return np.mean(accuracies), np.mean(losses)


def train(model):
    """Train *model* on CIFAR-10 for ``epoch_num`` epochs with Adam.

    Uses the module-level ``cifar10_train`` / ``cifar10_test`` datasets and
    hyper-parameters, and appends each epoch's validation accuracy and loss
    to the module-level ``val_acc_history`` / ``val_loss_history`` lists.

    Args:
        model: a paddle.nn.Layer producing class logits for image batches.
    """
    print('start training ... ')
    # turn into training mode
    model.train()
    opt = paddle.optimizer.Adam(learning_rate=learning_rate,
                                parameters=model.parameters())
    train_loader = paddle.io.DataLoader(cifar10_train,
                                        shuffle=True,
                                        batch_size=batch_size)
    valid_loader = paddle.io.DataLoader(cifar10_test, batch_size=batch_size)
    for epoch in range(epoch_num):
        for batch_id, data in enumerate(train_loader()):
            x_data = data[0]
            # Labels arrive as shape (N,); cross_entropy expects (N, 1).
            y_data = paddle.unsqueeze(paddle.to_tensor(data[1]), 1)
            logits = model(x_data)
            loss = F.cross_entropy(logits, y_data)
            if batch_id % 1000 == 0:
                print("epoch: {}, batch_id: {}, loss is: {}".format(epoch, batch_id, loss.numpy()))
            loss.backward()
            opt.step()
            opt.clear_grad()
        # evaluate model after one epoch
        avg_acc, avg_loss = _evaluate(model, valid_loader)
        print("[validation] accuracy/loss: {}/{}".format(avg_acc, avg_loss))
        val_acc_history.append(avg_acc)
        val_loss_history.append(avg_loss)
# Build a 10-way CIFAR-10 classifier and run the training/validation loop.
model = MyNet(num_classes=10)
train(model)
# Plot the per-epoch validation accuracy collected during training.
plt.plot(val_acc_history, label = 'validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.ylim([0.5, 0.8])
plt.legend(loc='lower right')
The output of a run is as follows:
2.0.2
start training ...
epoch: 0, batch_id: 0, loss is: [2.5867429]
epoch: 0, batch_id: 1000, loss is: [1.4291399]
[validation] accuracy/loss: 0.5660942196846008/1.192248821258545
epoch: 1, batch_id: 0, loss is: [1.4453502]
epoch: 1, batch_id: 1000, loss is: [0.9542478]
[validation] accuracy/loss: 0.6403753757476807/1.0135371685028076
epoch: 2, batch_id: 0, loss is: [0.7726191]
epoch: 2, batch_id: 1000, loss is: [0.6877733]
[validation] accuracy/loss: 0.6546525359153748/0.9844738841056824
epoch: 3, batch_id: 0, loss is: [0.9387559]
epoch: 3, batch_id: 1000, loss is: [0.6351695]
[validation] accuracy/loss: 0.6799121499061584/0.921659529209137
epoch: 4, batch_id: 0, loss is: [0.85253346]
epoch: 4, batch_id: 1000, loss is: [1.2389075]
[validation] accuracy/loss: 0.6968849897384644/0.8811760544776917
epoch: 5, batch_id: 0, loss is: [0.57655555]
epoch: 5, batch_id: 1000, loss is: [0.5159716]
[validation] accuracy/loss: 0.7062699794769287/0.8671459555625916
epoch: 6, batch_id: 0, loss is: [0.5851355]
epoch: 6, batch_id: 1000, loss is: [0.8208879]
[validation] accuracy/loss: 0.7045726776123047/0.8722187280654907
epoch: 7, batch_id: 0, loss is: [0.5359383]
epoch: 7, batch_id: 1000, loss is: [0.6991255]
[validation] accuracy/loss: 0.6884983777999878/0.9462959170341492
epoch: 8, batch_id: 0, loss is: [0.67292917]
epoch: 8, batch_id: 1000, loss is: [0.7463263]
[validation] accuracy/loss: 0.6968849897384644/0.9279959797859192
epoch: 9, batch_id: 0, loss is: [0.44502622]
epoch: 9, batch_id: 1000, loss is: [0.36299592]
[validation] accuracy/loss: 0.7103634476661682/0.9326649904251099
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/matplotlib/cbook/__init__.py:2349: DeprecationWarning: Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated, and in 3.8 it will stop working
if isinstance(obj, collections.Iterator):
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/matplotlib/cbook/__init__.py:2366: DeprecationWarning: Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated, and in 3.8 it will stop working
return list(data) if isinstance(data, collections.MappingView) else data
<matplotlib.legend.Legend at 0x7f763fe00250>