import torch
import torch.nn as nn
import torchvision
import torchvision.transforms as transforms

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

num_epochs = 80
batch_size = 100
learning_rate = 0.001

transform = transforms.Compose([
    transforms.Pad(4),
    transforms.RandomHorizontalFlip(),
    transforms.RandomCrop(32),
    transforms.ToTensor()])
# CIFAR-10 dataset (download=True fetches it on first run); the test set is
# only converted to tensors, without the training-time augmentation
train_dataset = torchvision.datasets.CIFAR10(root='../../data',
                                             train=True,
                                             transform=transform,
                                             download=True)

test_dataset = torchvision.datasets.CIFAR10(root='../../data',
                                            train=False,
                                            transform=transforms.ToTensor())
train_loader = torch.utils.data.DataLoader(dataset=train_dataset,
                                           batch_size=batch_size,
                                           shuffle=True)

test_loader = torch.utils.data.DataLoader(dataset=test_dataset,
                                          batch_size=batch_size,
                                          shuffle=False)
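# Optional sanity check (not part of training): one batch from the loader
# should have images of shape [100, 3, 32, 32] and labels of shape [100].
sample_images, sample_labels = next(iter(train_loader))
print(sample_images.shape, sample_labels.shape)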
# 3x3 convolution with padding
def conv3x3(in_channels, out_channels, stride=1):
    return nn.Conv2d(in_channels, out_channels, kernel_size=3,
                     stride=stride, padding=1, bias=False)

# Residual block: two 3x3 convolutions with batch norm, plus a skip connection
class ResidualBlock(nn.Module):
    def __init__(self, in_channels, out_channels, stride=1, downsample=None):
        super(ResidualBlock, self).__init__()
        self.conv1 = conv3x3(in_channels, out_channels, stride)
        self.bn1 = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(out_channels, out_channels)
        self.bn2 = nn.BatchNorm2d(out_channels)
        self.downsample = downsample

    def forward(self, x):
        residual = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        if self.downsample is not None:
            residual = self.downsample(x)
        out += residual
        out = self.relu(out)
        return out
# ResNet for 32x32 inputs: an initial 3x3 convolution, three stages of residual
# blocks (16, 32 and 64 channels), average pooling and a linear classifier
class ResNet(nn.Module):
    def __init__(self, block, layers, num_classes=10):
        super(ResNet, self).__init__()
        self.in_channels = 16
        self.conv = conv3x3(3, 16)
        self.bn = nn.BatchNorm2d(16)
        self.relu = nn.ReLU(inplace=True)
        self.layer1 = self.make_layer(block, 16, layers[0])
        self.layer2 = self.make_layer(block, 32, layers[1], 2)
        self.layer3 = self.make_layer(block, 64, layers[2], 2)
        self.avg_pool = nn.AvgPool2d(8)
        self.fc = nn.Linear(64, num_classes)

    def make_layer(self, block, out_channels, blocks, stride=1):
        # Downsample the skip connection when the spatial size or channel
        # count changes in the first block of a stage
        downsample = None
        if (stride != 1) or (self.in_channels != out_channels):
            downsample = nn.Sequential(
                conv3x3(self.in_channels, out_channels, stride=stride),
                nn.BatchNorm2d(out_channels))
        layers = []
        layers.append(block(self.in_channels, out_channels, stride, downsample))
        self.in_channels = out_channels
        for i in range(1, blocks):
            layers.append(block(out_channels, out_channels))
        return nn.Sequential(*layers)

    def forward(self, x):
        out = self.conv(x)
        out = self.bn(out)
        out = self.relu(out)
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.avg_pool(out)
        out = out.view(out.size(0), -1)
        out = self.fc(out)
        return out
model = ResNet(ResidualBlock, [2, 2, 2]).to(device)

criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
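# Optional sanity check: a dummy forward pass on random data. The three stages
# give 16x32x32 -> 32x16x16 -> 64x8x8 feature maps, the 8x8 average pool reduces
# this to a 64-dim vector, and the final layer produces 10 class scores per image.
with torch.no_grad():
    dummy = torch.randn(2, 3, 32, 32).to(device)
    print(model(dummy).shape)  # expected: torch.Size([2, 10])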
# Helper for updating the optimizer's learning rate
def update_lr(optimizer, lr):
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
# Train the model
total_step = len(train_loader)
curr_lr = learning_rate
for epoch in range(num_epochs):
    for i, (images, labels) in enumerate(train_loader):
        images = images.to(device)
        labels = labels.to(device)

        # Forward pass
        outputs = model(images)
        loss = criterion(outputs, labels)

        # Backward and optimize
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        if (i + 1) % 100 == 0:
            print("Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}"
                  .format(epoch + 1, num_epochs, i + 1, total_step, loss.item()))
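    # Note: update_lr and curr_lr are defined above but never used in this
    # listing; presumably a learning-rate decay step like the following
    # (schedule assumed here) belongs at the end of each epoch.
    if (epoch + 1) % 20 == 0:
        curr_lr /= 3
        update_lr(optimizer, curr_lr)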
Epoch [1/80], Step [100/500], Loss: 1.5762
Epoch [1/80], Step [200/500], Loss: 1.3403
Epoch [1/80], Step [300/500], Loss: 1.3554
Epoch [1/80], Step [400/500], Loss: 1.0875
Epoch [1/80], Step [500/500], Loss: 1.2314
Epoch [2/80], Step [100/500], Loss: 1.0771
Epoch [2/80], Step [200/500], Loss: 1.0226
Epoch [2/80], Step [300/500], Loss: 0.9251
Epoch [2/80], Step [400/500], Loss: 0.9837
Epoch [2/80], Step [500/500], Loss: 0.8946
Epoch [3/80], Step [100/500], Loss: 1.0241
Epoch [3/80], Step [200/500], Loss: 0.8404
Epoch [3/80], Step [300/500], Loss: 0.8677
Epoch [3/80], Step [400/500], Loss: 0.8813
Epoch [3/80], Step [500/500], Loss: 0.8162
Epoch [4/80], Step [100/500], Loss: 0.8872
Epoch [4/80], Step [200/500], Loss: 0.8941
Epoch [4/80], Step [300/500], Loss: 0.6441
Epoch [4/80], Step [400/500], Loss: 0.6321
Epoch [4/80], Step [500/500], Loss: 0.7459
Epoch [5/80], Step [100/500], Loss: 0.6637
Epoch [5/80], Step [200/500], Loss: 0.6761
Epoch [5/80], Step [300/500], Loss: 0.7153
Epoch [5/80], Step [400/500], Loss: 0.5910
Epoch [5/80], Step [500/500], Loss: 0.7322
Epoch [6/80], Step [100/500], Loss: 0.6090
Epoch [6/80], Step [200/500], Loss: 0.5780
Epoch [6/80], Step [300/500], Loss: 0.7058
Epoch [6/80], Step [400/500], Loss: 0.5379
Epoch [6/80], Step [500/500], Loss: 0.5864
Epoch [7/80], Step [100/500], Loss: 0.7285
Epoch [7/80], Step [200/500], Loss: 0.7091
Epoch [7/80], Step [300/500], Loss: 0.4751
Epoch [7/80], Step [400/500], Loss: 0.7019
Epoch [7/80], Step [500/500], Loss: 0.6529
Epoch [8/80], Step [100/500], Loss: 0.5066
Epoch [8/80], Step [200/500], Loss: 0.5977
Epoch [8/80], Step [300/500], Loss: 0.5677
Epoch [8/80], Step [400/500], Loss: 0.4757
Epoch [8/80], Step [500/500], Loss: 0.5904
Epoch [9/80], Step [100/500], Loss: 0.5766
Epoch [9/80], Step [200/500], Loss: 0.4707
Epoch [9/80], Step [300/500], Loss: 0.7031
Epoch [9/80], Step [400/500], Loss: 0.5224
Epoch [9/80], Step [500/500], Loss: 0.6124
Epoch [10/80], Step [100/500], Loss: 0.6083
Epoch [10/80], Step [200/500], Loss: 0.3922
Epoch [10/80], Step [300/500], Loss: 0.4919
Epoch [10/80], Step [400/500], Loss: 0.4444
Epoch [10/80], Step [500/500], Loss: 0.7481
Epoch [11/80], Step [100/500], Loss: 0.3925
Epoch [11/80], Step [200/500], Loss: 0.4807
Epoch [11/80], Step [300/500], Loss: 0.5437
Epoch [11/80], Step [400/500], Loss: 0.3982
Epoch [11/80], Step [500/500], Loss: 0.6027
Epoch [12/80], Step [100/500], Loss: 0.4001
Epoch [12/80], Step [200/500], Loss: 0.4891
Epoch [12/80], Step [300/500], Loss: 0.5112
Epoch [12/80], Step [400/500], Loss: 0.3395
Epoch [12/80], Step [500/500], Loss: 0.4576
Epoch [13/80], Step [100/500], Loss: 0.4057
Epoch [13/80], Step [200/500], Loss: 0.3944
Epoch [13/80], Step [300/500], Loss: 0.4909
Epoch [13/80], Step [400/500], Loss: 0.4984
Epoch [13/80], Step [500/500], Loss: 0.4071
Epoch [14/80], Step [100/500], Loss: 0.4293
Epoch [14/80], Step [200/500], Loss: 0.3345
Epoch [14/80], Step [300/500], Loss: 0.3877
Epoch [14/80], Step [400/500], Loss: 0.5184
Epoch [14/80], Step [500/500], Loss: 0.4583
Epoch [15/80], Step [100/500], Loss: 0.3694
Epoch [15/80], Step [200/500], Loss: 0.3034
Epoch [15/80], Step [300/500], Loss: 0.4626
Epoch [15/80], Step [400/500], Loss: 0.4211
Epoch [15/80], Step [500/500], Loss: 0.4108
Epoch [16/80], Step [100/500], Loss: 0.3813
Epoch [16/80], Step [200/500], Loss: 0.3624
Epoch [16/80], Step [300/500], Loss: 0.3195
Epoch [16/80], Step [400/500], Loss: 0.6363
Epoch [16/80], Step [500/500], Loss: 0.3853
Epoch [17/80], Step [100/500], Loss: 0.2451
Epoch [17/80], Step [200/500], Loss: 0.4081
Epoch [17/80], Step [300/500], Loss: 0.4094
Epoch [17/80], Step [400/500], Loss: 0.5220
Epoch [17/80], Step [500/500], Loss: 0.4002
Epoch [18/80], Step [100/500], Loss: 0.3539
Epoch [18/80], Step [200/500], Loss: 0.3408
Epoch [18/80], Step [300/500], Loss: 0.4528
Epoch [18/80], Step [400/500], Loss: 0.4958
Epoch [18/80], Step [500/500], Loss: 0.3533
Epoch [19/80], Step [100/500], Loss: 0.3593
Epoch [19/80], Step [200/500], Loss: 0.3446
Epoch [19/80], Step [300/500], Loss: 0.5390
Epoch [19/80], Step [400/500], Loss: 0.3758
Epoch [19/80], Step [500/500], Loss: 0.5264
Epoch [20/80], Step [100/500], Loss: 0.3963
Epoch [20/80], Step [200/500], Loss: 0.3253
Epoch [20/80], Step [300/500], Loss: 0.2982
Epoch [20/80], Step [400/500], Loss: 0.4432
Epoch [20/80], Step [500/500], Loss: 0.4277
Epoch [21/80], Step [100/500], Loss: 0.3551
Epoch [21/80], Step [200/500], Loss: 0.4178
Epoch [21/80], Step [300/500], Loss: 0.3620
Epoch [21/80], Step [400/500], Loss: 0.4359
Epoch [21/80], Step [500/500], Loss: 0.4085
Epoch [22/80], Step [100/500], Loss: 0.3407
Epoch [22/80], Step [200/500], Loss: 0.3673
Epoch [22/80], Step [300/500], Loss: 0.2835
Epoch [22/80], Step [400/500], Loss: 0.4101
Epoch [22/80], Step [500/500], Loss: 0.3882
Epoch [23/80], Step [100/500], Loss: 0.3227
Epoch [23/80], Step [200/500], Loss: 0.4617
Epoch [23/80], Step [300/500], Loss: 0.2973
Epoch [23/80], Step [400/500], Loss: 0.3678
Epoch [23/80], Step [500/500], Loss: 0.3429
Epoch [24/80], Step [100/500], Loss: 0.1978
Epoch [24/80], Step [200/500], Loss: 0.2038
Epoch [24/80], Step [300/500], Loss: 0.2736
Epoch [24/80], Step [400/500], Loss: 0.2299
Epoch [24/80], Step [500/500], Loss: 0.3331
Epoch [25/80], Step [100/500], Loss: 0.3207
Epoch [25/80], Step [200/500], Loss: 0.3646
Epoch [25/80], Step [300/500], Loss: 0.3261
Epoch [25/80], Step [400/500], Loss: 0.3122
Epoch [25/80], Step [500/500], Loss: 0.3622
Epoch [26/80], Step [100/500], Loss: 0.3574
Epoch [26/80], Step [200/500], Loss: 0.2704
Epoch [26/80], Step [300/500], Loss: 0.3022
Epoch [26/80], Step [400/500], Loss: 0.2779
Epoch [26/80], Step [500/500], Loss: 0.3574
Epoch [27/80], Step [100/500], Loss: 0.2380
Epoch [27/80], Step [200/500], Loss: 0.1288
Epoch [27/80], Step [300/500], Loss: 0.2354
Epoch [27/80], Step [400/500], Loss: 0.4427
Epoch [27/80], Step [500/500], Loss: 0.3255
Epoch [28/80], Step [100/500], Loss: 0.2964
Epoch [28/80], Step [200/500], Loss: 0.3254
Epoch [28/80], Step [300/500], Loss: 0.1881
Epoch [28/80], Step [400/500], Loss: 0.4440
Epoch [28/80], Step [500/500], Loss: 0.3123
Epoch [29/80], Step [100/500], Loss: 0.3065
Epoch [29/80], Step [200/500], Loss: 0.2815
Epoch [29/80], Step [300/500], Loss: 0.2467
Epoch [29/80], Step [400/500], Loss: 0.3063
Epoch [29/80], Step [500/500], Loss: 0.3652
Epoch [30/80], Step [100/500], Loss: 0.3264
Epoch [30/80], Step [200/500], Loss: 0.2425
Epoch [30/80], Step [300/500], Loss: 0.2608
Epoch [30/80], Step [400/500], Loss: 0.4155
Epoch [30/80], Step [500/500], Loss: 0.3214
Epoch [31/80], Step [100/500], Loss: 0.1212
Epoch [31/80], Step [200/500], Loss: 0.2171
Epoch [31/80], Step [300/500], Loss: 0.2945
Epoch [31/80], Step [400/500], Loss: 0.2927
Epoch [31/80], Step [500/500], Loss: 0.3075
Epoch [32/80], Step [100/500], Loss: 0.1831
Epoch [32/80], Step [200/500], Loss: 0.2182
Epoch [32/80], Step [300/500], Loss: 0.3482
Epoch [32/80], Step [400/500], Loss: 0.3035
Epoch [32/80], Step [500/500], Loss: 0.2285
Epoch [33/80], Step [100/500], Loss: 0.1738
Epoch [33/80], Step [200/500], Loss: 0.2407
Epoch [33/80], Step [300/500], Loss: 0.3299
Epoch [33/80], Step [400/500], Loss: 0.2976
Epoch [33/80], Step [500/500], Loss: 0.2273
Epoch [34/80], Step [100/500], Loss: 0.3169
Epoch [34/80], Step [200/500], Loss: 0.2362
Epoch [34/80], Step [300/500], Loss: 0.1517
Epoch [34/80], Step [400/500], Loss: 0.3536
Epoch [34/80], Step [500/500], Loss: 0.2798
Epoch [35/80], Step [100/500], Loss: 0.2591
Epoch [35/80], Step [200/500], Loss: 0.1770
Epoch [35/80], Step [300/500], Loss: 0.2246
Epoch [35/80], Step [400/500], Loss: 0.2181
Epoch [35/80], Step [500/500], Loss: 0.3327
Epoch [36/80], Step [100/500], Loss: 0.2776
Epoch [36/80], Step [200/500], Loss: 0.1536
Epoch [36/80], Step [300/500], Loss: 0.3514
Epoch [36/80], Step [400/500], Loss: 0.1732
Epoch [36/80], Step [500/500], Loss: 0.1783
Epoch [37/80], Step [100/500], Loss: 0.1768
Epoch [37/80], Step [200/500], Loss: 0.2327
Epoch [37/80], Step [300/500], Loss: 0.3117
Epoch [37/80], Step [400/500], Loss: 0.3398
Epoch [37/80], Step [500/500], Loss: 0.2773
Epoch [38/80], Step [100/500], Loss: 0.1481
Epoch [38/80], Step [200/500], Loss: 0.1423
Epoch [38/80], Step [300/500], Loss: 0.2493
Epoch [38/80], Step [400/500], Loss: 0.3462
Epoch [38/80], Step [500/500], Loss: 0.2133
Epoch [39/80], Step [100/500], Loss: 0.1774
Epoch [39/80], Step [200/500], Loss: 0.2748
Epoch [39/80], Step [300/500], Loss: 0.1728
Epoch [39/80], Step [400/500], Loss: 0.2278
Epoch [39/80], Step [500/500], Loss: 0.2479
Epoch [40/80], Step [100/500], Loss: 0.1808
Epoch [40/80], Step [200/500], Loss: 0.2273
Epoch [40/80], Step [300/500], Loss: 0.2186
Epoch [40/80], Step [400/500], Loss: 0.2461
Epoch [40/80], Step [500/500], Loss: 0.3028
Epoch [41/80], Step [100/500], Loss: 0.2703
Epoch [41/80], Step [200/500], Loss: 0.2342
Epoch [41/80], Step [300/500], Loss: 0.2019
Epoch [41/80], Step [400/500], Loss: 0.4073
Epoch [41/80], Step [500/500], Loss: 0.1835
Epoch [42/80], Step [100/500], Loss: 0.1370
Epoch [42/80], Step [200/500], Loss: 0.2080
Epoch [42/80], Step [300/500], Loss: 0.0908
Epoch [42/80], Step [400/500], Loss: 0.2554
Epoch [42/80], Step [500/500], Loss: 0.1956
Epoch [43/80], Step [100/500], Loss: 0.1763
Epoch [43/80], Step [200/500], Loss: 0.2144
Epoch [43/80], Step [300/500], Loss: 0.2851
Epoch [43/80], Step [400/500], Loss: 0.2409
Epoch [43/80], Step [500/500], Loss: 0.2242
Epoch [44/80], Step [100/500], Loss: 0.1618
Epoch [44/80], Step [200/500], Loss: 0.1487
Epoch [44/80], Step [300/500], Loss: 0.1841
Epoch [44/80], Step [400/500], Loss: 0.1820
Epoch [44/80], Step [500/500], Loss: 0.2557
Epoch [45/80], Step [100/500], Loss: 0.2867
Epoch [45/80], Step [200/500], Loss: 0.2042
Epoch [45/80], Step [300/500], Loss: 0.2000
Epoch [45/80], Step [400/500], Loss: 0.1579
Epoch [45/80], Step [500/500], Loss: 0.2681
Epoch [46/80], Step [100/500], Loss: 0.1551
Epoch [46/80], Step [200/500], Loss: 0.2198
Epoch [46/80], Step [300/500], Loss: 0.2958
Epoch [46/80], Step [400/500], Loss: 0.2145
Epoch [46/80], Step [500/500], Loss: 0.2579
Epoch [47/80], Step [100/500], Loss: 0.1394
Epoch [47/80], Step [200/500], Loss: 0.2146
Epoch [47/80], Step [300/500], Loss: 0.1710
Epoch [47/80], Step [400/500], Loss: 0.1590
Epoch [47/80], Step [500/500], Loss: 0.1470
Epoch [48/80], Step [100/500], Loss: 0.1962
Epoch [48/80], Step [200/500], Loss: 0.2232
Epoch [48/80], Step [300/500], Loss: 0.1686
Epoch [48/80], Step [400/500], Loss: 0.3067
Epoch [48/80], Step [500/500], Loss: 0.2685
Epoch [49/80], Step [100/500], Loss: 0.1417
Epoch [49/80], Step [200/500], Loss: 0.1506
Epoch [49/80], Step [300/500], Loss: 0.2252
Epoch [49/80], Step [400/500], Loss: 0.2574
Epoch [49/80], Step [500/500], Loss: 0.1232
Epoch [50/80], Step [100/500], Loss: 0.2177
Epoch [50/80], Step [200/500], Loss: 0.1809
Epoch [50/80], Step [300/500], Loss: 0.2496
Epoch [50/80], Step [400/500], Loss: 0.1618
Epoch [50/80], Step [500/500], Loss: 0.1933
Epoch [51/80], Step [100/500], Loss: 0.2316
Epoch [51/80], Step [200/500], Loss: 0.3021
Epoch [51/80], Step [300/500], Loss: 0.1793
Epoch [51/80], Step [400/500], Loss: 0.1902
Epoch [51/80], Step [500/500], Loss: 0.2463
Epoch [52/80], Step [100/500], Loss: 0.1417
Epoch [52/80], Step [200/500], Loss: 0.1784
Epoch [52/80], Step [300/500], Loss: 0.1849
Epoch [52/80], Step [400/500], Loss: 0.3067
Epoch [52/80], Step [500/500], Loss: 0.1606
Epoch [53/80], Step [100/500], Loss: 0.2153
Epoch [53/80], Step [200/500], Loss: 0.1975
Epoch [53/80], Step [300/500], Loss: 0.1823
Epoch [53/80], Step [400/500], Loss: 0.1709
Epoch [53/80], Step [500/500], Loss: 0.1117
Epoch [54/80], Step [100/500], Loss: 0.0943
Epoch [54/80], Step [200/500], Loss: 0.1369
Epoch [54/80], Step [300/500], Loss: 0.1761
Epoch [54/80], Step [400/500], Loss: 0.1823
Epoch [54/80], Step [500/500], Loss: 0.2230
Epoch [55/80], Step [100/500], Loss: 0.1253
Epoch [55/80], Step [200/500], Loss: 0.1470
Epoch [55/80], Step [300/500], Loss: 0.2650
Epoch [55/80], Step [400/500], Loss: 0.2928
Epoch [55/80], Step [500/500], Loss: 0.2083
Epoch [56/80], Step [100/500], Loss: 0.1393
Epoch [56/80], Step [200/500], Loss: 0.1190
Epoch [56/80], Step [300/500], Loss: 0.1761
Epoch [56/80], Step [400/500], Loss: 0.1921
Epoch [56/80], Step [500/500], Loss: 0.1909
Epoch [57/80], Step [100/500], Loss: 0.1711
Epoch [57/80], Step [200/500], Loss: 0.2736
Epoch [57/80], Step [300/500], Loss: 0.1435
Epoch [57/80], Step [400/500], Loss: 0.1843
Epoch [57/80], Step [500/500], Loss: 0.1662
Epoch [58/80], Step [100/500], Loss: 0.0726
Epoch [58/80], Step [200/500], Loss: 0.0474
Epoch [58/80], Step [300/500], Loss: 0.3009
Epoch [58/80], Step [400/500], Loss: 0.1847
Epoch [58/80], Step [500/500], Loss: 0.2121
Epoch [59/80], Step [100/500], Loss: 0.1617
Epoch [59/80], Step [200/500], Loss: 0.0809
Epoch [59/80], Step [300/500], Loss: 0.1406
Epoch [59/80], Step [400/500], Loss: 0.1325
Epoch [59/80], Step [500/500], Loss: 0.3449
Epoch [60/80], Step [100/500], Loss: 0.1996
Epoch [60/80], Step [200/500], Loss: 0.0681
Epoch [60/80], Step [300/500], Loss: 0.2138
Epoch [60/80], Step [400/500], Loss: 0.2091
Epoch [60/80], Step [500/500], Loss: 0.1212
Epoch [61/80], Step [100/500], Loss: 0.0661
Epoch [61/80], Step [200/500], Loss: 0.2304
Epoch [61/80], Step [300/500], Loss: 0.1157
Epoch [61/80], Step [400/500], Loss: 0.1931
Epoch [61/80], Step [500/500], Loss: 0.2525
Epoch [62/80], Step [100/500], Loss: 0.1343
Epoch [62/80], Step [200/500], Loss: 0.2773
Epoch [62/80], Step [300/500], Loss: 0.1402
Epoch [62/80], Step [400/500], Loss: 0.1830
Epoch [62/80], Step [500/500], Loss: 0.1188
Epoch [63/80], Step [100/500], Loss: 0.2578
Epoch [63/80], Step [200/500], Loss: 0.1356
Epoch [63/80], Step [300/500], Loss: 0.1527
Epoch [63/80], Step [400/500], Loss: 0.1002
Epoch [63/80], Step [500/500], Loss: 0.1767
Epoch [64/80], Step [100/500], Loss: 0.1269
Epoch [64/80], Step [200/500], Loss: 0.2103
Epoch [64/80], Step [300/500], Loss: 0.1291
Epoch [64/80], Step [400/500], Loss: 0.1776
Epoch [64/80], Step [500/500], Loss: 0.0795
Epoch [65/80], Step [100/500], Loss: 0.1605
Epoch [65/80], Step [200/500], Loss: 0.2012
Epoch [65/80], Step [300/500], Loss: 0.1505
Epoch [65/80], Step [400/500], Loss: 0.0554
Epoch [65/80], Step [500/500], Loss: 0.2361
Epoch [66/80], Step [100/500], Loss: 0.1018
Epoch [66/80], Step [200/500], Loss: 0.2111
Epoch [66/80], Step [300/500], Loss: 0.1561
Epoch [66/80], Step [400/500], Loss: 0.1095
Epoch [66/80], Step [500/500], Loss: 0.1387
Epoch [67/80], Step [100/500], Loss: 0.1832
Epoch [67/80], Step [200/500], Loss: 0.1073
Epoch [67/80], Step [300/500], Loss: 0.1876
Epoch [67/80], Step [400/500], Loss: 0.1999
Epoch [67/80], Step [500/500], Loss: 0.1774
Epoch [68/80], Step [100/500], Loss: 0.1364
Epoch [68/80], Step [200/500], Loss: 0.1086
Epoch [68/80], Step [300/500], Loss: 0.1324
Epoch [68/80], Step [400/500], Loss: 0.1146
Epoch [68/80], Step [500/500], Loss: 0.1608
Epoch [69/80], Step [100/500], Loss: 0.1190
Epoch [69/80], Step [200/500], Loss: 0.2497
Epoch [69/80], Step [300/500], Loss: 0.2121
Epoch [69/80], Step [400/500], Loss: 0.1431
Epoch [69/80], Step [500/500], Loss: 0.0936
Epoch [70/80], Step [100/500], Loss: 0.2063
Epoch [70/80], Step [200/500], Loss: 0.1734
Epoch [70/80], Step [300/500], Loss: 0.0849
Epoch [70/80], Step [400/500], Loss: 0.1122
Epoch [70/80], Step [500/500], Loss: 0.1814
Epoch [71/80], Step [100/500], Loss: 0.1795
Epoch [71/80], Step [200/500], Loss: 0.1698
Epoch [71/80], Step [300/500], Loss: 0.1683
Epoch [71/80], Step [400/500], Loss: 0.1997
Epoch [71/80], Step [500/500], Loss: 0.2391
Epoch [72/80], Step [100/500], Loss: 0.1597
Epoch [72/80], Step [200/500], Loss: 0.0688
Epoch [72/80], Step [300/500], Loss: 0.1460
Epoch [72/80], Step [400/500], Loss: 0.1132
Epoch [72/80], Step [500/500], Loss: 0.0775
Epoch [73/80], Step [100/500], Loss: 0.0952
Epoch [73/80], Step [200/500], Loss: 0.0728
Epoch [73/80], Step [300/500], Loss: 0.1084
Epoch [73/80], Step [400/500], Loss: 0.1674
Epoch [73/80], Step [500/500], Loss: 0.2439
Epoch [74/80], Step [100/500], Loss: 0.1990
Epoch [74/80], Step [200/500], Loss: 0.0840
Epoch [74/80], Step [300/500], Loss: 0.1093
Epoch [74/80], Step [400/500], Loss: 0.1223
Epoch [74/80], Step [500/500], Loss: 0.2627
Epoch [75/80], Step [100/500], Loss: 0.1768
Epoch [75/80], Step [200/500], Loss: 0.1167
Epoch [75/80], Step [300/500], Loss: 0.1619
Epoch [75/80], Step [400/500], Loss: 0.1418
Epoch [75/80], Step [500/500], Loss: 0.2167
Epoch [76/80], Step [100/500], Loss: 0.2540
Epoch [76/80], Step [200/500], Loss: 0.2036
Epoch [76/80], Step [300/500], Loss: 0.0924
Epoch [76/80], Step [400/500], Loss: 0.2102
Epoch [76/80], Step [500/500], Loss: 0.0761
Epoch [77/80], Step [100/500], Loss: 0.2176
Epoch [77/80], Step [200/500], Loss: 0.1518
Epoch [77/80], Step [300/500], Loss: 0.1040
Epoch [77/80], Step [400/500], Loss: 0.1389
Epoch [77/80], Step [500/500], Loss: 0.0750
Epoch [78/80], Step [100/500], Loss: 0.1723
Epoch [78/80], Step [200/500], Loss: 0.0972
Epoch [78/80], Step [300/500], Loss: 0.1122
Epoch [78/80], Step [400/500], Loss: 0.0907
Epoch [78/80], Step [500/500], Loss: 0.2186
Epoch [79/80], Step [100/500], Loss: 0.1585
Epoch [79/80], Step [200/500], Loss: 0.1186
Epoch [79/80], Step [300/500], Loss: 0.0886
Epoch [79/80], Step [400/500], Loss: 0.1035
Epoch [79/80], Step [500/500], Loss: 0.1829
Epoch [80/80], Step [100/500], Loss: 0.1647
Epoch [80/80], Step [200/500], Loss: 0.2264
Epoch [80/80], Step [300/500], Loss: 0.1059
Epoch [80/80], Step [400/500], Loss: 0.1147
Epoch [80/80], Step [500/500], Loss: 0.1452
# Test the model: eval() puts the batch-norm layers in inference mode, and
# torch.no_grad() disables gradient tracking during evaluation
model.eval()
with torch.no_grad():
    correct = 0
    total = 0
    for images, labels in test_loader:
        images = images.to(device)
        labels = labels.to(device)
        outputs = model(images)
        _, predicted = torch.max(outputs.data, 1)
        total += labels.size(0)
        correct += (predicted == labels).sum().item()

    print("Accuracy of the model on the test images: {}%."
          .format(100 * correct / total))
Accuracy of the model on the test images: 87.33%.
torch.save(model.state_dict(), 'resnet.ckpt')
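# To reuse the saved weights later, one can rebuild the model and load the
# checkpoint (a minimal sketch; map_location keeps it usable on CPU-only hosts)
restored = ResNet(ResidualBlock, [2, 2, 2]).to(device)
restored.load_state_dict(torch.load('resnet.ckpt', map_location=device))
restored.eval()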