# Part 1: top-1 accuracy
import mxnet as mx
import os
import numpy as np
from collections import namedtuple

# Minimal batch wrapper accepted by Module.forward() (data only, no labels).
Batch = namedtuple('Batch', ['data'])

# Raw string: the original 'E:\Spyder\Data\CIFAR-10' only worked because
# '\S', '\D', '\C' are not recognized escapes; that is a SyntaxWarning on
# modern Python. The literal value is unchanged.
data_dir = r'E:\Spyder\Data\CIFAR-10'
batch_size = 100
gpu_list = [0]          # GPU ids (inference below actually runs on CPU)
kv_store = 'device'
kv = mx.kvstore.create(kv_store)
begin_epoch = 0
epoch = 100
class_num = 10          # CIFAR-10 has 10 classes
def cifar_iterator(data_dir, batch_size, kv):
    """Build the CIFAR-10 train/val record iterators.

    Parameters
    ----------
    data_dir : str
        Root directory holding 'train/cifar10_train.rec' and
        'val/cifar10_val.rec'.
    batch_size : int
        Batch size for both iterators.
    kv : mxnet KVStore
        Used to shard the record files across distributed workers.

    Returns
    -------
    (train_iter, val_iter, num_examples) where num_examples is the
    CIFAR-10 training-set size (50000).
    """
    common = dict(
        label_width=1,
        data_name='data',
        label_name='softmax_label',
        data_shape=(3, 32, 32),
        batch_size=batch_size,
        num_parts=kv.num_workers,
        part_index=kv.rank,
    )
    # Training pipeline: pad-4 random crop plus horizontal flip; every other
    # photometric/geometric augmentation is explicitly disabled.
    train = mx.io.ImageRecordIter(
        path_imgrec=os.path.join(data_dir, 'train', "cifar10_train.rec"),
        pad=4,
        fill_value=127,
        rand_crop=True,
        max_random_scale=1.0,  # 480 with imagnet, 32 with cifar10
        min_random_scale=1.0,
        max_aspect_ratio=0,
        random_h=0,
        random_s=0,
        random_l=0,
        max_rotate_angle=0,
        max_shear_ratio=0,
        rand_mirror=True,
        shuffle=True,
        **common,
    )
    # Validation pipeline: deterministic, no augmentation.
    val = mx.io.ImageRecordIter(
        path_imgrec=os.path.join(data_dir, 'val', "cifar10_val.rec"),
        rand_crop=False,
        rand_mirror=False,
        **common,
    )
    return train, val, 50000
# load data
train, val, num_examples = cifar_iterator(data_dir, batch_size, kv=kv)
# load pretrained model
# Loads the epoch-100 checkpoint: symbol graph plus arg/aux parameter arrays.
prefix = 'E:/Spyder/Cresnet-50-fine-0.01'
sym, arg_params, aux_params = mx.model.load_checkpoint(prefix, 100)
# NOTE(review): `mod` is bound and parameterised but never used afterwards --
# the mod.predict() path that consumed it is commented out below. Kept as-is.
mod = mx.mod.Module(symbol=sym, context=mx.cpu(), label_names= None)
mod.bind(for_training=False, data_shapes=[('data', (batch_size,3, 32, 32))],
label_shapes=mod._label_shapes)
# allow_missing=True: checkpoint params that the inference graph lacks are ignored.
mod.set_params(arg_params, aux_params, allow_missing=True)
# (Alternative: a single whole-dataset mod.predict(val) call would also yield
# the final outputs; the per-batch forward loop below is used instead.)
### use the softmax_output calculate the accuracy ###
# Slice the symbol graph at its softmax output and wrap that sub-graph in a
# dedicated module, so forward() returns class probabilities directly.
internals = sym.get_internals()
# print(internals.list_outputs())
softmax_symbol = internals['softmax_output']
sof_mod = mx.mod.Module(symbol=softmax_symbol, context=mx.cpu(), label_names=None)
sof_mod.bind(
    for_training=False,
    data_shapes=[('data', (batch_size, 3, 32, 32))],
    label_shapes=sof_mod._label_shapes,
)
sof_mod.set_params(arg_params, aux_params, allow_missing=True)
val.reset()
iter_num = int(1000 / batch_size)  # evaluate the first 1000 validation images

# --- top-1 accuracy over `iter_num` batches ---
acc = 0
for _ in range(iter_num):
    batch = val.next()
    labels = batch.label[0].asnumpy()
    sof_mod.forward(Batch(batch.data))
    probs = sof_mod.get_outputs()[0].asnumpy()
    predictions = probs.argmax(axis=1)
    # Fraction of this batch whose argmax matches the ground-truth label.
    acc += np.mean(predictions == labels)
top1_acc = acc / iter_num
print(top1_acc)
# Part 2: top-k accuracy
# top_k accuracy, k > 1
k = 5
true_num = 0
# Bug fix: restart the validation iterator so top-k is measured on the same
# first `iter_num` batches as the top-1 pass above. Without this reset the
# loop silently consumed the *next* 1000 images instead.
val.reset()
for i in range(iter_num):
    val_iter = val.next()
    true_label = val_iter.label[0].asnumpy()
    sof_mod.forward(Batch(val_iter.data))
    sof_out = sof_mod.get_outputs()[0].asnumpy()
    # argsort is ascending along the class axis, so the last k columns hold
    # the indices of the k highest-probability classes.
    sof_index = np.argsort(sof_out)
    for j in range(batch_size):
        if true_label[j] in sof_index[j, (class_num - k):]:
            true_num += 1
print(true_num)
# Denominator derived from the loop size (== 1000 here) rather than hard-coded.
top5_accuracy = true_num / (iter_num * batch_size)
print(top5_accuracy)