#coding=gbk
import numpy as np
import matplotlib.pyplot as plt
import scipy.io as sio # loadmat
# Walkthrough: how a nested generator-of-dict-comprehensions expands,
# using two toy "solver" slots (pretrained vs. trained-from-scratch).
mysolvers = list()
mysolvers0 = list()
niter = 10  # number of recorded iterations per solver
######### Expansion walkthrough, parsed from the outside in ##############
############################################## Example 1 ######################################
solvers = [('pretrained', mysolvers), ('scratch', mysolvers0)]
blobs = ('loss', 'accuracy')
# The generator yields one {solver_name: zeros(niter)} dict per entry of
# `blobs`; tuple unpacking assigns them to `loss` and `accuracy`.
loss, accuracy = ({name: np.zeros(niter) for name, _ in solvers} for _ in blobs)
############## First-level expansion ################
# `blobs` has length 2, so the generator produces the dict twice — once
# for each target name:
loss = {name: np.zeros(niter) for name, _ in solvers}
accuracy = {name: np.zeros(niter) for name, _ in solvers}
############## Second-level expansion ################
# Expanding each dict comprehension literally: the keys are the SOLVER
# names (taken from `solvers`), not the blob names.
loss1 = {'pretrained': np.zeros(niter), 'scratch': np.zeros(niter)}   # == loss
loss2 = {'pretrained': np.zeros(niter), 'scratch': np.zeros(niter)}   # == accuracy
############################################## Example 2 ######################################
it = 1  # iteration index to display
loss_disp = ' ; '.join('%s: loss = %.2f ,acc = %.2f' % (n, loss[n][it], accuracy[n][it]) for n, _ in solvers)
# loss_disp and loss_disp_eq are identical
loss_disp_eq = ' ; '.join(('%s: loss = %.2f ,acc = %.2f' % (n, loss[n][it], accuracy[n][it])) for n, _ in solvers)
############## First-level expansion ################
loss_disp1 = '%s: loss = %.2f ,acc = %.2f' % ('pretrained', loss['pretrained'][it], accuracy['pretrained'][it])
loss_disp2 = '%s: loss = %.2f ,acc = %.2f' % ('scratch', loss['scratch'][it], accuracy['scratch'][it])
############## Second-level expansion ################
loss_disp_parse = loss_disp1 + ' ; ' + loss_disp2
######## The following three outputs are equivalent ###############
# Single-argument print(...) is valid in both Python 2 and Python 3,
# unlike the bare `print x` statement form.
print(loss_disp)
print(loss_disp_eq)
print(loss_disp_parse)
# Topic: Python list comprehensions
# (article footer from the original blog page; published 2022-10-26 19:00:20)