Sklearn部分习题

题目:

步骤一:

代码:

from sklearn import datasets

# Build a synthetic binary-classification dataset:
# 1000 samples, 10 features, 2 target classes.
X, Y = datasets.make_classification(n_samples=1000, n_features=10, n_classes=2)

# Show the feature matrix and the label vector.
print(X)
print(Y)

输出:

[[-1.80647012 -1.43419604 -0.28311776 ... -0.53762212 -0.6953816
  -0.70075976]
 [-0.40105081 -0.48818771 -0.2388701  ... -0.14350404 -1.21294631
  -0.95708248]
 [ 0.55253213 -1.17637328 -0.29499082 ... -0.42357666  0.05036445
   1.04113447]
 ...
 [-0.09154113  0.93550293  0.25381486 ...  0.33151761  0.79142403
   0.62442776]
 [-1.22534209 -0.10833795 -0.01011302 ... -0.04373628 -0.68807114
  -1.00516621]
 [-0.36315539  0.36608284  0.03853966 ...  0.14657807 -0.45135585
   0.26615598]]
[1 1 1 1 0 1 0 0 0 1 1 1 1 0 0 1 1 1 1 1 1 1 0 0 0 0 1 0 0 0 1 0 1 0 0 0 0
 0 0 1 0 0 1 1 1 1 0 0 1 0 1 0 0 0 0 1 0 0 1 1 0 1 1 1 0 1 0 1 1 0 1 0 1 0
 0 1 1 1 0 0 1 1 0 0 0 0 1 1 0 0 1 1 1 1 1 0 1 0 1 1 1 1 1 1 1 1 1 1 1 1 1
 1 0 0 0 0 0 0 0 1 1 1 1 0 1 0 0 1 1 0 0 0 0 0 0 1 0 1 1 0 0 1 0 1 1 0 0 1
 0 0 0 0 0 1 0 0 1 0 0 1 0 1 0 1 0 1 0 1 0 0 0 1 0 1 0 0 0 1 1 1 0 0 0 1 0
 1 0 1 1 1 1 0 0 0 1 1 0 1 1 0 0 0 1 1 1 0 1 1 0 0 0 1 0 0 1 1 0 0 0 1 1 0
 0 1 0 1 0 0 1 1 1 1 1 1 1 0 1 0 1 0 1 1 1 0 1 0 0 1 0 0 0 1 1 0 1 1 1 1 0
 1 1 1 0 0 0 0 0 1 0 0 1 1 0 1 1 1 1 0 1 0 1 0 1 1 0 0 0 0 0 0 0 1 1 0 1 1
 1 1 1 1 1 1 1 0 1 0 1 1 1 0 1 0 0 1 0 1 0 1 1 1 1 0 0 0 1 0 1 1 1 0 1 1 0
 0 0 0 0 1 1 0 1 0 1 0 0 1 1 1 0 0 0 0 1 0 1 1 1 0 1 0 0 0 0 1 1 1 0 0 1 0
 1 1 1 1 0 0 0 0 0 0 0 1 1 0 1 0 0 0 0 1 0 0 1 0 1 1 0 0 1 1 0 0 1 0 1 1 1
 1 0 1 0 0 1 0 1 1 0 0 1 1 1 0 0 1 1 0 0 1 0 0 0 0 0 1 0 1 0 0 0 1 0 1 1 0
 1 1 1 1 0 1 0 0 1 0 0 0 1 0 1 0 0 0 1 1 0 0 0 1 1 1 0 1 1 1 1 1 1 0 1 0 0
 1 0 1 1 0 1 1 0 1 0 0 0 1 0 1 0 0 0 1 0 0 1 0 1 1 1 1 0 1 1 1 1 0 1 1 1 1
 1 0 0 0 0 1 0 0 0 0 0 1 0 0 0 1 1 0 0 0 1 1 1 0 1 0 1 0 1 1 1 1 0 0 1 0 1
 0 0 0 0 1 1 1 0 1 0 0 1 0 0 0 0 1 1 0 1 1 1 1 0 0 1 1 0 0 1 0 1 0 0 1 0 1
 1 0 1 0 0 0 0 0 0 0 0 0 1 0 1 0 0 1 1 0 0 0 0 1 0 0 0 1 1 1 1 1 0 1 0 1 0
 0 1 1 1 1 0 0 1 1 1 1 1 0 0 0 1 0 0 0 1 0 0 1 0 0 0 1 1 1 0 1 1 1 0 0 1 0
 1 0 1 1 0 0 1 0 1 0 1 1 0 0 1 0 0 1 1 1 1 0 1 0 1 0 1 1 1 0 0 1 0 0 0 0 1
 0 1 1 1 1 1 0 0 0 1 0 1 1 1 1 0 0 1 1 0 1 0 1 1 0 1 1 1 1 1 1 0 1 1 1 0 0
 0 1 1 1 0 0 1 0 0 1 1 1 0 1 0 1 1 0 1 0 1 0 1 0 1 1 0 1 0 0 0 0 0 0 1 0 0
 0 1 1 1 1 1 0 0 0 0 0 0 1 1 1 0 1 1 0 0 0 1 1 1 0 1 1 0 0 1 0 0 1 1 0 1 0
 1 1 0 0 0 1 0 1 1 1 1 1 0 1 1 0 0 1 0 1 1 1 1 0 1 0 0 0 1 0 0 1 0 0 1 0 1
 0 1 0 0 1 1 1 0 0 0 1 0 0 0 0 1 1 0 0 1 1 1 0 0 1 0 1 0 1 1 0 0 0 1 1 0 1
 0 1 0 0 1 0 1 1 0 0 0 0 1 1 1 1 0 1 0 0 0 1 1 1 1 0 1 1 1 1 0 1 1 0 1 1 0
 0 1 1 0 1 0 0 0 0 1 0 0 1 0 0 1 0 0 1 0 1 1 1 0 1 1 0 1 0 1 0 1 0 1 0 1 0
 0 1 1 1 0 0 0 1 0 1 0 1 1 0 0 1 0 1 0 0 1 0 1 1 1 0 0 0 1 0 0 0 0 1 0 0 0
 0]

步骤二:

代码:

# NOTE(review): sklearn.cross_validation was deprecated in 0.18 and removed in
# 0.20; the replacement is sklearn.model_selection, whose KFold takes
# n_splits (not the sample count / n_folds) and yields folds via .split(X).
from sklearn.model_selection import KFold

# 10-fold cross-validation split; shuffle so folds are not ordered runs.
kf = KFold(n_splits=10, shuffle=True)
for train_index, test_index in kf.split(X):
    X_train, Y_train = X[train_index], Y[train_index]
    X_test, Y_test = X[test_index], Y[test_index]
# Only the split of the LAST fold survives the loop, so the prints below show
# a single 900/100 train/test partition rather than all ten folds.
print(X_train)
print(Y_train)
print(X_test)
print(Y_test)

输出:

[[ 1.21198437 -1.56314766  0.28776258 ...  0.60582953 -1.28649919
  -1.04512689]
 [ 1.08442852 -0.41663361 -0.79928577 ...  0.01663205 -0.64393865
  -1.18830256]
 [-1.24136672  1.06605984 -0.22211874 ... -0.47361842 -0.40775938
   0.13153701]
 ...
 [ 2.64727998  0.13745864  0.74356353 ... -1.04930397 -0.50189399
  -0.93786842]
 [ 0.21165009 -1.7788994   1.56883132 ... -0.36501726 -0.13476309
   0.47600208]
 [-0.43238361  0.1972481   1.22680025 ... -1.25308915  0.97978849
  -0.95893893]]
[1 1 0 0 0 0 0 1 0 0 0 0 1 0 1 0 0 0 1 0 0 1 1 0 0 1 0 1 0 0 0 1 0 0 0 1 1
 0 0 1 1 0 1 1 1 1 1 1 1 1 1 1 1 0 0 1 0 1 1 0 1 0 0 1 0 0 0 1 1 0 0 1 1 1
 0 0 0 1 0 0 0 1 1 0 1 0 0 1 1 0 0 0 0 1 1 0 0 1 1 1 0 0 0 1 1 0 0 1 0 1 0
 0 1 0 0 0 0 1 1 1 1 0 1 1 1 0 1 0 1 0 1 1 1 1 1 0 1 0 1 1 1 0 0 0 0 1 0 0
 1 0 0 1 0 1 1 1 1 0 1 0 0 1 0 1 0 1 1 0 0 0 1 0 1 1 0 0 0 1 1 1 0 0 0 1 1
 0 0 1 1 0 1 1 1 1 0 0 1 1 0 1 1 1 1 0 1 0 1 1 0 1 1 1 1 1 0 1 0 1 1 1 1 0
 1 0 0 0 1 1 0 0 1 0 1 1 0 1 1 0 0 1 1 1 0 1 1 1 1 0 1 0 1 1 0 0 1 0 0 0 0
 1 1 0 1 0 1 1 0 1 0 1 1 1 0 0 0 0 0 1 1 0 1 1 0 0 0 1 1 1 0 0 0 0 0 1 1 0
 1 0 1 0 1 0 1 1 0 0 1 0 0 0 1 1 1 1 0 1 1 1 1 0 0 1 1 1 1 1 1 0 1 0 1 1 1
 0 0 0 0 1 0 1 1 1 1 0 1 1 0 0 0 0 0 0 1 1 0 1 1 0 0 1 0 1 0 0 1 1 0 0 1 0
 1 1 0 0 0 0 0 0 0 0 1 1 0 1 0 1 0 1 0 1 1 1 1 0 0 0 1 0 1 1 1 1 0 1 0 0 1
 1 0 0 1 1 0 0 1 0 0 0 1 0 0 1 0 1 1 1 0 1 1 1 0 1 1 1 1 0 0 1 1 1 0 0 0 1
 0 0 1 1 1 0 1 1 0 0 1 0 0 1 1 0 0 1 0 1 1 1 1 1 1 1 1 1 1 0 1 0 0 0 0 1 0
 0 0 0 1 0 1 1 0 0 0 1 1 0 1 1 1 1 0 1 1 0 0 0 0 1 0 0 0 0 0 0 1 0 1 0 0 0
 1 1 0 1 0 1 1 0 0 0 1 1 0 0 0 0 0 1 1 1 1 1 1 0 1 0 1 1 1 0 1 0 1 0 1 1 0
 1 1 0 0 1 0 0 0 1 1 0 0 1 0 0 1 0 0 0 0 1 1 0 1 1 0 0 0 0 1 1 1 1 1 1 0 0
 1 1 0 1 1 0 0 0 0 0 1 1 1 0 1 1 0 1 1 1 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 1
 1 0 0 0 0 0 1 0 0 0 0 0 1 0 1 0 1 0 0 0 0 1 1 1 1 1 1 0 1 0 1 1 1 0 1 1 1
 1 0 1 1 0 1 0 0 1 1 0 0 0 1 1 0 0 1 0 1 1 1 1 1 0 1 0 1 1 0 0 0 1 0 1 0 0
 0 0 0 0 0 0 0 1 0 1 1 0 1 1 0 0 1 1 1 0 1 1 1 1 1 0 0 0 0 0 0 1 1 0 1 0 1
 1 1 0 0 1 1 1 1 0 0 0 0 0 0 1 1 0 0 0 1 0 1 0 1 0 1 1 0 1 0 1 0 0 0 1 0 1
 0 0 0 1 0 1 0 0 1 0 0 0 1 1 1 0 1 1 1 0 1 1 1 0 1 0 1 1 1 1 1 0 0 0 0 0 0
 0 1 0 1 0 1 1 1 1 1 1 1 1 0 1 0 0 1 0 0 1 0 1 0 1 1 0 1 0 0 1 1 0 0 1 0 0
 0 0 0 0 1 1 0 0 0 1 0 0 1 0 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0
 1 0 1 1 0 0 0 0 0 1 1 1]
[[-4.93402209e-01  9.40516009e-01 -2.37479960e-01  1.20410703e+00
  -1.23699913e+00 -6.74744038e-02 -1.83211766e+00  2.28779716e-01
  -8.56218369e-01  6.39680663e-01]
 [ 2.46274043e-01 -1.91724983e-02 -7.62789250e-01  1.80001530e+00
  -1.00546424e+00  9.61291165e-01  8.98114668e-01  2.94417982e-01
   5.24221449e-01  1.02346712e+00]
 [ 9.98016432e-01  7.46645582e-01  1.31520298e+00 -5.91363091e-01
  -7.11777757e-01 -3.05248072e-01 -2.64060285e-02 -3.90379608e-01
  -7.72522533e-01  2.36058069e-01]
 [-1.66009106e+00  7.89342989e-03  5.97812371e-03 -1.37595408e+00
  -1.91495274e+00  1.77323995e+00  1.84040423e+00 -9.51995343e-01
  -5.25423430e-01  1.92070629e+00]
 [ 8.56689767e-01 -3.99252038e-01 -2.08803983e-01 -3.45338604e-01
   6.25946508e-01 -6.52700761e-02  3.74475965e-01 -1.78070275e-01
   1.43711209e+00 -3.72952954e-01]
 [ 7.53780709e-01  4.50641002e-01 -1.49252743e-01 -3.10981986e-01
  -1.85776545e+00  1.14587954e+00  3.83560264e-01  1.42737992e+00
   1.73398361e+00  1.57871640e+00]
 [ 9.17507532e-01 -1.07936020e+00 -1.86043431e+00  6.94957337e-01
   1.93011782e+00 -3.98020148e-01  2.57622559e-01  6.45933090e-01
  -8.02576322e-02 -1.24750581e+00]
 [ 3.03418425e-01 -1.77785473e-02 -2.78172595e-01  1.76317557e+00
  -1.21112914e+00  1.15102795e+00 -1.63107499e+00  1.27965041e+00
  -8.04649640e-01  1.22939900e+00]
 [ 6.69494715e-01  1.37244878e+00 -6.30669912e-01  8.75340882e-02
  -2.63071826e+00  6.70477992e-01  9.48455919e-01  2.61142604e-01
  -3.11148588e-02  1.76374810e+00]
 [ 1.52117010e+00  1.33322415e+00 -9.05838090e-01 -1.30335555e+00
  -8.70595355e-01 -9.17936131e-01 -9.15251426e-01 -1.85894760e+00
  -9.54205047e-01  1.88781830e-02]
 [-9.27749564e-01  1.27686914e+00  1.12699623e+00  6.78663812e-01
  -2.66594293e+00  8.27219870e-01  3.64248788e-01 -9.90224905e-01
   2.60699635e-01  1.86058484e+00]
 [-1.38437477e+00 -3.47232292e-02 -3.60799905e-02  1.65174276e+00
   1.09951627e+00 -9.79000521e-01 -2.37282144e-01  5.82886051e-01
   1.07583107e+00 -1.08342112e+00]
 [-4.57889912e-03  1.01208331e+00 -7.80973468e-01 -1.68729065e+00
  -8.07625588e-01 -5.60167338e-01 -4.52343566e-02  6.51995567e-01
   1.10028073e+00  1.61895597e-01]
 [ 3.25642619e-01  6.31799812e-01  7.68684445e-01  9.65235722e-01
  -1.35635542e+00  4.43991504e-01 -1.47666482e-01 -1.17098265e+00
   4.09931454e-01  9.58071602e-01]
 [-4.20402994e-01  1.48490631e+00 -2.09113157e-01  5.61361343e-01
  -9.86376493e-01 -1.00678673e+00  1.63722127e+00 -5.98544314e-01
  -6.96225329e-01  3.78532536e-02]
 [-9.34125837e-01  1.44526247e+00  8.88469580e-01 -1.77608850e-01
  -1.91300666e+00 -9.23723441e-02 -7.96909703e-01 -3.95063824e-01
   6.06554213e-01  9.95194136e-01]
 [ 3.81850239e-02 -1.59414837e-02 -1.62970993e+00 -9.92559369e-01
  -8.12631797e-01  7.77508717e-01  1.25396044e+00  6.87893937e-02
   1.59513672e+00  8.27468556e-01]
 [-1.67232640e+00  1.41133527e+00  1.03542235e+00  7.85986696e-01
  -9.84605654e-01 -9.13038288e-01 -1.52907157e+00  5.35600197e-01
  -8.30279453e-02  8.33441265e-02]
 [-5.75997311e-01 -7.88144351e-02 -1.65807047e+00 -1.78810791e-02
   1.12092197e+00 -9.41764597e-01  1.26102403e+00 -9.47515288e-01
   6.29777288e-01 -1.07661784e+00]
 [ 5.76227619e-01 -1.34686739e+00 -1.16554826e+00  3.77170307e-01
   7.68311270e-01  1.03088839e+00  1.56314585e+00 -3.41580156e-01
   1.60024480e+00  9.27503296e-02]
 [ 1.23978449e+00  1.08042770e+00 -1.25183499e+00  5.08208163e-01
  -2.37464519e+00  8.10642780e-01  1.12930444e+00 -9.06560401e-01
   1.75546531e+00  1.69386022e+00]
 [-8.85342077e-01 -7.69720456e-01  1.34463563e-02  9.95580755e-01
  -5.37171216e-01  1.49836681e+00 -8.73448391e-02 -1.60737045e+00
  -1.29648808e+00  1.03477762e+00]
 [ 8.84611279e-01  2.60296856e+00  3.29383460e-01 -3.45267808e-01
  -1.48641488e+00 -1.99084302e+00  5.98306414e-01  3.49647960e-01
   2.97967569e-01 -1.77671980e-01]
 [-1.95840208e+00  1.52115289e+00 -1.09440581e+00  6.56167246e-01
  -1.05359011e+00 -9.91187909e-01  1.17038754e+00 -2.71197427e-01
  -6.61273843e-01  8.21572608e-02]
 [ 1.54405390e+00  1.12254471e+00  1.65853278e+00  3.04765021e-02
  -8.73268317e-01 -6.42264019e-01 -2.82904728e-02 -4.56497497e-01
   3.42502930e-01  1.56934464e-01]
 [ 7.45719721e-01 -1.39799963e-01 -4.42948327e-02 -1.64601595e-01
  -9.16450959e-01  1.03480396e+00  1.34163902e-01  1.84359990e+00
   1.71549246e+00  1.01145761e+00]
 [-7.45095702e-01 -1.64702436e+00  4.11161478e-01 -9.86648484e-01
   1.45427044e+00  7.81231982e-01 -8.49406810e-01  1.29861051e+00
  -1.04093743e+00 -4.04226251e-01]
 [ 9.42980876e-03  5.95622315e-01 -1.07677488e+00  1.43909685e+00
  -2.02158562e+00  1.11045851e+00  6.15318890e-01  6.00013502e-02
  -1.43291556e-01  1.65030757e+00]
 [-1.44427463e-01 -2.82900402e+00  2.72474886e-01  1.56548065e-01
   9.13542305e-01  2.81747751e+00  6.11427020e-01  3.76995777e-02
  -3.53620227e-01  8.99017969e-01]
 [-9.10097089e-01 -1.79478424e-01  8.90026250e-01  1.16624947e+00
   1.31962483e+00 -9.96296409e-01  1.81248277e+00 -6.59668706e-01
   1.16061757e-01 -1.21176418e+00]
 [ 2.29829878e+00  2.73546277e+00 -2.37184359e+00 -6.23563308e-01
  -1.59311022e+00 -2.06327507e+00 -8.45466597e-01 -6.64355279e-02
  -1.13458622e+00 -1.55505204e-01]
 [-4.07746393e-01  1.28854561e+00  8.02092484e-01  2.35432053e-01
  -8.72960638e-01 -8.57799709e-01 -8.81578752e-01  1.06678647e+00
   1.09948808e+00  4.99642176e-02]
 [ 2.53633378e-01  4.38020438e-01  4.55291334e-01 -1.47975730e+00
   4.46469992e-01 -9.83786025e-01  1.79329830e+00 -1.32946248e+00
  -1.12134963e+00 -7.30435899e-01]
 [-3.75284485e-01 -1.59154297e+00 -2.61830165e-01  1.10193206e+00
   8.58504362e-01  1.26415241e+00  2.33619334e-01  7.85629314e-01
  -1.72082425e+00  1.59259176e-01]
 [ 3.00600648e-02  1.44484473e-01  4.13720100e-01  5.18875532e-01
   9.17412059e-01 -1.04177335e+00  1.23873276e+00  2.52989880e-01
  -1.51058470e+00 -1.01543408e+00]
 [-1.35538599e-01  6.29365252e-01 -8.92383900e-01  1.71673745e+00
  -5.80998238e-01 -2.74974062e-01  2.79646458e-01  5.36778171e-01
   8.40261001e-01  1.79895569e-01]
 [-9.49761690e-01 -2.45047669e+00  6.88586125e-02 -5.74690837e-01
   1.30942383e+00  1.95794982e+00  4.37728691e-01  9.85143958e-01
   1.65562745e+00  2.57682721e-01]
 [-1.84479780e+00 -1.75403771e+00  3.12000967e-01 -2.91895172e-01
   1.55222678e+00  8.28762644e-01 -2.36875877e-01  1.17419876e-01
  -6.10765854e-01 -4.33976889e-01]
 [ 1.05370361e-01 -3.20628082e-01  6.56308385e-01 -8.65959825e-01
   1.51986957e+00 -9.99767491e-01  1.79047986e+00  1.19430945e+00
  -1.78459222e+00 -1.32244779e+00]
 [-7.24248036e-01 -1.61929504e+00 -2.72806088e-01 -1.60166421e+00
   4.47764512e-01  1.68267692e+00 -2.48945173e+00  2.90175763e-01
  -7.63686168e-01  5.90152541e-01]
 [ 1.41934781e+00 -1.24315229e+00 -6.09723222e-01  5.23444095e-01
   1.88382246e+00 -1.42518311e-01  7.67608078e-01 -7.00902121e-02
   4.61393981e-01 -1.09570713e+00]
 [-4.69441429e-01 -1.21708127e+00 -1.78250751e+00  8.39736883e-01
   9.48468048e-02  1.48982352e+00 -2.25020889e+00 -1.43763818e+00
   2.70317232e-01  6.86630242e-01]
 [ 1.87031106e+00 -1.86305292e+00  1.18906469e+00  5.34264434e-01
   1.42850495e+00  1.08534718e+00  2.55097341e-01  5.96569053e-01
   9.50140881e-01 -2.39509932e-01]
 [-3.25596038e-01  4.35297201e-01 -5.09596252e-01 -1.96645114e+00
  -1.42454041e+00  7.62294961e-01  9.16319152e-02 -2.59224904e-01
  -7.56500505e-01  1.15290120e+00]
 [ 1.55241000e-01 -1.62892870e+00 -2.04305675e-01 -7.38203867e-01
   1.16796802e+00  1.02441315e+00  1.70313331e-01 -3.01130592e-01
   4.99475187e-01 -1.27932386e-01]
 [ 4.91909829e-01 -2.15710119e-01 -6.62026913e-01  5.18807081e-01
  -8.16342710e-01  1.03999975e+00 -6.69300215e-01  1.50971760e+00
   8.33086231e-01  9.59558116e-01]
 [ 1.27870632e+00 -1.04104900e-01  9.08910624e-01 -8.75932395e-01
  -6.50273941e-01  7.40617348e-01 -1.05364171e+00 -1.62933225e+00
  -1.41896014e+00  7.20840667e-01]
 [ 9.93492451e-01  9.77194651e-02 -2.21222813e+00  1.03135479e+00
  -1.01986912e+00  8.23136205e-01  1.10329998e+00 -6.68600863e-01
   1.11596409e+00  9.62846653e-01]
 [-1.62986634e-01 -9.30006338e-01  8.14666784e-01 -1.41106724e+00
   1.35479709e+00 -5.58633648e-02 -1.84405905e-01 -6.26639304e-01
   4.24267140e-01 -7.64897342e-01]
 [-1.48579805e+00 -1.24781996e-01  5.48397764e-01  1.02457121e+00
   1.23965820e+00 -9.92743620e-01 -3.98845075e-01 -2.03773996e+00
   2.00076659e+00 -1.16648968e+00]
 [-4.87010874e-02  2.49537105e+00  3.97629961e-01 -1.16763267e-01
  -1.46016627e+00 -1.87577050e+00  2.93249104e-01  6.26744819e-01
   8.42291827e-01 -1.34934174e-01]
 [-1.55878309e+00 -7.12376873e-02  4.75093428e-03  1.77742262e-03
   1.17171154e+00 -9.98891614e-01 -1.27094456e+00  6.13582152e-01
   2.88864951e+00 -1.13256283e+00]
 [-9.33806515e-01  1.63563798e+00  1.32754556e-01  8.76704148e-01
  -1.02702498e+00 -1.16437902e+00 -2.48620486e-01 -2.18878455e+00
   1.01890368e+00 -1.81183372e-02]
 [-4.98257899e-01  1.30941341e-01 -1.61935836e-01  8.71030824e-01
   9.18973806e-01 -1.02566684e+00  6.89449749e-01 -1.30634470e+00
  -1.07119209e+00 -1.00830277e+00]
 [ 1.87869906e-01  1.42491222e+00  6.08327237e-01  3.26529057e-01
  -2.35369121e+00  3.44443167e-01  6.86612100e-01  7.41214091e-01
  -1.26532008e-01  1.45144558e+00]
 [-1.25980917e+00 -2.03846139e-01 -1.20561876e+00 -7.78349656e-01
   1.26133326e+00 -9.10410049e-01 -3.83859075e-01 -1.03808834e-01
   1.51695193e+00 -1.13748606e+00]
 [-7.32455700e-01  1.09960175e-01 -1.18330137e+00  5.58288208e-01
   9.10342343e-01 -9.90422257e-01  4.48135691e-01 -1.13331671e+00
  -3.74139698e-02 -9.86141450e-01]
 [-1.31763008e+00  5.18745169e-01 -1.27717492e+00  6.94473770e-01
   4.59199150e-01 -1.10031492e+00 -1.42952623e+00 -1.45646754e+00
  -3.04633344e-02 -7.95105209e-01]
 [-1.05835032e+00 -1.61001324e-01 -8.90547746e-01  4.95766760e-01
   1.27174014e+00 -9.75658183e-01  1.88399592e-02  1.05533105e+00
   1.71399656e-01 -1.17548091e+00]
 [ 1.47248180e+00 -1.99559458e+00 -1.27608270e+00  5.31909615e-01
  -4.30058604e-02  2.62768981e+00 -3.72225376e-01 -2.03270558e-01
  -6.16872543e-01  1.32548162e+00]
 [ 7.51955775e-01 -1.31412671e+00  1.11643659e+00  4.21435653e-01
  -2.48919475e-01  1.93582370e+00  9.90267675e-01 -8.08170301e-01
  -2.22268889e+00  1.09469461e+00]
 [ 7.54874991e-01  3.05695808e+00 -1.68069378e+00  1.80098125e+00
  -1.72563532e+00 -2.35672372e+00  1.67289582e+00 -2.60609145e-01
   5.77408910e-01 -2.28801844e-01]
 [-1.12791706e+00 -1.56800630e+00  1.04746116e+00 -1.65031872e+00
   1.13736981e+00  9.73914012e-01  1.20348245e+00 -1.64931950e+00
  -2.38450531e+00 -1.36305775e-01]
 [-4.00743437e-01 -2.27936825e-01 -3.00119601e-01 -6.01121345e-01
  -2.57438738e-01  5.35323283e-01  1.60166006e-01  1.19912632e+00
   1.23304027e+00  4.05350674e-01]
 [ 1.60916509e-01 -1.06529551e+00 -1.35077579e+00  7.80528884e-01
   1.16837540e+00  2.93184936e-01 -6.20421635e-01 -5.53919472e-01
   5.60870323e-02 -4.90494224e-01]
 [ 3.42113602e-02  2.05922791e+00 -2.63194951e-02 -7.76356842e-01
  -1.28764920e+00 -1.47090711e+00 -1.18136083e+00 -4.67007807e-02
  -1.65638395e+00 -2.81907183e-02]
 [ 1.36330919e+00 -3.73285427e-01 -3.16637978e-02  9.34603485e-01
   2.12083165e+00 -1.49118904e+00 -7.93981893e-01 -2.14898367e-01
  -9.85542840e-01 -1.89297313e+00]
 [-3.35012715e-01 -1.35357407e+00  7.32297310e-02 -1.05907619e+00
   1.04586528e+00  7.81087178e-01  3.73003051e-02  2.91104282e-01
  -2.69356828e-01 -1.82063452e-01]
 [ 1.21125349e+00 -1.30023430e+00 -4.52714020e-01 -1.16810484e-02
   1.11479480e+00  6.47725923e-01 -1.86138578e+00 -8.31640984e-01
   2.48039037e-01 -2.85655124e-01]
 [ 8.02692520e-01  1.79956052e+00 -1.14202955e+00 -8.61120486e-02
  -1.12826922e+00 -1.28264022e+00 -5.96620433e-01 -2.64401630e-01
  -7.03489491e-01 -2.16271715e-02]
 [-3.26300647e-01  4.11241501e-01 -1.29970117e+00 -2.22208942e+00
  -1.59343870e+00  9.50789375e-01  1.99489896e-02  1.51575933e-01
  -6.07103270e-01  1.33821082e+00]
 [ 8.61495002e-01  1.75699887e+00  3.19952399e-01 -5.61964554e-01
  -1.08553135e+00 -1.26725518e+00  2.43054577e-01 -2.37803911e-01
  -2.58189743e-02 -3.72594350e-02]
 [ 1.15518027e+00 -1.67699212e+00 -2.04844155e+00  1.58039169e+00
   1.08225670e+00  1.16656227e+00  5.56484667e-01 -1.87942514e-01
   2.01499136e+00 -1.08542721e-02]
 [-1.49338741e+00 -4.27309873e-01  6.61214285e-01  3.22274252e-01
  -5.01640655e-01  1.02128064e+00  1.84104463e+00 -3.35771085e-01
   5.83201147e-01  7.79036568e-01]
 [-1.09014570e+00  1.48754403e+00 -1.44472553e+00 -5.53684740e-01
  -9.40887130e-01 -1.05257312e+00  5.53628815e-01 -1.44901042e-01
   9.15035038e-01 -9.58807831e-03]
 [-1.79826441e+00 -1.59273311e+00 -4.14686690e-02 -1.27868776e+00
   1.09519741e+00  1.04525354e+00  4.95603863e-01 -1.30438029e+00
  -9.80461753e-02 -7.80072185e-02]
 [ 1.40323558e+00 -9.99162303e-01  7.26960918e-01 -3.34271466e-01
   7.59907869e-01  5.87854499e-01 -1.75338554e+00  5.35935901e-01
   1.42953900e-01 -1.22210281e-01]
 [-9.65452131e-01 -2.52282603e+00 -4.52360490e-01 -2.80490137e-01
   1.92543243e+00  1.47804893e+00 -4.38468668e-01  2.92281039e-01
   4.32139896e-01 -3.15321472e-01]
 [-4.89110895e-01  9.17055061e-02  1.58156683e+00 -5.98431334e-01
   9.67334002e-01 -1.01983060e+00 -8.62697018e-01 -4.48973247e-01
   1.30881039e+00 -1.03172608e+00]
 [-1.77487579e-01 -1.98017615e+00 -1.39565271e+00 -1.34159074e+00
   1.30062612e+00  1.35631677e+00 -1.35557917e-01 -1.86356755e-01
   6.82068975e-01 -3.56528073e-02]
 [ 2.76097334e-01  9.03364799e-01 -1.57879521e+00 -1.80255593e-01
  -6.95491500e-01 -5.23629958e-01  9.80993634e-01  7.06485738e-01
  -3.20778224e-01  1.18982691e-01]
 [ 9.19622293e-02  4.57508229e-01 -1.54182377e+00 -6.97593332e-01
   4.55016645e-01 -1.01701520e+00  6.99785867e-01  3.58107746e-01
  -2.27326904e+00 -7.51552390e-01]
 [ 7.39458874e-01 -5.88586586e-01 -5.62666382e-02 -2.57939785e-01
   3.75698005e-01  4.13302053e-01  8.43317757e-01 -8.36003887e-01
  -2.05472031e-01  3.63654518e-04]
 [ 1.06201570e+00  1.18556248e-02  2.34995681e+00 -1.32508489e+00
   1.04677014e+00 -9.90273464e-01  1.40502157e+00 -3.85943667e-01
   3.40844881e-01 -1.06030520e+00]
 [ 7.55546549e-02  1.24789587e+00  3.73168470e-01 -2.43717090e+00
  -9.63359976e-01 -7.20897571e-01 -6.91592721e-01  6.46612192e-01
   3.50813458e-01  1.66993327e-01]
 [-2.36087017e-01 -1.20348671e+00 -3.42224291e-01  1.19359798e+00
   9.20729226e-01  7.03017127e-01 -1.42090635e+00  1.23152665e-01
   2.43599050e-01 -1.52655878e-01]
 [ 5.32769887e-02 -1.95844563e+00  5.73886776e-01 -1.34719168e+00
   9.55514171e-01  1.64955647e+00 -1.02646815e+00 -2.25103148e-01
   1.48898913e+00  2.97447528e-01]
 [ 3.93552558e-01 -1.67138589e+00 -2.30418854e-01  1.46566021e+00
   1.34028467e+00  9.18980641e-01 -1.91368207e-01 -6.37006193e-01
  -1.56368933e+00 -2.73943067e-01]
 [-5.55258820e-01 -2.62083127e+00 -7.80216126e-01  2.71991789e-01
  -3.08702373e-02  3.42711590e+00 -8.53548181e-01  2.46064376e-02
  -1.17275031e+00  1.71501175e+00]
 [ 3.57388339e-01  4.09387382e-02  5.30817462e-01 -1.20963353e+00
  -3.62188518e-01  2.84237016e-01  2.49289374e-01 -1.50099706e-01
   5.09169748e-01  3.37931555e-01]
 [-2.05732640e-01 -1.05973311e+00  4.10066560e-01  7.71678384e-03
   1.14769440e+00  3.05233392e-01  1.07629917e+00 -7.23883327e-01
  -1.28267387e+00 -4.73270333e-01]
 [ 4.17386225e-02  7.73499664e-02 -6.00165498e-01  1.04280515e+00
   9.15945876e-01 -9.53356233e-01 -2.89435961e-01 -1.65975297e+00
   1.93440147e-01 -9.70823568e-01]
 [-1.39679744e+00 -1.82825654e+00 -7.49106391e-01 -1.97387913e+00
   6.56561826e-01  1.75917035e+00 -1.97930956e-01 -1.71095635e-01
   1.17387931e+00  5.14439258e-01]
 [-1.64574171e+00  1.99534885e+00 -5.60101511e-01  3.29445582e-01
  -1.18238138e+00 -1.48611706e+00 -5.78606648e-01 -2.74185666e-01
   6.14451439e-01 -9.30093095e-02]
 [ 9.05818235e-01 -3.69599252e-01 -1.30037670e+00 -6.95702036e-01
  -4.95671533e-01  9.40889458e-01  2.19741314e-01  4.88178084e-03
   1.47316535e+00  7.35952803e-01]
 [ 7.04038653e-01  1.52523955e+00  1.11767707e-01 -2.19765347e-03
  -3.24885542e+00  1.04805494e+00  1.03350081e+00 -5.96264557e-01
  -4.94125890e-01  2.28720672e+00]
 [-4.78348410e-01  2.06507125e+00 -1.33188885e+00  6.99949118e-01
  -1.23200008e+00 -1.53031237e+00 -1.28791077e+00 -8.65638915e-01
  -1.84010437e-01 -8.79089658e-02]
 [-3.07429206e-01 -2.73019495e-02 -1.54401652e+00 -4.31690189e-01
   1.02894904e+00 -9.22901304e-01  1.32913484e-01  8.82262785e-02
  -8.98302968e-02 -1.01722338e+00]
 [ 1.44569034e+00 -8.93022407e-01  4.67933234e-02 -1.36801332e+00
   1.53876580e+00 -2.75157199e-01  9.07580599e-01  4.97292534e-01
   3.73601424e-01 -9.73669382e-01]
 [-1.92871700e-01 -3.00346894e-02  1.18136336e+00  1.87178065e+00
   1.16715893e+00 -1.04807846e+00  4.97643800e-01 -1.05615079e+00
   1.54067039e-01 -1.15445871e+00]]
[0 0 0 0 1 0 1 0 0 0 0 1 0 0 0 0 0 0 1 1 0 0 0 0 0 0 1 0 1 1 0 0 1 1 1 0 1
 1 1 1 1 0 1 0 1 0 0 0 1 1 0 1 0 1 0 1 1 1 1 1 1 0 1 0 1 0 1 1 1 0 0 0 1 0
 0 1 1 0 1 1 0 1 1 1 0 1 1 1 1 0 1 1 1 0 0 0 0 1 1 1]

步骤三&步骤四:

GaussianNB:

代码:

from sklearn import datasets
# NOTE(review): sklearn.cross_validation was removed in scikit-learn 0.20;
# use sklearn.model_selection (KFold constructor/iteration API changed too).
from sklearn.model_selection import KFold
from sklearn.naive_bayes import GaussianNB
from sklearn import metrics

# Synthetic binary-classification problem: 1000 samples, 10 features.
X, y = datasets.make_classification(n_samples=1000, n_features=10, n_classes=2)

# 10-fold shuffled CV; only the last fold's split is kept after the loop,
# so the model below is trained/evaluated on a single fold.
kf = KFold(n_splits=10, shuffle=True)
for train_index, test_index in kf.split(X):
    X_train, y_train = X[train_index], y[train_index]
    X_test, y_test = X[test_index], y[test_index]

# Fit Gaussian Naive Bayes on the training fold and predict the test fold.
clf = GaussianNB()
clf.fit(X_train, y_train)
pred = clf.predict(X_test)
print(pred)
print(y_test)

# Evaluate: accuracy, F1 and ROC-AUC.  AUC here is computed from hard 0/1
# predictions; clf.predict_proba(X_test)[:, 1] would give a finer estimate.
acc = metrics.accuracy_score(y_test, pred)
print(acc)
f1 = metrics.f1_score(y_test, pred)
print(f1)
auc = metrics.roc_auc_score(y_test, pred)
print(auc)

输出:

[0 1 1 0 1 1 1 0 1 1 0 1 1 0 1 1 1 0 0 1 1 1 1 0 0 0 1 0 1 1 0 0 0 1 1 1 1
 0 0 0 0 0 1 1 0 0 1 0 1 1 0 1 0 1 0 1 0 1 0 1 0 0 0 1 1 1 0 1 1 1 1 0 1 0
 1 1 1 0 0 1 0 0 0 1 0 0 1 1 0 0 1 0 1 1 0 0 0 1 1 0]
[0 1 1 0 1 1 1 0 0 1 0 0 1 0 1 1 1 0 0 1 1 1 1 0 0 0 1 0 0 1 0 0 0 1 1 1 1
 0 0 0 0 0 1 1 0 0 1 0 1 1 0 1 0 1 0 1 0 1 0 1 0 0 0 1 1 1 0 1 1 1 1 0 1 0
 1 1 1 0 0 0 0 0 0 1 0 0 1 0 0 0 1 0 1 1 0 0 0 1 1 0]
0.95
0.9504950495049505
0.951923076923077

SVC (possible C values [1e-02, 1e-01, 1e00, 1e01, 1e02], RBF kernel)

代码:

from sklearn import datasets
# NOTE(review): sklearn.cross_validation was removed in scikit-learn 0.20;
# use sklearn.model_selection (KFold constructor/iteration API changed too).
from sklearn.model_selection import KFold
from sklearn.svm import SVC
from sklearn import metrics

# Synthetic binary-classification problem: 1000 samples, 10 features.
X, y = datasets.make_classification(n_samples=1000, n_features=10, n_classes=2)

# 10-fold shuffled CV; only the last fold's split survives the loop, so every
# C value below is trained and scored on the same single train/test partition.
kf = KFold(n_splits=10, shuffle=True)
for train_index, test_index in kf.split(X):
    X_train, y_train = X[train_index], y[train_index]
    X_test, y_test = X[test_index], y[test_index]

# Sweep the regularization strength C for an RBF-kernel SVM.
for C in [1e-02, 1e-01, 1e00, 1e01, 1e02]:
    clf = SVC(C, kernel='rbf', gamma=0.1)
    clf.fit(X_train, y_train)
    pred = clf.predict(X_test)
    print(pred)
    print(y_test)
    # Accuracy, F1 and ROC-AUC (AUC from hard labels; decision_function
    # scores would give a finer-grained AUC).
    acc = metrics.accuracy_score(y_test, pred)
    print(acc)
    f1 = metrics.f1_score(y_test, pred)
    print(f1)
    auc = metrics.roc_auc_score(y_test, pred)
    print(auc)

输出:

[0 0 0 1 1 1 0 0 0 1 1 0 0 0 1 1 0 1 1 1 1 0 0 0 0 0 1 0 0 0 0 1 0 1 1 0 0
 1 1 1 1 0 0 1 0 0 0 0 0 0 0 1 1 1 0 0 1 1 0 1 0 0 0 0 0 0 1 1 0 0 0 1 0 0
 0 0 1 1 1 0 0 1 1 1 1 0 0 1 0 1 1 0 0 0 1 1 1 1 0 0]
[0 0 0 1 1 1 0 0 1 1 1 0 1 0 1 1 1 1 1 1 1 1 0 0 0 1 1 0 0 0 0 1 0 1 1 1 0
 1 1 1 1 0 0 1 1 0 0 0 1 0 0 1 1 1 0 0 1 1 0 1 1 0 0 0 0 1 1 1 0 0 1 1 0 0
 0 0 1 1 1 1 0 1 1 1 1 0 0 1 1 1 1 1 0 0 1 1 1 1 0 0]
0.86
0.86
0.8771929824561404
[0 0 0 1 1 1 0 0 1 1 1 0 1 0 1 1 0 1 1 1 1 0 0 0 0 1 1 0 0 0 0 1 0 1 1 1 0
 1 1 1 1 0 0 1 0 0 0 0 0 0 0 1 1 1 0 0 1 1 0 1 1 0 0 0 0 1 1 1 0 0 0 1 0 0
 0 0 1 1 1 0 0 1 1 1 1 0 0 1 0 1 1 0 0 0 1 1 1 1 0 0]
[0 0 0 1 1 1 0 0 1 1 1 0 1 0 1 1 1 1 1 1 1 1 0 0 0 1 1 0 0 0 0 1 0 1 1 1 0
 1 1 1 1 0 0 1 1 0 0 0 1 0 0 1 1 1 0 0 1 1 0 1 1 0 0 0 0 1 1 1 0 0 1 1 0 0
 0 0 1 1 1 1 0 1 1 1 1 0 0 1 1 1 1 1 0 0 1 1 1 1 0 0]
0.92
0.9245283018867925
0.9298245614035088
[0 0 0 1 1 1 0 0 1 1 1 0 1 0 1 1 0 1 1 1 1 0 0 0 0 1 1 0 0 0 0 1 0 1 1 1 0
 1 1 1 1 0 0 1 1 0 0 0 0 0 0 1 1 1 0 0 1 1 0 1 1 0 0 0 0 1 1 1 0 0 0 1 0 0
 0 0 1 1 1 0 0 1 1 1 1 0 0 1 0 1 1 1 0 0 1 1 1 1 0 0]
[0 0 0 1 1 1 0 0 1 1 1 0 1 0 1 1 1 1 1 1 1 1 0 0 0 1 1 0 0 0 0 1 0 1 1 1 0
 1 1 1 1 0 0 1 1 0 0 0 1 0 0 1 1 1 0 0 1 1 0 1 1 0 0 0 0 1 1 1 0 0 1 1 0 0
 0 0 1 1 1 1 0 1 1 1 1 0 0 1 1 1 1 1 0 0 1 1 1 1 0 0]
0.94
0.9444444444444444
0.9473684210526316
[0 0 0 1 1 1 0 0 1 1 1 0 1 1 1 1 0 1 1 1 1 0 0 0 1 1 1 0 0 0 0 1 0 1 1 1 0
 1 1 1 1 0 0 1 1 0 0 0 1 0 0 1 1 1 0 0 1 1 0 1 1 0 0 0 1 1 1 1 0 0 0 1 0 0
 0 0 1 1 1 0 0 1 1 1 1 0 0 1 0 1 1 1 0 0 1 1 1 1 0 0]
[0 0 0 1 1 1 0 0 1 1 1 0 1 0 1 1 1 1 1 1 1 1 0 0 0 1 1 0 0 0 0 1 0 1 1 1 0
 1 1 1 1 0 0 1 1 0 0 0 1 0 0 1 1 1 0 0 1 1 0 1 1 0 0 0 0 1 1 1 0 0 1 1 0 0
 0 0 1 1 1 1 0 1 1 1 1 0 0 1 1 1 1 1 0 0 1 1 1 1 0 0]
0.92
0.9285714285714285
0.9212566299469604
[0 0 0 1 1 1 0 0 1 1 1 1 1 1 1 1 0 1 1 1 1 0 1 0 0 1 1 0 0 0 0 1 0 1 1 0 0
 1 1 1 1 0 0 1 1 0 0 0 0 0 0 1 1 1 0 0 1 1 0 1 1 0 1 0 1 1 1 1 0 0 0 1 0 0
 0 0 1 1 1 0 0 1 1 1 1 0 1 1 1 1 1 0 0 0 1 1 1 1 0 0]
[0 0 0 1 1 1 0 0 1 1 1 0 1 0 1 1 1 1 1 1 1 1 0 0 0 1 1 0 0 0 0 1 0 1 1 1 0
 1 1 1 1 0 0 1 1 0 0 0 1 0 0 1 1 1 0 0 1 1 0 1 1 0 0 0 0 1 1 1 0 0 1 1 0 0
 0 0 1 1 1 1 0 1 1 1 1 0 0 1 1 1 1 1 0 0 1 1 1 1 0 0]
0.87
0.8849557522123894
0.8688290493676051

RandomForestClassifier (possible n_estimators values [10, 100, 1000])

代码:

from sklearn import datasets
# NOTE(review): sklearn.cross_validation was removed in scikit-learn 0.20;
# use sklearn.model_selection (KFold constructor/iteration API changed too).
from sklearn.model_selection import KFold
from sklearn.ensemble import RandomForestClassifier
from sklearn import metrics

# Synthetic binary-classification problem: 1000 samples, 10 features.
X, y = datasets.make_classification(n_samples=1000, n_features=10, n_classes=2)

# 10-fold shuffled CV; only the last fold's split survives the loop, so each
# forest size below is trained and scored on the same train/test partition.
kf = KFold(n_splits=10, shuffle=True)
for train_index, test_index in kf.split(X):
    X_train, y_train = X[train_index], y[train_index]
    X_test, y_test = X[test_index], y[test_index]

# Sweep the ensemble size.  BUG FIX: the original hard-coded n_estimators=6,
# ignoring the loop variable, so all three iterations trained the same model.
for n_estimators in [10, 100, 1000]:
    clf = RandomForestClassifier(n_estimators=n_estimators)
    clf.fit(X_train, y_train)
    pred = clf.predict(X_test)
    print(pred)
    print(y_test)
    # Accuracy, F1 and ROC-AUC (AUC from hard labels; predict_proba would
    # give a finer-grained AUC).
    acc = metrics.accuracy_score(y_test, pred)
    print(acc)
    f1 = metrics.f1_score(y_test, pred)
    print(f1)
    auc = metrics.roc_auc_score(y_test, pred)
    print(auc)

输出:

[0 0 1 0 1 1 1 0 1 1 1 1 0 1 1 0 0 1 0 0 1 1 0 0 1 1 1 1 1 1 1 1 0 1 0 0 1
 1 0 1 1 0 0 0 0 1 0 1 1 0 0 1 0 1 1 1 1 1 0 1 1 1 1 0 1 1 0 0 1 0 0 1 0 0
 1 0 1 0 0 0 1 0 1 0 0 1 0 1 0 0 1 1 0 0 1 0 1 0 1 0]
[0 0 1 1 1 1 1 0 1 1 0 1 0 1 1 0 0 1 0 0 1 1 0 0 1 1 1 0 1 0 1 0 0 1 1 0 1
 1 0 1 1 0 1 1 0 1 0 1 1 0 0 1 1 0 1 1 1 1 0 1 1 1 1 0 1 1 0 0 1 0 0 1 0 0
 1 0 1 0 0 0 1 1 1 1 0 1 0 1 0 0 1 1 0 0 1 0 1 0 1 0]
0.88
0.8909090909090909
0.8806818181818181
[0 0 1 0 1 0 1 0 1 1 0 1 0 1 1 0 0 1 0 0 1 1 0 0 1 1 1 1 1 0 1 1 1 1 0 0 1
 1 0 1 1 0 0 0 0 1 0 1 1 0 0 1 0 1 1 1 1 1 0 1 1 1 1 0 1 1 0 0 1 0 0 1 0 0
 1 0 1 0 0 0 1 1 1 0 0 1 0 1 0 0 1 1 0 0 1 0 1 0 1 0]
[0 0 1 1 1 1 1 0 1 1 0 1 0 1 1 0 0 1 0 0 1 1 0 0 1 1 1 0 1 0 1 0 0 1 1 0 1
 1 0 1 1 0 1 1 0 1 0 1 1 0 0 1 1 0 1 1 1 1 0 1 1 1 1 0 1 1 0 0 1 0 0 1 0 0
 1 0 1 0 0 0 1 1 1 1 0 1 0 1 0 0 1 1 0 0 1 0 1 0 1 0]
0.89
0.8990825688073395
0.8920454545454546
[0 0 1 0 1 1 1 0 1 1 0 1 0 1 1 0 0 1 0 0 1 1 0 0 1 1 1 1 1 1 1 1 1 1 0 0 1
 1 0 1 1 0 0 0 0 1 0 1 1 0 0 1 0 0 1 1 1 1 0 1 1 1 1 1 1 1 0 0 1 0 0 1 0 0
 1 0 1 0 0 0 1 1 1 0 0 1 0 1 0 0 1 1 0 0 1 0 1 0 1 0]
[0 0 1 1 1 1 1 0 1 1 0 1 0 1 1 0 0 1 0 0 1 1 0 0 1 1 1 0 1 0 1 0 0 1 1 0 1
 1 0 1 1 0 1 1 0 1 0 1 1 0 0 1 1 0 1 1 1 1 0 1 1 1 1 0 1 1 0 0 1 0 0 1 0 0
 1 0 1 0 0 0 1 1 1 1 0 1 0 1 0 0 1 1 0 0 1 0 1 0 1 0]
0.89
0.9009009009009009
0.8896103896103895

 

 

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值