from sklearn import datasets # 用于调用sklearn自带的数据集
from sklearn.model_selection import KFold
import matplotlib.pyplot as plt
%matplotlib inline
import numpy as np
import pandas as pd
datasets.load_wine
<function sklearn.datasets.base.load_wine(return_X_y=False)>
wine_data=datasets.load_wine()
print(wine_data.feature_names)
['alcohol', 'malic_acid', 'ash', 'alcalinity_of_ash', 'magnesium', 'total_phenols', 'flavanoids', 'nonflavanoid_phenols', 'proanthocyanins', 'color_intensity', 'hue', 'od280/od315_of_diluted_wines', 'proline']
data_input = wine_data.data # 输入输出数据
data_output = wine_data.target
data_input
array([[1.423e+01, 1.710e+00, 2.430e+00, ..., 1.040e+00, 3.920e+00,
1.065e+03],
[1.320e+01, 1.780e+00, 2.140e+00, ..., 1.050e+00, 3.400e+00,
1.050e+03],
[1.316e+01, 2.360e+00, 2.670e+00, ..., 1.030e+00, 3.170e+00,
1.185e+03],
...,
[1.327e+01, 4.280e+00, 2.260e+00, ..., 5.900e-01, 1.560e+00,
8.350e+02],
[1.317e+01, 2.590e+00, 2.370e+00, ..., 6.000e-01, 1.620e+00,
8.400e+02],
[1.413e+01, 4.100e+00, 2.740e+00, ..., 6.100e-01, 1.600e+00,
5.600e+02]])
data_output
array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2])
from sklearn.linear_model import LogisticRegression # 逻辑回归模型
from sklearn.metrics import f1_score,log_loss,classification_report
kf=KFold(4,shuffle=True)
kf.get_n_splits(data_input)
lr=LogisticRegression()
for train_index,test_index in kf.split(data_input,data_output):
print(train_index,test_index)
[ 0 1 2 3 6 7 8 9 10 11 13 15 18 19 20 22 23 24
26 27 28 29 30 31 32 33 34 37 39 41 43 44 46 49 50 51
53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 70 71
73 74 77 79 80 81 82 84 85 86 87 88 89 90 91 92 94 95
98 100 101 105 106 107 108 110 111 112 113 115 116 117 118 119 120 121
123 125 127 128 129 130 131 132 133 135 136 140 141 142 143 145 146 147
148 149 151 152 153 154 157 158 159 160 161 163 164 165 166 167 168 169
170 171 172 173 174 175 177] [ 4 5 12 14 16 17 21 25 35 36 38 40 42 45 47 48 52 69
72 75 76 78 83 93 96 97 99 102 103 104 109 114 122 124 126 134
137 138 139 144 150 155 156 162 176]
[ 0 1 4 5 6 8 9 10 12 13 14 16 17 19 21 23 24 25
26 28 29 30 31 32 34 35 36 38 39 40 42 45 46 47 48 49
50 52 56 57 58 59 60 63 64 65 66 67 68 69 70 71 72 74
75 76 77 78 79 80 81 82 83 85 86 87 88 89 90 92 93 94
95 96 97 98 99 100 102 103 104 105 106 107 108 109 111 112 113 114
115 116 117 121 122 124 125 126 128 130 131 132 133 134 135 136 137 138
139 142 143 144 147 148 150 152 154 155 156 157 158 159 160 161 162 164
166 167 170 172 174 176 177] [ 2 3 7 11 15 18 20 22 27 33 37 41 43 44 51 53 54 55
61 62 73 84 91 101 110 118 119 120 123 127 129 140 141 145 146 149
151 153 163 165 168 169 171 173 175]
[ 0 1 2 3 4 5 6 7 8 9 11 12 14 15 16 17 18 20
21 22 23 25 27 30 31 33 35 36 37 38 39 40 41 42 43 44
45 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 67
68 69 70 71 72 73 75 76 78 80 81 83 84 85 89 91 93 94
95 96 97 99 100 101 102 103 104 109 110 114 116 118 119 120 121 122
123 124 125 126 127 129 130 131 134 135 136 137 138 139 140 141 143 144
145 146 147 149 150 151 152 153 155 156 158 159 161 162 163 165 167 168
169 170 171 173 174 175 176 177] [ 10 13 19 24 26 28 29 32 34 46 63 64 65 66 74 77 79 82
86 87 88 90 92 98 105 106 107 108 111 112 113 115 117 128 132 133
142 148 154 157 160 164 166 172]
[ 2 3 4 5 7 10 11 12 13 14 15 16 17 18 19 20 21 22
24 25 26 27 28 29 32 33 34 35 36 37 38 40 41 42 43 44
45 46 47 48 51 52 53 54 55 61 62 63 64 65 66 69 72 73
74 75 76 77 78 79 82 83 84 86 87 88 90 91 92 93 96 97
98 99 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 117
118 119 120 122 123 124 126 127 128 129 132 133 134 137 138 139 140 141
142 144 145 146 148 149 150 151 153 154 155 156 157 160 162 163 164 165
166 168 169 171 172 173 175 176] [ 0 1 6 8 9 23 30 31 39 49 50 56 57 58 59 60 67 68
70 71 80 81 85 89 94 95 100 116 121 125 130 131 135 136 143 147
152 158 159 161 167 170 174 177]
for train_index,test_index in kf.split(data_input,data_output):
#print(train_index,test_index)
lr.fit(data_input[train_index],data_output[train_index])
y_pre_lr=lr.predict(data_input[test_index])
y=data_output[test_index]
print(f1_score(y,y_pre_lr,average=None))
[1. 1. 1.]
[0.96296296 0.97297297 1. ]
[1. 0.94444444 0.90909091]
[0.9375 0.88235294 0.90909091]
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.
FutureWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:460: FutureWarning: Default multi_class will be changed to 'auto' in 0.22. Specify the multi_class option to silence this warning.
"this warning.", FutureWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.
FutureWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:460: FutureWarning: Default multi_class will be changed to 'auto' in 0.22. Specify the multi_class option to silence this warning.
"this warning.", FutureWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.
FutureWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:460: FutureWarning: Default multi_class will be changed to 'auto' in 0.22. Specify the multi_class option to silence this warning.
"this warning.", FutureWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.
FutureWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:460: FutureWarning: Default multi_class will be changed to 'auto' in 0.22. Specify the multi_class option to silence this warning.
"this warning.", FutureWarning)
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn import svm
from sklearn.model_selection import cross_val_score
rf_class=RandomForestClassifier(n_estimators=10)
log_class=LogisticRegression()
svm_class=svm.LinearSVC()
print(cross_val_score(rf_class,data_input,data_output,scoring='accuracy',cv=4))
[0.91111111 0.97777778 0.97777778 1. ]
accuracy=cross_val_score(rf_class,data_input,data_output,scoring='accuracy',cv=4).mean()*100
print('Accuracy of Random Forest is:',accuracy)
Accuracy of Random Forest is: 95.55555555555556
accuracy=cross_val_score(log_class,data_input,data_output,scoring='accuracy',cv=4).mean()*100
print('Accuracy of Logistic is:',accuracy)
Accuracy of Logistic is: 96.11111111111111
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.
FutureWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:460: FutureWarning: Default multi_class will be changed to 'auto' in 0.22. Specify the multi_class option to silence this warning.
"this warning.", FutureWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.
FutureWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:460: FutureWarning: Default multi_class will be changed to 'auto' in 0.22. Specify the multi_class option to silence this warning.
"this warning.", FutureWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.
FutureWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:460: FutureWarning: Default multi_class will be changed to 'auto' in 0.22. Specify the multi_class option to silence this warning.
"this warning.", FutureWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:433: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.
FutureWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:460: FutureWarning: Default multi_class will be changed to 'auto' in 0.22. Specify the multi_class option to silence this warning.
"this warning.", FutureWarning)
accuracy=cross_val_score(svm_class,data_input,data_output,scoring='accuracy',cv=4).mean()*100
print('Accuracy of SVM is:',accuracy)
Accuracy of SVM is: 88.86304909560724
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/svm/base.py:931: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.
"the number of iterations.", ConvergenceWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/svm/base.py:931: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.
"the number of iterations.", ConvergenceWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/svm/base.py:931: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.
"the number of iterations.", ConvergenceWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/svm/base.py:931: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.
"the number of iterations.", ConvergenceWarning)
#正则化
import numpy as np
import pandas as pd
import random
import matplotlib.pyplot as plt
%matplotlib inline
from matplotlib.pylab import rcParams
rcParams['figure.figsize'] = 12, 10
x = np.array([1.4*i*np.pi/180 for i in range(0,300,4)])
np.random.seed(20) #随机数
y = np.sin(x) + np.random.normal(0,0.2,len(x)) # 加噪音
data = pd.DataFrame(np.column_stack([x,y]),columns=['x','y'])
plt.plot(data['x'],data['y'],'.')
[<matplotlib.lines.Line2D at 0x11fb34a20>]
# 模型复杂度设置
for i in range(2,16):
colname = 'x_%d'%i # 变量名变为 x_i形式
data[colname] = data['x']**i
print(data.head()) # 显示五行
x y x_2 x_3 x_4 x_5 x_6
0 0.000000 0.176779 0.000000 0.000000 0.000000 0.000000 0.000000e+00
1 0.097738 0.136756 0.009553 0.000934 0.000091 0.000009 8.717508e-07
2 0.195477 0.265742 0.038211 0.007469 0.001460 0.000285 5.579205e-05
3 0.293215 -0.179621 0.085975 0.025209 0.007392 0.002167 6.355064e-04
4 0.390954 0.164104 0.152845 0.059755 0.023362 0.009133 3.570691e-03
x_7 x_8 x_9 x_10 x_11
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 8.520356e-08 8.327663e-09 8.139328e-10 7.955252e-11 7.775339e-12
2 1.090606e-05 2.131882e-06 4.167336e-07 8.146178e-08 1.592389e-08
3 1.863402e-04 5.463780e-05 1.602064e-05 4.697497e-06 1.377378e-06
4 1.395975e-03 5.457617e-04 2.133676e-04 8.341686e-05 3.261214e-05
x_12 x_13 x_14 x_15
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 7.599495e-13 7.427628e-14 7.259647e-15 7.095466e-16
2 3.112753e-09 6.084713e-10 1.189421e-10 2.325042e-11
3 4.038683e-07 1.184204e-07 3.472267e-08 1.018122e-08
4 1.274984e-05 4.984597e-06 1.948747e-06 7.618699e-07
x y x_2 x_3 x_4 x_5 x_6
0 0.000000 0.176779 0.000000 0.000000 0.000000 0.000000 0.000000e+00
1 0.097738 0.136756 0.009553 0.000934 0.000091 0.000009 8.717508e-07
2 0.195477 0.265742 0.038211 0.007469 0.001460 0.000285 5.579205e-05
3 0.293215 -0.179621 0.085975 0.025209 0.007392 0.002167 6.355064e-04
4 0.390954 0.164104 0.152845 0.059755 0.023362 0.009133 3.570691e-03
x_7 x_8 x_9 x_10 x_11
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 8.520356e-08 8.327663e-09 8.139328e-10 7.955252e-11 7.775339e-12
2 1.090606e-05 2.131882e-06 4.167336e-07 8.146178e-08 1.592389e-08
3 1.863402e-04 5.463780e-05 1.602064e-05 4.697497e-06 1.377378e-06
4 1.395975e-03 5.457617e-04 2.133676e-04 8.341686e-05 3.261214e-05
x_12 x_13 x_14 x_15
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 7.599495e-13 7.427628e-14 7.259647e-15 7.095466e-16
2 3.112753e-09 6.084713e-10 1.189421e-10 2.325042e-11
3 4.038683e-07 1.184204e-07 3.472267e-08 1.018122e-08
4 1.274984e-05 4.984597e-06 1.948747e-06 7.618699e-07
x y x_2 x_3 x_4 x_5 x_6
0 0.000000 0.176779 0.000000 0.000000 0.000000 0.000000 0.000000e+00
1 0.097738 0.136756 0.009553 0.000934 0.000091 0.000009 8.717508e-07
2 0.195477 0.265742 0.038211 0.007469 0.001460 0.000285 5.579205e-05
3 0.293215 -0.179621 0.085975 0.025209 0.007392 0.002167 6.355064e-04
4 0.390954 0.164104 0.152845 0.059755 0.023362 0.009133 3.570691e-03
x_7 x_8 x_9 x_10 x_11
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 8.520356e-08 8.327663e-09 8.139328e-10 7.955252e-11 7.775339e-12
2 1.090606e-05 2.131882e-06 4.167336e-07 8.146178e-08 1.592389e-08
3 1.863402e-04 5.463780e-05 1.602064e-05 4.697497e-06 1.377378e-06
4 1.395975e-03 5.457617e-04 2.133676e-04 8.341686e-05 3.261214e-05
x_12 x_13 x_14 x_15
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 7.599495e-13 7.427628e-14 7.259647e-15 7.095466e-16
2 3.112753e-09 6.084713e-10 1.189421e-10 2.325042e-11
3 4.038683e-07 1.184204e-07 3.472267e-08 1.018122e-08
4 1.274984e-05 4.984597e-06 1.948747e-06 7.618699e-07
x y x_2 x_3 x_4 x_5 x_6
0 0.000000 0.176779 0.000000 0.000000 0.000000 0.000000 0.000000e+00
1 0.097738 0.136756 0.009553 0.000934 0.000091 0.000009 8.717508e-07
2 0.195477 0.265742 0.038211 0.007469 0.001460 0.000285 5.579205e-05
3 0.293215 -0.179621 0.085975 0.025209 0.007392 0.002167 6.355064e-04
4 0.390954 0.164104 0.152845 0.059755 0.023362 0.009133 3.570691e-03
x_7 x_8 x_9 x_10 x_11
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 8.520356e-08 8.327663e-09 8.139328e-10 7.955252e-11 7.775339e-12
2 1.090606e-05 2.131882e-06 4.167336e-07 8.146178e-08 1.592389e-08
3 1.863402e-04 5.463780e-05 1.602064e-05 4.697497e-06 1.377378e-06
4 1.395975e-03 5.457617e-04 2.133676e-04 8.341686e-05 3.261214e-05
x_12 x_13 x_14 x_15
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 7.599495e-13 7.427628e-14 7.259647e-15 7.095466e-16
2 3.112753e-09 6.084713e-10 1.189421e-10 2.325042e-11
3 4.038683e-07 1.184204e-07 3.472267e-08 1.018122e-08
4 1.274984e-05 4.984597e-06 1.948747e-06 7.618699e-07
x y x_2 x_3 x_4 x_5 x_6
0 0.000000 0.176779 0.000000 0.000000 0.000000 0.000000 0.000000e+00
1 0.097738 0.136756 0.009553 0.000934 0.000091 0.000009 8.717508e-07
2 0.195477 0.265742 0.038211 0.007469 0.001460 0.000285 5.579205e-05
3 0.293215 -0.179621 0.085975 0.025209 0.007392 0.002167 6.355064e-04
4 0.390954 0.164104 0.152845 0.059755 0.023362 0.009133 3.570691e-03
x_7 x_8 x_9 x_10 x_11
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 8.520356e-08 8.327663e-09 8.139328e-10 7.955252e-11 7.775339e-12
2 1.090606e-05 2.131882e-06 4.167336e-07 8.146178e-08 1.592389e-08
3 1.863402e-04 5.463780e-05 1.602064e-05 4.697497e-06 1.377378e-06
4 1.395975e-03 5.457617e-04 2.133676e-04 8.341686e-05 3.261214e-05
x_12 x_13 x_14 x_15
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 7.599495e-13 7.427628e-14 7.259647e-15 7.095466e-16
2 3.112753e-09 6.084713e-10 1.189421e-10 2.325042e-11
3 4.038683e-07 1.184204e-07 3.472267e-08 1.018122e-08
4 1.274984e-05 4.984597e-06 1.948747e-06 7.618699e-07
x y x_2 x_3 x_4 x_5 x_6
0 0.000000 0.176779 0.000000 0.000000 0.000000 0.000000 0.000000e+00
1 0.097738 0.136756 0.009553 0.000934 0.000091 0.000009 8.717508e-07
2 0.195477 0.265742 0.038211 0.007469 0.001460 0.000285 5.579205e-05
3 0.293215 -0.179621 0.085975 0.025209 0.007392 0.002167 6.355064e-04
4 0.390954 0.164104 0.152845 0.059755 0.023362 0.009133 3.570691e-03
x_7 x_8 x_9 x_10 x_11
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 8.520356e-08 8.327663e-09 8.139328e-10 7.955252e-11 7.775339e-12
2 1.090606e-05 2.131882e-06 4.167336e-07 8.146178e-08 1.592389e-08
3 1.863402e-04 5.463780e-05 1.602064e-05 4.697497e-06 1.377378e-06
4 1.395975e-03 5.457617e-04 2.133676e-04 8.341686e-05 3.261214e-05
x_12 x_13 x_14 x_15
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 7.599495e-13 7.427628e-14 7.259647e-15 7.095466e-16
2 3.112753e-09 6.084713e-10 1.189421e-10 2.325042e-11
3 4.038683e-07 1.184204e-07 3.472267e-08 1.018122e-08
4 1.274984e-05 4.984597e-06 1.948747e-06 7.618699e-07
x y x_2 x_3 x_4 x_5 x_6
0 0.000000 0.176779 0.000000 0.000000 0.000000 0.000000 0.000000e+00
1 0.097738 0.136756 0.009553 0.000934 0.000091 0.000009 8.717508e-07
2 0.195477 0.265742 0.038211 0.007469 0.001460 0.000285 5.579205e-05
3 0.293215 -0.179621 0.085975 0.025209 0.007392 0.002167 6.355064e-04
4 0.390954 0.164104 0.152845 0.059755 0.023362 0.009133 3.570691e-03
x_7 x_8 x_9 x_10 x_11
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 8.520356e-08 8.327663e-09 8.139328e-10 7.955252e-11 7.775339e-12
2 1.090606e-05 2.131882e-06 4.167336e-07 8.146178e-08 1.592389e-08
3 1.863402e-04 5.463780e-05 1.602064e-05 4.697497e-06 1.377378e-06
4 1.395975e-03 5.457617e-04 2.133676e-04 8.341686e-05 3.261214e-05
x_12 x_13 x_14 x_15
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 7.599495e-13 7.427628e-14 7.259647e-15 7.095466e-16
2 3.112753e-09 6.084713e-10 1.189421e-10 2.325042e-11
3 4.038683e-07 1.184204e-07 3.472267e-08 1.018122e-08
4 1.274984e-05 4.984597e-06 1.948747e-06 7.618699e-07
x y x_2 x_3 x_4 x_5 x_6
0 0.000000 0.176779 0.000000 0.000000 0.000000 0.000000 0.000000e+00
1 0.097738 0.136756 0.009553 0.000934 0.000091 0.000009 8.717508e-07
2 0.195477 0.265742 0.038211 0.007469 0.001460 0.000285 5.579205e-05
3 0.293215 -0.179621 0.085975 0.025209 0.007392 0.002167 6.355064e-04
4 0.390954 0.164104 0.152845 0.059755 0.023362 0.009133 3.570691e-03
x_7 x_8 x_9 x_10 x_11
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 8.520356e-08 8.327663e-09 8.139328e-10 7.955252e-11 7.775339e-12
2 1.090606e-05 2.131882e-06 4.167336e-07 8.146178e-08 1.592389e-08
3 1.863402e-04 5.463780e-05 1.602064e-05 4.697497e-06 1.377378e-06
4 1.395975e-03 5.457617e-04 2.133676e-04 8.341686e-05 3.261214e-05
x_12 x_13 x_14 x_15
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 7.599495e-13 7.427628e-14 7.259647e-15 7.095466e-16
2 3.112753e-09 6.084713e-10 1.189421e-10 2.325042e-11
3 4.038683e-07 1.184204e-07 3.472267e-08 1.018122e-08
4 1.274984e-05 4.984597e-06 1.948747e-06 7.618699e-07
x y x_2 x_3 x_4 x_5 x_6
0 0.000000 0.176779 0.000000 0.000000 0.000000 0.000000 0.000000e+00
1 0.097738 0.136756 0.009553 0.000934 0.000091 0.000009 8.717508e-07
2 0.195477 0.265742 0.038211 0.007469 0.001460 0.000285 5.579205e-05
3 0.293215 -0.179621 0.085975 0.025209 0.007392 0.002167 6.355064e-04
4 0.390954 0.164104 0.152845 0.059755 0.023362 0.009133 3.570691e-03
x_7 x_8 x_9 x_10 x_11
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 8.520356e-08 8.327663e-09 8.139328e-10 7.955252e-11 7.775339e-12
2 1.090606e-05 2.131882e-06 4.167336e-07 8.146178e-08 1.592389e-08
3 1.863402e-04 5.463780e-05 1.602064e-05 4.697497e-06 1.377378e-06
4 1.395975e-03 5.457617e-04 2.133676e-04 8.341686e-05 3.261214e-05
x_12 x_13 x_14 x_15
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 7.599495e-13 7.427628e-14 7.259647e-15 7.095466e-16
2 3.112753e-09 6.084713e-10 1.189421e-10 2.325042e-11
3 4.038683e-07 1.184204e-07 3.472267e-08 1.018122e-08
4 1.274984e-05 4.984597e-06 1.948747e-06 7.618699e-07
x y x_2 x_3 x_4 x_5 x_6
0 0.000000 0.176779 0.000000 0.000000 0.000000 0.000000 0.000000e+00
1 0.097738 0.136756 0.009553 0.000934 0.000091 0.000009 8.717508e-07
2 0.195477 0.265742 0.038211 0.007469 0.001460 0.000285 5.579205e-05
3 0.293215 -0.179621 0.085975 0.025209 0.007392 0.002167 6.355064e-04
4 0.390954 0.164104 0.152845 0.059755 0.023362 0.009133 3.570691e-03
x_7 x_8 x_9 x_10 x_11
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 8.520356e-08 8.327663e-09 8.139328e-10 7.955252e-11 7.775339e-12
2 1.090606e-05 2.131882e-06 4.167336e-07 8.146178e-08 1.592389e-08
3 1.863402e-04 5.463780e-05 1.602064e-05 4.697497e-06 1.377378e-06
4 1.395975e-03 5.457617e-04 2.133676e-04 8.341686e-05 3.261214e-05
x_12 x_13 x_14 x_15
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 7.599495e-13 7.427628e-14 7.259647e-15 7.095466e-16
2 3.112753e-09 6.084713e-10 1.189421e-10 2.325042e-11
3 4.038683e-07 1.184204e-07 3.472267e-08 1.018122e-08
4 1.274984e-05 4.984597e-06 1.948747e-06 7.618699e-07
x y x_2 x_3 x_4 x_5 x_6
0 0.000000 0.176779 0.000000 0.000000 0.000000 0.000000 0.000000e+00
1 0.097738 0.136756 0.009553 0.000934 0.000091 0.000009 8.717508e-07
2 0.195477 0.265742 0.038211 0.007469 0.001460 0.000285 5.579205e-05
3 0.293215 -0.179621 0.085975 0.025209 0.007392 0.002167 6.355064e-04
4 0.390954 0.164104 0.152845 0.059755 0.023362 0.009133 3.570691e-03
x_7 x_8 x_9 x_10 x_11
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 8.520356e-08 8.327663e-09 8.139328e-10 7.955252e-11 7.775339e-12
2 1.090606e-05 2.131882e-06 4.167336e-07 8.146178e-08 1.592389e-08
3 1.863402e-04 5.463780e-05 1.602064e-05 4.697497e-06 1.377378e-06
4 1.395975e-03 5.457617e-04 2.133676e-04 8.341686e-05 3.261214e-05
x_12 x_13 x_14 x_15
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 7.599495e-13 7.427628e-14 7.259647e-15 7.095466e-16
2 3.112753e-09 6.084713e-10 1.189421e-10 2.325042e-11
3 4.038683e-07 1.184204e-07 3.472267e-08 1.018122e-08
4 1.274984e-05 4.984597e-06 1.948747e-06 7.618699e-07
x y x_2 x_3 x_4 x_5 x_6
0 0.000000 0.176779 0.000000 0.000000 0.000000 0.000000 0.000000e+00
1 0.097738 0.136756 0.009553 0.000934 0.000091 0.000009 8.717508e-07
2 0.195477 0.265742 0.038211 0.007469 0.001460 0.000285 5.579205e-05
3 0.293215 -0.179621 0.085975 0.025209 0.007392 0.002167 6.355064e-04
4 0.390954 0.164104 0.152845 0.059755 0.023362 0.009133 3.570691e-03
x_7 x_8 x_9 x_10 x_11
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 8.520356e-08 8.327663e-09 8.139328e-10 7.955252e-11 7.775339e-12
2 1.090606e-05 2.131882e-06 4.167336e-07 8.146178e-08 1.592389e-08
3 1.863402e-04 5.463780e-05 1.602064e-05 4.697497e-06 1.377378e-06
4 1.395975e-03 5.457617e-04 2.133676e-04 8.341686e-05 3.261214e-05
x_12 x_13 x_14 x_15
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 7.599495e-13 7.427628e-14 7.259647e-15 7.095466e-16
2 3.112753e-09 6.084713e-10 1.189421e-10 2.325042e-11
3 4.038683e-07 1.184204e-07 3.472267e-08 1.018122e-08
4 1.274984e-05 4.984597e-06 1.948747e-06 7.618699e-07
x y x_2 x_3 x_4 x_5 x_6
0 0.000000 0.176779 0.000000 0.000000 0.000000 0.000000 0.000000e+00
1 0.097738 0.136756 0.009553 0.000934 0.000091 0.000009 8.717508e-07
2 0.195477 0.265742 0.038211 0.007469 0.001460 0.000285 5.579205e-05
3 0.293215 -0.179621 0.085975 0.025209 0.007392 0.002167 6.355064e-04
4 0.390954 0.164104 0.152845 0.059755 0.023362 0.009133 3.570691e-03
x_7 x_8 x_9 x_10 x_11
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 8.520356e-08 8.327663e-09 8.139328e-10 7.955252e-11 7.775339e-12
2 1.090606e-05 2.131882e-06 4.167336e-07 8.146178e-08 1.592389e-08
3 1.863402e-04 5.463780e-05 1.602064e-05 4.697497e-06 1.377378e-06
4 1.395975e-03 5.457617e-04 2.133676e-04 8.341686e-05 3.261214e-05
x_12 x_13 x_14 x_15
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 7.599495e-13 7.427628e-14 7.259647e-15 7.095466e-16
2 3.112753e-09 6.084713e-10 1.189421e-10 2.325042e-11
3 4.038683e-07 1.184204e-07 3.472267e-08 1.018122e-08
4 1.274984e-05 4.984597e-06 1.948747e-06 7.618699e-07
x y x_2 x_3 x_4 x_5 x_6
0 0.000000 0.176779 0.000000 0.000000 0.000000 0.000000 0.000000e+00
1 0.097738 0.136756 0.009553 0.000934 0.000091 0.000009 8.717508e-07
2 0.195477 0.265742 0.038211 0.007469 0.001460 0.000285 5.579205e-05
3 0.293215 -0.179621 0.085975 0.025209 0.007392 0.002167 6.355064e-04
4 0.390954 0.164104 0.152845 0.059755 0.023362 0.009133 3.570691e-03
x_7 x_8 x_9 x_10 x_11
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 8.520356e-08 8.327663e-09 8.139328e-10 7.955252e-11 7.775339e-12
2 1.090606e-05 2.131882e-06 4.167336e-07 8.146178e-08 1.592389e-08
3 1.863402e-04 5.463780e-05 1.602064e-05 4.697497e-06 1.377378e-06
4 1.395975e-03 5.457617e-04 2.133676e-04 8.341686e-05 3.261214e-05
x_12 x_13 x_14 x_15
0 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00
1 7.599495e-13 7.427628e-14 7.259647e-15 7.095466e-16
2 3.112753e-09 6.084713e-10 1.189421e-10 2.325042e-11
3 4.038683e-07 1.184204e-07 3.472267e-08 1.018122e-08
4 1.274984e-05 4.984597e-06 1.948747e-06 7.618699e-07
# 模型复杂度可变
from sklearn.linear_model import LinearRegression
def linear_regression(data, power, models_to_plot):
# 初始化预测器
predictors=['x']
if power>=2:
predictors.extend(['x_%d'%i for i in range(2,power+1)])
# 模型训练
linreg = LinearRegression(normalize=True)
linreg.fit(data[predictors],data['y'])
# 预测
y_pred = linreg.predict(data[predictors])
# 是否要画图(复杂度是否在models_to_plot中)为了便于比较选择性画图
if power in models_to_plot:
plt.subplot(models_to_plot[power])
plt.tight_layout()
plt.plot(data['x'],y_pred)
plt.plot(data['x'],data['y'],'.')
plt.title('Plot for power: %d'%power)
# 返回结果
rss = sum((y_pred-data['y'])**2)
ret = [rss]
ret.extend([linreg.intercept_])
ret.extend(linreg.coef_)
return ret
col = ['rss','intercept'] + ['coef_x_%d'%i for i in range(1,16)]
ind = ['model_pow_%d'%i for i in range(1,16)]
coef_matrix_simple = pd.DataFrame(index=ind, columns=col)
# 定义作图的位置与模型的复杂度
models_to_plot = {1:231,3:232,6:233,8:234,11:235,14:236}
# 画图
for i in range(1,16):
coef_matrix_simple.iloc[i-1,0:i+2] = linear_regression(data, power=i, models_to_plot=models_to_plot)
#Ridge(L2-norm)
from sklearn.linear_model import Ridge
def ridge_regression(data, predictors, alpha, models_to_plot={}):
# 模型训练
ridgereg = Ridge(alpha=alpha,normalize=True)
ridgereg.fit(data[predictors],data['y'])
# 预测
y_pred = ridgereg.predict(data[predictors])
# 选择alpha值画图
if alpha in models_to_plot:
plt.subplot(models_to_plot[alpha])
plt.tight_layout()
plt.plot(data['x'],y_pred)
plt.plot(data['x'],data['y'],'.')
plt.title('Plot for alpha: %.3g'%alpha)
rss = sum((y_pred-data['y'])**2)
ret = [rss]
ret.extend([ridgereg.intercept_])
ret.extend(ridgereg.coef_)
return ret
predictors=['x']
predictors.extend(['x_%d'%i for i in range(2,16)])
# 定义alpha值
alpha_ridge = [1e-15, 1e-10, 1e-8, 1e-4, 1e-3,1e-2, 1, 5, 10, 20]
col = ['rss','intercept'] + ['coef_x_%d'%i for i in range(1,16)]
ind = ['alpha_%.2g'%alpha_ridge[i] for i in range(0,10)]
coef_matrix_ridge = pd.DataFrame(index=ind, columns=col)
models_to_plot = {1e-15:231, 1e-10:232, 1e-4:233, 1e-3:234, 1e-2:235, 5:236}
for i in range(10):
coef_matrix_ridge.iloc[i,] = ridge_regression(data, predictors, alpha_ridge[i], models_to_plot)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/ridge.py:125: LinAlgWarning: Ill-conditioned matrix (rcond=5.34098e-17): result may not be accurate.
overwrite_a=True).T
#Lasso(L1-norm)
from sklearn.linear_model import Lasso
def lasso_regression(data, predictors, alpha, models_to_plot={}):
#Fit the model
lassoreg = Lasso(alpha=alpha,normalize=True, max_iter=1e5)
lassoreg.fit(data[predictors],data['y'])
y_pred = lassoreg.predict(data[predictors])
#Check if a plot is to be made for the entered alpha
if alpha in models_to_plot:
plt.subplot(models_to_plot[alpha])
plt.tight_layout()
plt.plot(data['x'],y_pred)
plt.plot(data['x'],data['y'],'.')
plt.title('Plot for alpha: %.3g'%alpha)
#Return the result in pre-defined format
rss = sum((y_pred-data['y'])**2)
ret = [rss]
ret.extend([lassoreg.intercept_])
ret.extend(lassoreg.coef_)
return ret
predictors=['x']
predictors.extend(['x_%d'%i for i in range(2,16)])
# 定义alpha值去测试
alpha_lasso = [1e-15, 1e-10, 1e-8, 1e-5,1e-4, 1e-3,1e-2, 1, 5, 10]
col = ['rss','intercept'] + ['coef_x_%d'%i for i in range(1,16)]
ind = ['alpha_%.2g'%alpha_lasso[i] for i in range(0,10)]
coef_matrix_lasso = pd.DataFrame(index=ind, columns=col)
# 定义画图的模式
models_to_plot = {1e-10:231, 1e-5:232,1e-4:233, 1e-3:234, 1e-2:235, 1:236}
#迭代10个alpha值:
for i in range(10):
coef_matrix_lasso.iloc[i,] = lasso_regression(data, predictors, alpha_lasso[i], models_to_plot)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/coordinate_descent.py:492: ConvergenceWarning: Objective did not converge. You might want to increase the number of iterations. Fitting data with very small alpha may cause precision problems.
ConvergenceWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/coordinate_descent.py:492: ConvergenceWarning: Objective did not converge. You might want to increase the number of iterations. Fitting data with very small alpha may cause precision problems.
ConvergenceWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/coordinate_descent.py:492: ConvergenceWarning: Objective did not converge. You might want to increase the number of iterations. Fitting data with very small alpha may cause precision problems.
ConvergenceWarning)
/anaconda3/envs/tensorflow/lib/python3.7/site-packages/sklearn/linear_model/coordinate_descent.py:492: ConvergenceWarning: Objective did not converge. You might want to increase the number of iterations. Fitting data with very small alpha may cause precision problems.
ConvergenceWarning)