废话少说，直接看代码。
from keras.models import Sequential
from keras.layers import Dense
import numpy as np

# Fix the RNG seed so the training run is reproducible.
np.random.seed(7)

# Pima Indians diabetes dataset: 8 feature columns followed by 1 binary label.
dataset = np.loadtxt('pima-indians-diabetes.csv', delimiter=',')
x = dataset[:, 0:8]   # features
Y = dataset[:, 8]     # label (0/1)

# Three-layer fully-connected network: 8 -> 10 -> 5 -> 1, sigmoid throughout.
model = Sequential()
model.add(Dense(10, input_dim=8, activation='sigmoid'))
model.add(Dense(5, activation='sigmoid'))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
model.fit(x=x, y=Y, epochs=100, batch_size=10, validation_split=0.2)

# model.get_weights() returns a list of 6 arrays: for each of the 3 Dense
# layers, one kernel (fully-connected weight matrix) and one bias vector.
# The arrays have different shapes, so wrapping them in np.array() creates a
# "ragged" array -- NumPy >= 1.24 raises a ValueError for that unless
# dtype=object is passed explicitly.
weights = np.array(model.get_weights(), dtype=object)
print(weights.shape)  # (6,): one kernel + one bias per layer

# Print each array's shape and contents: kernel shapes are (8, 10), (10, 5),
# (5, 1); bias shapes are (10,), (5,), (1,).
for w in weights:
    print(w.shape)
    print(w)
运行后结果如下：
... ...
614/614 [==============================] - 0s 102us/step - loss: 0.5342 - accuracy: 0.7394 - val_loss: 0.5898 - val_accuracy: 0.7143
Epoch 98/100
614/614 [==============================] - 0s 92us/step - loss: 0.5343 - accuracy: 0.7345 - val_loss: 0.5963 - val_accuracy: 0.6883
Epoch 99/100
614/614 [==============================] - 0s 91us/step - loss: 0.5346 - accuracy: 0.7459 - val_loss: 0.5950 - val_accuracy: 0.7143
Epoch 100/100
614/614 [==============================] - 0s 94us/step - loss: 0.5343 - accuracy: 0.7378 - val_loss: 0.5992 - val_accuracy: 0.6883
(6,)
(8, 10)
[[-3.16984117e-01 7.68889785e-01 -6.09298885e-01 2.47692540e-01
-4.28811222e-01 -3.27906728e-01 -9.68379498e-01 5.13341837e-02
-1.25264615e-01 -4.91730124e-01]
[ 2.11044028e-03 -4.49306726e-01 1.98752776e-01 -2.66629070e-01
-1.40203267e-01 4.65806782e-01 1.77532241e-01 -7.84069952e-03
-3.89660805e-01 9.84918177e-02]
[ 2.31141478e-01 -1.12549305e-01 2.68504262e-01 -2.70382732e-01
3.41944665e-01 2.30201110e-01 -1.10623777e-01 -1.36407129e-02
-9.43713263e-03 1.61676109e-01]
[ 9.12439078e-02 1.37153640e-01 -1.14635983e-03 1.55549422e-01
-9.64731127e-02 -2.13079065e-01 -7.52463162e-01 1.12140961e-01
-2.36525431e-01 -3.01530827e-02]
[ 3.61231089e-01 -3.56254607e-01 -2.21178934e-01 3.97225380e-01
1.04284659e-02 -5.62568188e-01 -4.30800825e-01 1.00965045e-01
3.91700178e-01 -3.33490968e-01]
[-3.33913565e-01 -1.10131040e-01 -3.23349953e-01 9.21180472e-02
1.06020682e-01 -4.13157195e-02 -1.33292452e-01 3.75863463e-01
1.40875010e-02 8.16177130e-02]
[-7.82935143e-01 -1.88346773e-01 -1.04280390e-01 4.78980929e-01
-1.18376267e+00 7.10901558e-01 -5.79004109e-01 -9.32134613e-02
7.95687854e-01 2.71425750e-02]
[-2.17972279e-01 2.39495710e-01 -1.78288132e-01 1.90169722e-01
-3.07604492e-01 -3.15494061e-01 -2.90811323e-02 -2.84363925e-01
3.06198806e-01 -4.56228644e-01]]
(10,)
[ 0.72589767 -0.28218535 0.44881618 -0.41012928 0.8835287 0.44842014
-0.418379 -0.42579165 -0.31845236 -0.5013525 ]
(10, 5)
[[ 0.5809633 0.41399166 0.79127985 0.42651224 -0.54863864]
[-0.64285654 -0.6585386 -1.0624979 -1.3291585 1.414629 ]
[ 0.39915696 0.68231463 0.21638893 -0.14376499 -0.5282974 ]
[-0.2700348 -1.2455482 -0.8031258 -0.64963794 0.7466293 ]
[ 0.5946755 1.6657637 1.7360553 1.9690804 -2.1626413 ]
[ 0.26416108 -0.30562374 0.27448004 0.6426197 0.51702225]
[-0.6067534 0.02540117 -0.6464479 -0.07267946 0.2550761 ]
[-0.08417924 -0.32010064 -0.20277596 -0.699236 0.1021447 ]
[-1.1629095 -0.10892586 -1.0560298 -0.9182803 1.0299007 ]
[-1.0873649 -1.3365886 -1.3386427 -1.4494324 1.0108947 ]]
(5,)
[-0.27321738 -0.218769 -0.17955495 0.05500561 0.37478566]
(5, 1)
[[-0.4963149 ]
[-1.0566001 ]
[-0.9775477 ]
[-0.91774327]
[ 1.0470334 ]]
(1,)
[0.26617393]