import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

# A simple feedforward model: one hidden layer of 64 ReLU units followed by
# a 10-way softmax output, for inputs already flattened to 784 features (28x28).
model = Sequential()
model.add(Dense(64, activation='relu', input_shape=(784,)))
model.add(Dense(10, activation='softmax'))
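# Quick sanity check of the architecture: model.summary() lists each layer's
# output shape and parameter count. Here the hidden layer has
# 784 * 64 + 64 = 50,240 parameters and the output layer 64 * 10 + 10 = 650.
model.summary()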
# A second model that takes raw 28x28 images: Flatten turns each image into a
# 784-vector, two 16-unit ReLU hidden layers follow, and the softmax is applied
# as a separate layer after a linear (no-activation) Dense output.
from tensorflow.keras.layers import Dense, Flatten, Softmax

model = Sequential()
model.add(Flatten(input_shape=(28, 28)))
model.add(Dense(16, activation='relu', name='layer_1'))
model.add(Dense(16, activation='relu'))
model.add(Dense(10))
model.add(Softmax())
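# Because input_shape was given, the weight variables are created as soon as
# each layer is added. A quick shape check on the named hidden layer (a minimal
# sketch): Flatten maps each 28x28 image to a 784-vector, so layer_1's kernel
# is (784, 16) and its bias is (16,).
kernel, bias = model.get_layer('layer_1').get_weights()
assert kernel.shape == (28 * 28, 16)
assert bias.shape == (16,)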
# All of the model's weight variables (kernels and biases) can be listed directly:
model.weights
[<tf.Variable 'layer_1/kernel:0' shape=(784, 16) dtype=float32, numpy=
array([[ 0.01290578, 0.07445015, 0.01681935, ..., 0.04626302,
0.01588257, -0.01308318],
[-0.0527299 , -0.07710216, 0.07464638, ..., 0.0087358 ,
-0.02961487, -0.05955961],
[ 0.03564721, -0.08287186, -0.03924981, ..., -0.0771492 ,
-0.01618949, -0.05667122],
...,
[ 0.05695684, -0.03964275, -0.0828326 , ..., 0.04255903,
0.07647406, -0.07485285],
[-0.0596628 , -0.06039616, 0.01491483, ..., -0.06700376,
-0.04544031, 0.02588803],
[-0.0542443 , 0.04723233, -0.0512512 , ..., 0.05871357,
0.00494871, 0.00856258]], dtype=float32)>,
<tf.Variable 'layer_1/bias:0' shape=(16,) dtype=float32, numpy=
array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
dtype=float32)>,
<tf.Variable 'dense_2/kernel:0' shape=(16, 16) dtype=float32, numpy=
array([[ 3.40442985e-01, -4.17145699e-01, -3.55908453e-01,
5.62509298e-02, 4.30662185e-01, -2.74808615e-01,
3.66872460e-01, -3.51692528e-01, -1.94020331e-01,
-2.64424592e-01, -3.09619218e-01, -3.71832252e-02,
-2.10865408e-01, 5.35627902e-02, 3.65656346e-01,
-2.95218408e-01],
[ 9.27143395e-02, -3.74022126e-01, 1.87536269e-01,
1.70864791e-01, 2.35066026e-01, 6.93340003e-02,
-2.57344306e-01, -2.92516887e-01, 3.15936655e-01,
-1.14047825e-01, 8.96017253e-02, -2.54236758e-02,
3.05450171e-01, 2.01215446e-02, -1.91974759e-01,
-1.95948616e-01],
[-1.88459083e-01, 1.10159487e-01, -4.29021925e-01,
7.55192339e-02, -3.92421991e-01, 4.13672775e-01,
8.35732520e-02, -1.30163729e-02, -2.17564017e-01,
-2.04560027e-01, -2.11227536e-02, -1.98445335e-01,
-1.75059170e-01, -3.59995067e-02, -1.48799598e-01,
-3.02458525e-01],
[-2.93337405e-01, 4.23404187e-01, 9.23266113e-02,
-4.59597111e-02, -7.17966557e-02, 2.40310818e-01,
2.35053897e-03, 6.20829463e-02, 1.63255244e-01,
2.99993902e-01, 2.19102770e-01, 3.32830042e-01,
2.87256986e-01, 2.86558360e-01, -3.94982815e-01,
-1.73235893e-01],