# Keras / sklearn model demos.
# Part 1: simple decision tree classifier on a spreadsheet dataset.
# FIX: the two section-header lines above were bare text (the first even
# started with '@', a SyntaxError); converted to comments.
import pandas as pd
import random

# NOTE(review): machine-specific path mixing '\\' and '/' separators;
# assumes the sheet's first 3 columns are features and the 4th is the
# label -- TODO confirm against loudian.xls.
path = 'C:\\Users\\Administrator\\Desktop\\网络模型/loudian.xls'
data = pd.read_excel(path)
print(data)
data = data.values  # plain numpy array
print(data)

p = 0.8  # 80% of rows train, remaining 20% test
train = data[:int(len(data) * p), :]
test = data[int(len(data) * p):, :]

from sklearn.tree import DecisionTreeClassifier  # decision tree classifier
tree = DecisionTreeClassifier()
tree.fit(train[:, :3], train[:, 3])  # 3 feature columns, label in column 3
print(tree.score(train[:, :3], train[:, 3]))  # training accuracy
print(tree.score(test[:, :3], test[:, 3]))    # held-out accuracy
# Part 2: simple neural network on synthetic data -- data preparation.
# FIX: the section header above was a bare text line; now a comment.
import numpy as np
import pandas as pd
import random

# Four columns of 1000 random integers each.
data1 = np.random.randint(2, size=1000)   # binary feature
data2 = np.random.randint(10, size=1000)  # feature in [0, 10)
data3 = np.random.randint(5, size=1000)   # feature in [0, 5)
data4 = np.random.randint(2, size=1000)   # binary label

# BUG FIX: np.array([data1, ..., data4]).reshape(-1, 4) stacked the four
# series as ROWS of a (4, 1000) array and then reshaped, so each output
# row held 4 consecutive values from the SAME source array -- samples
# were scrambled.  column_stack makes row i the intended sample
# (data1[i], data2[i], data3[i], data4[i]).
data = np.column_stack([data1, data2, data3, data4])
print(data)

p = 0.8  # 80% train / 20% test split
train = data[:int(len(data) * p), :]
test = data[int(len(data) * p):, :]
from keras.models import Sequential  # linear stack of layers
from keras.layers.core import Dense, Activation

# 3 input features -> 14 -> 18 -> 1 sigmoid output (binary classifier).
net = Sequential()
net.add(Dense(units=14, input_dim=3))  # units= replaces deprecated output_dim=
net.add(Activation("relu"))
net.add(Dense(units=18))
net.add(Activation("relu"))
net.add(Dense(units=1))
# BUG FIX: the original applied Activation("relu") immediately before the
# sigmoid.  relu clamps negative logits to 0, so sigmoid(relu(x)) >= 0.5
# for every input -- the model could only ever predict class 1.
net.add(Activation("sigmoid"))
net.compile(loss="binary_crossentropy", optimizer="adam")

# Train on the first 3 columns; label in column 3.
net.fit(train[:, :3], train[:, 3], epochs=100, batch_size=1)

# NOTE(review): predict_classes was removed in TF2-era Keras; on newer
# versions use (net.predict(x) > 0.5).astype(int) instead.
last = net.predict_classes(train[:, :3]).reshape(len(train))
print((last == train[:, 3]).sum() / len(last))  # training accuracy
# Part 3: one-variable linear regression with Keras.
# FIX: the section header above was a bare text line; now a comment.
from keras.models import Sequential
from keras.layers.core import Dense, Activation
import numpy as np
import matplotlib.pyplot as plt

np.random.seed(1271)  # reproducible data
x = np.linspace(-1, 1, 200)
np.random.shuffle(x)
print(x)
# Ground truth: y = 0.3*x plus gaussian noise (sigma = 0.05).
y = 0.3 * x + np.random.normal(0, 0.05, (200,))
print(y)
plt.scatter(x, y)
plt.show()

# 160 samples to train, 40 to test.
x_train, y_train = x[:160], y[:160]
x_test, y_test = x[160:], y[160:]

print("训练--------------")
model = Sequential()
model.add(Dense(units=1, input_dim=1))  # one linear unit: y = w*x + b
model.compile(loss='mse', optimizer='sgd')

# FIX: the loop body was unindented in the original (SyntaxError); restored.
for step in range(301):
    cost = model.train_on_batch(x_train, y_train)
    if step % 30 == 0:
        print("训练结果", cost)

print("测试ing--------------")
cost = model.evaluate(x_test, y_test, batch_size=40)
# FIX: label was truncated to "测试结"; completed to match the other demos.
print("测试结果", cost)
w, b = model.layers[0].get_weights()  # learned slope / intercept
y_new = model.predict(x_test)
plt.plot(x_test, y_new)  # fitted line over the test inputs
plt.show()
# Part 4: binary classification with a single sigmoid unit.
# FIX: the section header above was a bare text line; now a comment.
from keras.models import Sequential
from keras.layers.core import Dense, Activation
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets

# 200 samples, 2 informative features, 2 classes, 1 cluster per class.
x, y = datasets.make_classification(n_samples=200,
                                    n_features=2,
                                    n_informative=2,
                                    n_redundant=0,
                                    n_repeated=0,
                                    n_classes=2,
                                    n_clusters_per_class=1)

model = Sequential()
model.add(Dense(units=1, input_dim=2))  # logistic regression in Keras
model.add(Activation("sigmoid"))
model.compile(loss='binary_crossentropy', optimizer='sgd')

# FIX: the loop body was unindented in the original (SyntaxError); restored.
for step in range(501):
    cost = model.train_on_batch(x, y)
    if step % 30 == 0:
        print("训练结果", cost)

print("测试ing--------------")
cost = model.evaluate(x, y, batch_size=40)
print("测试结果", cost)
w, b = model.layers[0].get_weights()  # learned decision-boundary params
y_new = model.predict(x)  # sigmoid probabilities in [0, 1]
# BUG FIX: (y_new * 2).astype('int') maps a probability of exactly 1.0 to
# class 2; threshold at 0.5 instead for a clean 0/1 prediction.
y_pred = (y_new > 0.5).astype(int)

# Predicted labels (top) vs. true labels (bottom).
plt.subplot(2, 1, 1).scatter(x[:, 0], x[:, 1], c=y_pred[:, 0])
plt.subplot(2, 1, 2).scatter(x[:, 0], x[:, 1], c=y)
plt.show()
# Part 5: three-class softmax classification.
# FIX: the section header above was a bare text line; now a comment.
from keras.models import Sequential
from keras.layers.core import Dense, Activation
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
# NOTE(review): np_utils was removed in newer Keras; use
# keras.utils.to_categorical there.
from keras.utils import np_utils

# 200 samples, 2 informative features, 3 classes, 1 cluster per class.
x, y = datasets.make_classification(n_samples=200,
                                    n_features=2,
                                    n_informative=2,
                                    n_redundant=0,
                                    n_repeated=0,
                                    n_classes=3,
                                    n_clusters_per_class=1)
n_class = 3
y = np_utils.to_categorical(y, n_class)  # one-hot encode: shape (200, 3)

model = Sequential()
model.add(Dense(units=n_class, input_dim=2))  # one logit per class
model.add(Activation("softmax"))
model.compile(loss='categorical_crossentropy', optimizer='sgd')

# FIX: the loop body was unindented in the original (SyntaxError); restored.
for step in range(501):
    cost = model.train_on_batch(x, y)
    if step % 30 == 0:
        print("训练结果", cost)

print("测试ing--------------")
cost = model.evaluate(x, y, batch_size=40)
print("测试结果", cost)
w, b = model.layers[0].get_weights()
y_new = model.predict(x)           # class probabilities, shape (200, 3)
y_pred = y_new.argmax(axis=1)      # most probable class per sample
print(y_pred)

# Predicted labels (top) vs. true labels (bottom).
plt.subplot(2, 1, 1).scatter(x[:, 0], x[:, 1], c=y_pred)
# BUG FIX: the original passed the one-hot matrix y (shape (200, 3)) as
# c=, which matplotlib interprets as RGB triples instead of class labels;
# decode back to integer labels to mirror the predicted plot.
plt.subplot(2, 1, 2).scatter(x[:, 0], x[:, 1], c=y.argmax(axis=1))
plt.show()