浅层神经网络搭建
import numpy as np
import matplotlib.pyplot as plt
from testCases import *
import sklearn #机器学习库
import sklearn.datasets #sklearn数据集
import sklearn.linear_model #线性回归
from planar_utils import plot_decision_boundary,sigmoid,load_planar_dataset,load_extra_datasets
%matplotlib inline
np.random.seed(1) # fix the RNG seed so every run draws the same "random" values
# seed() sets the starting integer for the random-number generator: the same seed
# reproduces the same sequence every run; if unset, a time-based seed is chosen
# and each run's random numbers differ
读取数据
X, Y = load_planar_dataset() # X: (2, m) feature matrix, Y: (1, m) label row vector (shapes printed below as (2, 400) / (1, 400))
# Scatter the two input features, colored by class label; Y must be flattened
# to match the (m,) shape expected by the `c=` argument.
plt.scatter(X[0,:],X[1,:],c=Y.reshape(X[0,:].shape),cmap=plt.cm.Spectral)
<matplotlib.collections.PathCollection at 0x1a6c4cb4b00>
# Inspect the dataset dimensions.
shape_X = X.shape  # (n_features, m) = (2, 400)
shape_Y = Y.shape  # (n_outputs, m) = (1, 400)
m = shape_X[1]     # number of training examples
print('The shape of X is' + str(shape_X))
print('The shape of Y is' + str(shape_Y))  # fixed copy-paste bug: previously printed "X" for Y's shape
print('I have m = %d training examples' %(m))
The shape of X is(2, 400)
The shape of X is(1, 400)
I have m = 400 training examples
sklearn.linear_model.LogisticRegressionCV() 是带交叉验证的逻辑回归(分类)模型,并非线性回归
# Fit sklearn's cross-validated logistic-regression classifier as a baseline
# on the planar dataset (it will underfit this non-linearly-separable data).
clf = sklearn.linear_model.LogisticRegressionCV()
# sklearn expects (n_samples, n_features), hence the transposes; passing the
# column vector Y.T triggers the DataConversionWarning seen in the output --
# Y.T.ravel() would silence it.
clf.fit(X.T,Y.T)
D:\windows\software\anaconda\lib\site-packages\sklearn\utils\validation.py:578: DataConversionWarning: A column-vector y was passed when a 1d array was expected. Please change the shape of y to (n_samples, ), for example using ravel().
y = column_or_1d(y, warn=True)
LogisticRegressionCV(Cs=10, class_weight=None, cv=None, dual=False,
fit_intercept=True, intercept_scaling=1.0, max_iter=100,
multi_class='ovr', n_jobs=1, penalty='l2', random_state=None,
refit=True, scoring=None, solver='lbfgs', tol=0.0001, verbose=0)
绘图必须重置Y为Y.reshape(X[0,:].shape)
# Plot the decision boundary for logistic regression
# Y must be reshaped from (1, m) to the 1-D shape (m,) expected by the
# plotting helper (see the note above this cell in the notebook).
plot_decision_boundary(lambda x: clf.predict(x), X, Y.reshape(X[0,:].shape))
plt.title("Logistic Regression")
Text(0.5,1,'Logistic Regression')
# Print accuracy
LR_predictions = clf.predict(X.T)  # predicted 0/1 labels, shape (m,)
# Accuracy = (correctly predicted 1s + correctly predicted 0s) / m * 100:
# dot(Y, preds) counts true positives, dot(1-Y, 1-preds) counts true negatives.
print ('Accuracy of logistic regression: %d ' % float((np.dot(Y,LR_predictions) + np.dot(1-Y,1-LR_predictions))/float(Y.size)*100) +
'% ' + "(percentage of correctly labelled datapoints)")
Accuracy of logistic regression: 47 % (percentage of correctly labelled datapoints)
def layer_sizes(X, Y, n_h=4):
    """Return the layer sizes for a one-hidden-layer neural network.

    Args:
        X: input data of shape (n_x, m), one column per example.
        Y: labels of shape (n_y, m).
        n_h: hidden-layer size; defaults to 4 (the value hard-coded in the
            original exercise), but may now be overridden by the caller.

    Returns:
        Tuple (n_x, n_h, n_y) of input, hidden and output layer sizes.
    """
    n_x = X.shape[0]  # input layer size = number of features
    n_y = Y.shape[0]  # output layer size = number of label rows
    return n_x, n_h, n_y
X_assess,Y_assess = layer_sizes_test_case()
(n_x,n_h,n_y) = layer_sizes(X_assess,Y_assess)
print(n_x,n_h,n