# Demo: the effect of max_iter on LogisticRegression.
# A larger max_iter allows the solver more iterations (longer training time);
# a smaller max_iter stops it earlier, possibly before convergence.
from sklearn.linear_model import LogisticRegression as LR
from sklearn.datasets import load_breast_cancer
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import accuracy_score
from sklearn.model_selection import cross_val_score, train_test_split
from sklearn.feature_selection import SelectFromModel

# Breast-cancer dataset: binary classification, 569 samples x 30 features.
data = load_breast_cancer()
X = data.data
Y = data.target

# 70/30 train/test split; fixed random_state keeps the run reproducible.
Xtrain, Xtest, Ytrain, Ytest = train_test_split(X, Y, test_size=0.3, random_state=420)

# The fitted attribute n_iter_ reports how many iterations the solver
# actually performed before stopping (convergence or the max_iter cap).
lr = LR(penalty='l2', solver='liblinear', C=0.8, max_iter=1000).fit(Xtrain, Ytrain)
print(lr.n_iter_)  # e.g. [24]
# Day 9.0 — Logistic regression: gradient descent
# (Blog metadata: latest recommended article published 2023-06-20 10:02:47)