import matplotlib.pyplot as plt
import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import classification_report, confusion_matrix
# Toy dataset: 10 single-feature observations 0..9; the first four are
# labeled 0 and the remaining six are labeled 1.
x = np.arange(10).reshape(-1, 1)
y = np.array([0, 0, 0, 0, 1, 1, 1, 1, 1, 1])

# Fit a binary logistic-regression classifier.  (The original script
# constructed and fitted the identical estimator twice; one fit on the same
# data with the same hyperparameters yields the same model, so the duplicate
# is removed.)  random_state is fixed for reproducibility.
model = LogisticRegression(solver='liblinear', random_state=0)
model.fit(x, y)

print(model.classes_)    # distinct class labels seen during fit: [0 1]
print(model.intercept_)  # fitted bias term b0
print(model.coef_)       # fitted weight for the single feature

# Each row of predict_proba corresponds to one observation.  The first
# column is the probability of the predicted output being zero, i.e.
# 1 - p(x); the second column is the probability that the output is one,
# i.e. p(x).
print(model.predict_proba(x))
print(model.score(x, y))  # mean accuracy on the training data
# Visualize the 2x2 confusion matrix of the training-set predictions.
cm = confusion_matrix(y, model.predict(x))
fig, ax = plt.subplots(figsize=(8, 8))
ax.imshow(cm)
ax.grid(False)
ax.xaxis.set(ticks=(0, 1), ticklabels=('Predicted 0s', 'Predicted 1s'))
ax.yaxis.set(ticks=(0, 1), ticklabels=('Actual 0s', 'Actual 1s'))
ax.set_ylim(1.5, -0.5)  # flip the y-axis so row 0 ('Actual 0s') is on top
# Annotate each cell with its count.  NOTE: the original source had the
# inner loop and the ax.text(...) call unindented, which is an
# IndentationError — the script could not run; proper nesting restored.
for i in range(2):
    for j in range(2):
        ax.text(j, i, cm[i, j], ha='center', va='center', color='red')
plt.show()
# Source: "Logistic Regression in Python" – Real Python
# TODO: revisit logistic regression when there is time; a practical
# step-by-step example is here:
# https://towardsdatascience.com/building-a-logistic-regression-in-python-step-by-step-becd4d56c9c8