# 期中复习 (Midterm review)
import pandas as pd
import numpy as np
import math
import random
import matplotlib.pyplot as plt
def load_data(path="C:/Users/lolo/Documents/WeChat Files/L15804627211/Files/iris.csv"):
    """Load a dataset from a CSV file.

    Parameters
    ----------
    path : str, optional
        Path to the CSV file. Defaults to the original hard-coded
        location so existing zero-argument callers keep working.

    Returns
    -------
    pandas.DataFrame
        The parsed CSV contents.
    """
    return pd.read_csv(path)
def choose_random(data, alpha):
    """Return a random sample of a fraction ``alpha`` of the rows of ``data``.

    Parameters
    ----------
    data : pandas.DataFrame
        Source frame to sample from.
    alpha : float
        Fraction of rows to keep (0..1); the count is truncated with ``int``.

    Returns
    -------
    pandas.DataFrame
        ``int(alpha * len(data))`` randomly chosen rows.

    Notes
    -----
    BUG FIX: the original used ``data.loc[chooselist, :]`` with positions
    drawn from ``range(len(data))``. ``loc`` is label-based, so that raised
    ``KeyError`` for any DataFrame without the default RangeIndex. ``iloc``
    performs the intended positional selection.
    """
    n_pick = int(alpha * len(data))
    picked = random.sample(range(len(data)), n_pick)
    return data.iloc[picked, :]
def sigmoid(x):
    """Logistic function: map any real input (scalar or ndarray) into (0, 1)."""
    neg_exp = np.exp(-x)
    return 1.0 / (1.0 + neg_exp)
def logistic_gradient_descent_method(train_x, train_y, alpha, maxnum):
    """Fit logistic-regression weights by batch gradient ascent on the
    log-likelihood.

    Parameters
    ----------
    train_x : numpy.ndarray of shape (n_samples, n_features)
        Feature matrix. A bias column of ones is appended internally.
    train_y : array-like of 0/1 labels, length n_samples.
    alpha : float
        Learning rate.
    maxnum : int
        Number of full passes over the training set.

    Returns
    -------
    numpy.ndarray of shape (n_features + 1, 1)
        Learned weights; the last entry is the intercept.

    Notes
    -----
    BUG FIX: ``cum`` accumulates the log-likelihood gradient
    ``sum_k (y_k - p_k) * x_k``, so the update must move *along* it
    (ascent).  The original ``theta = theta - alpha * cum.T`` moved
    against the gradient and diverged from the optimum.
    """
    numSam, numFeature = train_x.shape
    # Append the bias (intercept) column of ones.
    train_x = np.hstack((train_x, np.ones((numSam, 1))))
    theta = np.ones((numFeature + 1, 1))
    for _ in range(maxnum):
        cum = np.zeros((1, numFeature + 1))
        for k in range(numSam):
            # p_k = sigmoid(x_k . theta), inlined logistic function.
            p_k = 1.0 / (1.0 + np.exp(-np.dot(train_x[k], theta)))
            cum += (train_y[k] - p_k) * train_x[k]
        # Ascend the log-likelihood gradient (see Notes).
        theta = theta + alpha * cum.transpose()
    return theta
def logistic_newton_method(train_x, train_y, maxnum):
numSam, numFeature = train_x.shape
add = np.ones((numSam,1))
train_x = np.hstack((train_x,add))
beta = np.ones((numFeature + 1, 1))
for i in range(0,maxnum):
patial1 = np.zeros((numFeature + 1, 1))
patial2 = np.zeros((numFeature + 1, numFeature + 1))
for k in range(0, numSam):
p1 = sigmoid(np.dot(train_x[k], beta))
patial1 -= np.matrix(train_x[k]).transpose() * (train_y[k] - p1)
patial2 += np.dot(train_x[k].transpose(),train_x[k]) * p1 * (1-p1)
beta <