1、Logistic Regression
Python代码:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
def sigmond(x):
    """Logistic (sigmoid) function: 1 / (1 + e^(-x)).

    Works element-wise on scalars or numpy arrays.
    NOTE(review): the name looks like a typo for "sigmoid"; kept
    unchanged because callers in this file use it.
    """
    return 1.0 / (1.0 + np.exp(-x))
def gradient_descend(X, Y, init_theta, alpha, iter_nums):
    """Batch gradient descent for logistic regression.

    Parameters
    ----------
    X : ndarray, shape (n_features, m)
        Feature matrix with one example per COLUMN (this file builds it
        as 3 x m with a leading row of ones for the intercept).
    Y : ndarray, shape (m,)
        0/1 labels.
    init_theta : ndarray, shape (n_features,)
        Starting parameter vector; not mutated.
    alpha : float
        Learning rate.
    iter_nums : int
        Number of gradient-descent iterations.

    Returns
    -------
    ndarray, shape (n_features,)
        The learned parameter vector.
    """
    # Work on a float copy so the caller's init_theta is never mutated
    # (the original aliased it and updated it in place with `-=`).
    theta = np.array(init_theta, dtype=float)
    # Derive the example count from X instead of reading the module-level
    # global `m` (which made the function unusable outside this script).
    m = X.shape[1]
    for _ in range(iter_nums):
        # Prediction error h_theta(x) - y, shape (m,).
        err = sigmond(np.dot(np.transpose(theta), X)) - np.transpose(Y)
        # Gradient of the logistic log-loss, shape (n_features,).
        grad = np.dot(err, np.transpose(X))
        theta -= grad * alpha / m
    return theta
# ---- Load the data -------------------------------------------------------
# Columns: exam-1 score, exam-2 score, admission label (presumably 0/1 —
# this is the standard ex2data1.txt layout; confirm against the file).
filepath = r'C:\Users\LENOVO\OneDrive - 东南大学\桌面\机器学习\吴恩达作业\python\ex2\data\ex2data1.txt'
dataset = np.loadtxt(filepath, delimiter=',', usecols=(0, 1, 2))
[m, n] = np.shape(dataset)
x1 = dataset[:, 0]
x2 = dataset[:, 1]
y = dataset[:, 2]
# Design matrix with an intercept column, stored 3 x m (one example per column),
# matching the layout gradient_descend expects.
X = np.c_[np.ones(m), x1, x2]
X = np.transpose(X)
Y = y

# ---- Scatter plot of the two classes -------------------------------------
# BUG FIX: the original called the y==0 mask `pos` and the y==1 mask `neg`,
# which is backwards. Only the names are corrected here; the masks and the
# marker styles are exactly as before ('r+' for y==0, 'go' for y==1).
neg = (y == 0)
pos = (y == 1)
plt.plot(x1[neg], x2[neg], 'r+')
plt.plot(x1[pos], x2[pos], 'go')

# ---- Train ---------------------------------------------------------------
# Same float64 zero vector as the original np.float64(np.transpose(...))
# construction (transposing a 1-D array is a no-op).
init_theta = np.zeros(3)
alpha = 0.001
iter_nums = 1000000
theta = gradient_descend(X, Y, init_theta, alpha, iter_nums)
print(theta)

# ---- Decision boundary ---------------------------------------------------
# theta0 + theta1*x1 + theta2*x2 = 0  =>  x2 = -(theta0 + theta1*x1) / theta2
# Vectorized replacement for the original element-wise loop; produces the
# same values for every x1[i].
fitted_x2 = -(theta[0] + theta[1] * x1) / theta[2]
plt.plot(x1, fitted_x2, 'b-')
plt.show()
2、Regularized Logistic Regression
待更…