lr = py`import numpy as np
from scipy.optimize import minimize
def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))
def costFunction(theta, X, y):
    # Cross-entropy (negative log-likelihood) cost for logistic regression.
    m = y.size
    h = sigmoid(X.dot(theta))
    J = -1.0 * (1.0 / m) * (np.log(h).T.dot(y) + np.log(1 - h).T.dot(1 - y))
    # Guard against log(0) producing NaN when the optimizer overshoots.
    if np.isnan(J[0]):
        return np.inf
    return J[0]
def gradient(theta, X, y):
    # Gradient of the cost with respect to theta, returned as a flat vector.
    m = y.size
    h = sigmoid(X.dot(theta.reshape(-1, 1)))
    grad = (1.0 / m) * X.T.dot(h - y)
    return grad.flatten()
def run(X, y):
    X = np.array(X)
    # Keep y as a column vector so the cost and gradient broadcast correctly.
    y = np.array(y).reshape(-1, 1)
    initial_theta = np.zeros(X.shape[1])
    cost = costFunction(initial_theta, X, y)
    grad = gradient(initial_theta, X, y)
    grad_all = [grad]
    cost_all = [cost]
    def history(x):
        # scipy passes the current parameter vector to the callback;
        # record the gradient and cost at each iterate.
        grad_all.append(gradient(x, X, y))
        cost_all.append(costFunction(x, X, y))
    res = minimize(costFunction, initial_theta, args=(X, y), jac=gradient,
                   callback=history, options={'maxiter': 400, 'disp': True})
    return {'res': res, 'grad_all': grad_all, 'cost_all': cost_all}
run`
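A minimal usage sketch (an assumption, not part of the source): exercising run on a synthetic two-feature dataset, with the Python definitions above in scope. The intercept column, the data-generating coefficients, and the names X_demo and y_demo are illustrative.

rng = np.random.default_rng(0)
n = 200
x1 = rng.normal(size=n)
x2 = rng.normal(size=n)
X_demo = np.column_stack([np.ones(n), x1, x2])    # assumed intercept column plus two features
p = 1.0 / (1.0 + np.exp(-(1.5 * x1 - 1.0 * x2)))  # true class probabilities
y_demo = rng.binomial(1, p).astype(float)         # noisy labels, so the data is not separable
out = run(X_demo, y_demo)
print(out['res'].x)           # fitted theta
print(out['cost_all'][-1])    # final cost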