Log_likelihood update #1008

Merged: 12 commits, Jul 13, 2019
28 changes: 17 additions & 11 deletions machine_learning/logistic_regression.py
@@ -31,13 +31,16 @@ def sigmoid_function(z):
def cost_function(h, y):
    return (-y * np.log(h) - (1 - y) * np.log(1 - h)).mean()

def log_likelihood(X, Y, weights):
    scores = np.dot(X, weights)
    return np.sum(Y * scores - np.log(1 + np.exp(scores)))
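
A reference point for the helper above: Y * scores - log(1 + exp(scores)) summed over samples is the Bernoulli log-likelihood, i.e. the negative of the cross-entropy in cost_function, summed over samples instead of averaged. A small self-contained check, with toy arrays that are purely illustrative:

import numpy as np

X_toy = np.array([[1.0, 2.0], [0.5, -1.0], [2.0, 0.0]])  # hypothetical features
y_toy = np.array([1.0, 0.0, 1.0])                        # hypothetical labels
w_toy = np.array([0.1, -0.2])                            # hypothetical weights

scores = np.dot(X_toy, w_toy)
h = 1 / (1 + np.exp(-scores))                            # sigmoid of the scores
direct = np.sum(y_toy * scores - np.log(1 + np.exp(scores)))
bernoulli = np.sum(y_toy * np.log(h) + (1 - y_toy) * np.log(1 - h))
print(np.isclose(direct, bernoulli))  # True: the two forms agree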

# here alpha is the learning rate, X is the feature matrix, y is the target matrix

def logistic_reg(
    alpha,
    X,
    y,
    num_steps,
    max_iterations=70000,
):
    converged = False
@@ -49,21 +52,24 @@ def logistic_reg(
        h = sigmoid_function(z)
        gradient = np.dot(X.T, h - y) / y.size
        theta = theta - alpha * gradient

        z = np.dot(X, theta)
        h = sigmoid_function(z)
        J = cost_function(h, y)

        iterations += 1  # update iterations

        if iterations == max_iterations:
            print('Maximum iterations exceeded!')
            print('Minimal cost function J=', J)
            converged = True

        weights = np.zeros(X.shape[1])
        for step in range(num_steps):
            scores = np.dot(X, weights)
            predictions = sigmoid_function(scores)
            if step % 10000 == 0:
                print(log_likelihood(X, y, weights))  # Print log-likelihood every so often
        return weights

        if iterations == max_iterations:
            print('Maximum iterations exceeded!')
            print('Minimal cost function J=', J)
            converged = True
    return theta
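
A side note on the hunk above: as reconstructed here, the num_steps loop prints the log-likelihood of a separate weights vector that keeps its zero initialisation, and the unconditional return weights leaves the while loop on its first pass. For comparison, a minimal sketch that attaches the same printout to the theta the gradient step updates; the function name is hypothetical and numpy is assumed to be available as np, as elsewhere in the file:

def logistic_reg_monitored(alpha, X, y, max_iterations=70000):
    # Hypothetical variant, not part of this PR: plain gradient descent on theta,
    # printing the log-likelihood of the current theta every 10000 iterations.
    theta = np.zeros(X.shape[1])
    for iteration in range(max_iterations):
        z = np.dot(X, theta)
        h = 1 / (1 + np.exp(-z))  # same as sigmoid_function(z)
        if iteration % 10000 == 0:
            # same quantity as log_likelihood(X, y, theta) in the diff above
            print(np.sum(y * z - np.log(1 + np.exp(z))))
        theta = theta - alpha * np.dot(X.T, h - y) / y.size
    return theta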


# In[68]:

if __name__ == '__main__':
@@ -72,7 +78,7 @@ def logistic_reg(
    y = (iris.target != 0) * 1

    alpha = 0.1
    theta = logistic_reg(alpha, X, y, max_iterations=70000)
    theta = logistic_reg(alpha, X, y, max_iterations=70000, num_steps=30000)
    print(theta)
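
For anyone running the updated example locally, a hypothetical follow-up (not in this PR) that turns the returned coefficients into class predictions; it reuses X, y, and theta from the block above and assumes numpy is imported as np at the top of the module:

probs = 1 / (1 + np.exp(-np.dot(X, theta)))  # predicted P(y = 1) for each sample
predictions = (probs >= 0.5).astype(int)     # threshold at 0.5
print((predictions == y).mean())             # fraction of training samples matched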

