Skip to content

Commit

Permalink
Merge branch 'develop'
Browse files Browse the repository at this point in the history
  • Loading branch information
ebay-yobae committed May 12, 2017
2 parents 58018c1 + 1630ede commit bb19e62
Showing 1 changed file with 13 additions and 13 deletions.
26 changes: 13 additions & 13 deletions classes/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,54 +61,54 @@ def col2im_indices(cols, x_shape, field_height=3, field_width=3, padding=1, stri
return x_padded
return x_padded[:, :, padding:-padding, padding:-padding]


def adam_update(neurons, lr, t, l2_reg=0, beta1=np.float32(0.9), beta2=np.float32(0.999)):
    """Apply one Adam optimization step to each neuron in `neurons`.

    Each neuron is expected to carry: `weights`, `b` (bias), `delta`
    (backpropagated error), `last_input` (activation fed into the layer),
    and the Adam moment accumulators `m` and `v` (updated in place).

    Args:
        neurons: iterable of neuron objects updated in place.
        lr: learning rate.
        t: 1-based step counter used for bias correction.
        l2_reg: L2 regularization coefficient (0 disables it).
        beta1: exponential decay rate for the first moment estimate.
        beta2: exponential decay rate for the second moment estimate.

    Bug fix: `m` and `v` were previously assigned only inside the
    `t < 3` branch, so any call with `t >= 3` raised NameError. They now
    fall back to the raw moments when bias correction is skipped.
    """
    for n in neurons:
        l2 = l2_reg * n.weights
        dx = (n.last_input.dot(n.delta)).T
        d_bias = np.average(n.delta)

        n.m = beta1 * n.m + (1 - beta1) * dx
        n.v = beta2 * n.v + (1 - beta2) * (dx ** 2)

        # Bias-correct only during the first couple of steps (the moments
        # start at zero); afterwards use the raw accumulators so that
        # `m` and `v` are always bound.
        if t < 3:
            m = n.m / np.float32(1 - beta1 ** t)
            v = n.v / np.float32(1 - beta2 ** t)
        else:
            m, v = n.m, n.v

        # 1e-8 guards against division by zero for tiny second moments.
        n.weights -= lr * m / (np.sqrt(v) + 1e-8) + l2
        n.b -= lr * d_bias

def nag_update(neurons, lr, l2_reg=0, mu=np.float32(0.9)):
    """Apply one Nesterov accelerated gradient step to each neuron.

    Updates `weights`, `b`, and the velocity `v` in place; the previous
    velocity is stashed on the neuron as `v_prev`.
    """
    for neuron in neurons:
        weight_decay = l2_reg * neuron.weights
        grad = neuron.last_input.dot(neuron.delta).T
        grad_bias = np.average(neuron.delta)

        # Keep the old velocity around before overwriting it.
        neuron.v_prev = neuron.v
        neuron.v = mu * neuron.v - lr * grad

        # Nesterov lookahead form of the momentum update.
        neuron.weights += (1 + mu) * neuron.v - mu * neuron.v_prev - weight_decay
        neuron.b -= lr * grad_bias

def momentum_update(neurons, lr, l2_reg=0, mu=np.float32(0.9)):
    """Apply one classical momentum SGD step to each neuron.

    Updates `weights`, `b`, and the velocity `v` in place.
    """
    for neuron in neurons:
        weight_decay = l2_reg * neuron.weights
        grad = neuron.last_input.dot(neuron.delta).T
        grad_bias = np.average(neuron.delta)

        # Accumulate velocity, then step the weights by it.
        neuron.v = mu * neuron.v - lr * grad
        neuron.weights += neuron.v - weight_decay
        neuron.b -= lr * grad_bias


def vanila_update(neurons, lr, l2_reg=0):
    """Apply one plain (vanilla) gradient-descent step to each neuron.

    Updates `weights` and `b` in place; no momentum state is touched.
    """
    for neuron in neurons:
        weight_decay = l2_reg * neuron.weights
        grad = neuron.last_input.dot(neuron.delta).T
        grad_bias = np.average(neuron.delta)

        neuron.weights -= lr * grad + weight_decay
        neuron.b -= lr * grad_bias

def sigmoid(input):
    """Elementwise logistic function 1 / (1 + e^-x); works on scalars and arrays."""
    exp_neg = np.exp(-input)
    return 1 / (1 + exp_neg)
Expand Down

0 comments on commit bb19e62

Please sign in to comment.