Code

Python Implementation

"""Below is an AdaGrad class showing one way to implement the optimizer in Python. The class takes a layer's number of nodes and its input dimensions. The get_steps method takes the current gradients for the weights and biases and returns the steps we need to apply to update those weights and biases."""

import numpy as np


class AdaGrad:
    """AdaGrad optimizer state for a single dense layer.

    Keeps a running sum of squared gradients (the diagonal "G" matrix)
    for the layer's weights and biases, and scales each parameter's
    step by the inverse square root of its accumulated squared gradient.

    Args:
        input_dims: Number of inputs feeding the layer (rows of the
            weight matrix).
        nodes: Number of nodes in the layer (columns of the weight
            matrix / length of the bias vector).
        learning_rate: Global step size. Defaults to 0.01, matching the
            original hard-coded value.
        eps: Small constant added inside the square root for numerical
            stability (avoids division by zero before any gradient has
            accumulated).
    """

    def __init__(self, input_dims, nodes, learning_rate=0.01, eps=1e-8):
        self.learning_rate = learning_rate
        self.eps = eps
        # Accumulated sum of squared gradients for the weights.
        self.G_weights = np.zeros((input_dims, nodes))
        # Accumulated sum of squared gradients for the biases.
        self.G_biases = np.zeros(nodes)

    def get_steps(self, grad_weights, grad_biases):
        """Return the AdaGrad update steps for the current gradients.

        Accumulates the squared gradients into the G matrices, then
        scales each gradient by learning_rate / sqrt(G + eps).

        Args:
            grad_weights: Gradient w.r.t. the weights, shape
                (input_dims, nodes).
            grad_biases: Gradient w.r.t. the biases, shape (nodes,).

        Returns:
            Tuple (weights_step, biases_step) — the per-parameter steps
            to apply when updating the weights and biases.
        """
        # Update the running sums of squared gradients.
        self.G_weights += np.square(grad_weights)
        self.G_biases += np.square(grad_biases)
        # Per-parameter adaptive step: lr / sqrt(G + eps) * grad.
        weights_step = (self.learning_rate
                        / np.sqrt(self.G_weights + self.eps)) * grad_weights
        biases_step = (self.learning_rate
                       / np.sqrt(self.G_biases + self.eps)) * grad_biases
        return weights_step, biases_step

0

1

Updated 2020-11-16

Tags

Deep Learning (in Machine learning)

Data Science