
Commit

add name
fsx950223 committed Jul 20, 2021
1 parent 64b70b4 commit 4dbc208
Showing 1 changed file with 2 additions and 2 deletions.

tensorflow_addons/optimizers/gradient_accumulator.py (2 additions, 2 deletions)
@@ -57,8 +57,8 @@ def _accum_grad(grads_and_vars):
             with tf.init_scope():
                 if not self._gradients:
                     for grad, var in grads_and_vars:
-                        self._gradients[var.ref()] = tf.Variable(
-                            tf.zeros_like(var), trainable=False
+                        self._gradients[var.ref()] = self.add_weight(
+                            "ga", shape=var.shape, dtype=var.dtype, trainable=False
                         )
             new_grads_and_vars = []
             for grad, var in grads_and_vars:
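The change replaces an anonymous accumulator slot built with tf.Variable(tf.zeros_like(var)) by a weight created through self.add_weight, which still starts the slot at zero but also gives it a name ("ga") and registers it as a tracked weight of the optimizer, hence the commit message "add name". Below is a minimal sketch of that pattern, assuming a plain tf.keras.layers.Layer as the slot owner; the class name DemoAccumulator, the build_slots helper, and the example variables are hypothetical, and only the add_weight call mirrors the diff.

    # Minimal sketch, not the repository's code: DemoAccumulator and build_slots
    # are illustrative assumptions; only the add_weight("ga", ...) pattern
    # mirrors the diff above.
    import tensorflow as tf


    class DemoAccumulator(tf.keras.layers.Layer):
        def build_slots(self, variables):
            # add_weight (unlike a bare tf.Variable) creates a named,
            # non-trainable slot that is tracked by the owning object.
            self._slots = {
                v.ref(): self.add_weight(
                    name="ga",            # slot name introduced by the commit
                    shape=v.shape,
                    dtype=v.dtype,
                    initializer="zeros",  # same zero start as tf.zeros_like(var)
                    trainable=False,
                )
                for v in variables
            }
            return self._slots


    variables = [tf.Variable([1.0, 2.0]), tf.Variable([[3.0, 4.0]])]
    slots = DemoAccumulator().build_slots(variables)
    print([slot.name for slot in slots.values()])  # each slot now carries a name

Because the slots come from add_weight rather than loose tf.Variable objects, they appear in the owner's weights collection and are saved and restored with it.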
