Commit: Rewrite TensorFlow L1 norm
Prevent losses from diverging as per tensorflow/tensorflow#12071.

CloudyOverhead committed Dec 3, 2020
1 parent 7754484 commit b37dbf5
Showing 1 changed file with 6 additions and 2 deletions.
8 changes: 6 additions & 2 deletions GeoFlow/Losses.py
@@ -63,10 +63,10 @@ def loss(label, output):
 
         # Minimize gradient (blocky inversion)
         if beta > 0:
-            num = tf.norm(output[:, 1:, :] - output[:, :-1, :], ord=1,
+            num = tf_norm(output[:, 1:, :] - output[:, :-1, :],
                           axis=[1, 2])
             if output.get_shape()[-1] != 1:
-                num += tf.norm(output[:, :, 1:] - output[:, :, :-1], ord=1,
+                num += tf_norm(output[:, :, 1:] - output[:, :, :-1],
                                axis=[1, 2])
             den = tf.norm(output, ord=1, axis=[1, 2])
             loss = tf.reduce_sum(num / den)
@@ -75,3 +75,7 @@ def loss(label, output):
         return tf.reduce_sum(losses)
 
     return loss
+
+
+def tf_norm(input, axis=None):
+    return tf.reduce_sum(tf.abs(input), axis=axis)
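
For context: tensorflow/tensorflow#12071 reports that the gradient of tf.norm evaluates to NaN at an all-zero input (in the Euclidean case the gradient divides by the norm itself), which can make a training loss diverge. The sketch below is not part of the commit; it reproduces that behaviour for the default Euclidean norm and checks that the reduce_sum(abs) replacement keeps gradients finite. The tensor shape is a made-up example. Note also that tf.norm with ord=1 and axis=[1, 2] computes the matrix 1-norm (maximum absolute column sum), whereas tf_norm sums absolute values over both axes, so the replacement changes the computed value as well as its gradient.

import tensorflow as tf

def tf_norm(input, axis=None):
    # Entrywise L1 norm: sum of absolute values over the given axes.
    return tf.reduce_sum(tf.abs(input), axis=axis)

x = tf.zeros([1, 4, 4])  # hypothetical all-zero input

with tf.GradientTape() as tape:
    tape.watch(x)
    y = tf.norm(x)  # default Euclidean norm, as in the linked issue
print(tape.gradient(y, x))  # NaN: d/dx sqrt(sum(x**2)) is x / norm, i.e. 0 / 0 at x = 0

with tf.GradientTape() as tape:
    tape.watch(x)
    y = tf_norm(x, axis=[1, 2])
print(tape.gradient(y, x))  # zeros: tf.abs has gradient 0 at 0, so the loss stays finite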
