Commit 557f555b authored by jean Ibarz's avatar jean Ibarz
Browse files

Fixed bug in RandomScale2DLayer, where the input tensor shape must...

Fixed bug in RandomScale2DLayer, where the input tensor's batch dimension must be evaluated using a dynamic scalar tensor in the case where the batch size is dynamically allocated.
parent b169af47
......@@ -54,6 +54,15 @@ class RandomScale2DLayer(tf.keras.layers.Layer):
if not training:
return input
else:
scale_values = tf.random.uniform(shape=[input.shape[0], 1, 1, 1], minval=self.minval, maxval=self.maxval,
# because input.shape[0] may return None when the batch_size may vary,
# we need to use tf.shape, which return instead a dynamic pointer scalar
# tensor pointing to inputs' actual batch size.
# see: https://github.com/tensorflow/tensorflow/issues/31991
batch_size = tf.shape(input)[0] #
scale_factor = tf.random.uniform(shape=(batch_size, 1, 1, 1), minval=self.minval,
maxval=self.maxval,
dtype=tf.dtypes.float32)
return tf.math.multiply(x=scale_values, y=input)
return tf.math.multiply(x=scale_factor, y=input)
def compute_output_shape(self, input_shape):
    """Report the layer's output shape.

    The layer multiplies the input element-wise by per-sample scale
    factors, so the output shape is identical to the input shape
    (including a possibly-unknown batch dimension).

    Args:
        input_shape: Shape of the input tensor.

    Returns:
        The input shape, unchanged.
    """
    unchanged_shape = input_shape
    return unchanged_shape
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.