.../tensorflow/python/ops/resource_variable_ops.py in __imul__(self, unused_other)
1449
1450 def __imul__(self, unused_other):
-> 1451 raise RuntimeError("Variable *= value not supported. Use "
1452 "`var.assign(var * value)` to modify the variable or "
1453 "`var = var * value` to get a new Tensor object.")
RuntimeError: Variable *= value not supported. Use `var.assign(var * value)` to modify the variable or `var = var * value` to get a new Tensor object.
The full stacktrace is below.
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-1-3e98e7412ec2> in <module>
4 keras.layers.GRU(128, return_sequences=True, stateful=True,
5 batch_input_shape=[32, None, 5],
----> 6 recurrent_dropout=0.2)
7 ])
.../tensorflow/python/training/tracking/base.py in _method_wrapper(self, *args, **kwargs)
456 self._self_setattr_tracking = False # pylint: disable=protected-access
457 try:
--> 458 result = method(self, *args, **kwargs)
459 finally:
460 self._self_setattr_tracking = previous_value # pylint: disable=protected-access
.../tensorflow/python/keras/engine/sequential.py in __init__(self, layers, name)
106 if layers:
107 for layer in layers:
--> 108 self.add(layer)
109
110 @property
.../tensorflow/python/training/tracking/base.py in _method_wrapper(self, *args, **kwargs)
456 self._self_setattr_tracking = False # pylint: disable=protected-access
457 try:
--> 458 result = method(self, *args, **kwargs)
459 finally:
460 self._self_setattr_tracking = previous_value # pylint: disable=protected-access
.../tensorflow/python/keras/engine/sequential.py in add(self, layer)
167 # and create the node connecting the current layer
168 # to the input layer we just created.
--> 169 layer(x)
170 set_inputs = True
171
.../tensorflow/python/keras/layers/recurrent.py in __call__(self, inputs, initial_state, constants, **kwargs)
620
621 if initial_state is None and constants is None:
--> 622 return super(RNN, self).__call__(inputs, **kwargs)
623
624 # If any of `initial_state` or `constants` are specified and are Keras
.../tensorflow/python/keras/engine/base_layer.py in __call__(self, inputs, *args, **kwargs)
631 base_layer_utils.AutoAddUpdates(self,
632 inputs)) as auto_updater:
--> 633 outputs = call_fn(inputs, *args, **kwargs)
634 auto_updater.set_outputs(outputs)
635
.../tensorflow/python/keras/layers/recurrent_v2.py in call(self, inputs, mask, training, initial_state)
328 input_length=timesteps,
329 time_major=self.time_major,
--> 330 zero_output_for_mask=self.zero_output_for_mask)
331 # This is a dummy tensor for testing purpose.
332 runtime = _runtime('unknown')
.../tensorflow/python/keras/backend.py in rnn(step_function, inputs, initial_states, go_backwards, mask, constants, unroll, input_length, time_major, zero_output_for_mask)
3558 # the value is discarded.
3559 output_time_zero, _ = step_function(input_time_zero,
-> 3560 initial_states + constants)
3561 output_ta = tuple(
3562 tensor_array_ops.TensorArray(
.../tensorflow/python/keras/layers/recurrent_v2.py in step(cell_inputs, cell_states)
316
317 def step(cell_inputs, cell_states):
--> 318 return self.cell.call(cell_inputs, cell_states, **kwargs)
319
320 last_output, outputs, states = K.rnn(
.../tensorflow/python/keras/layers/recurrent.py in call(self, inputs, states, training)
1706
1707 if 0. < self.recurrent_dropout < 1.:
-> 1708 h_tm1 *= rec_dp_mask[0]
1709
1710 if self.reset_after:
.../tensorflow/python/ops/resource_variable_ops.py in __imul__(self, unused_other)
1449
1450 def __imul__(self, unused_other):
-> 1451 raise RuntimeError("Variable *= value not supported. Use "
1452 "`var.assign(var * value)` to modify the variable or "
1453 "`var = var * value` to get a new Tensor object.")
RuntimeError: Variable *= value not supported. Use `var.assign(var * value)` to modify the variable or `var = var * value` to get a new Tensor object.
System information
Yes
MacOSX 10.13.6
N/A
binary
tf.version.VERSION=2.0.0-dev20190413
tf.version.GIT_VERSION=v1.12.0-12481-gc7ce6f4cd9
3.6.8
N/A
N/A
N/A
N/A
Describe the current behavior
I get an exception when trying to use
`recurrent_dropout` in a stateful RNN. The full stacktrace is below.
Describe the expected behavior
No exception.
Code to reproduce the issue
Other info / logs
Complete stacktrace: