ValueError Traceback (most recent call last)
in <module>()
140 with tf.Session() as sess:
141 sample_encoder_inputs, sample_decoder_inputs ,sample_target_weights= get_samples() # the sample data to be fed in
--> 142 encoder_inputs, decoder_inputs, target_weights, outputs, loss = get_model() # create the placeholders and run the forward pass to get outputs
143
144 input_feed = {} # feed dict: maps the real data onto the list of placeholders encoder_inputs, and likewise for decoder_inputs
in get_model()
131 embedding_size=size,
132 feed_previous=False,
--> 133 dtype=tf.float32)
134
135 # compute the cross-entropy loss
C:\Users\lenovo\Anaconda3\envs\py35\lib\site-packages\tensorflow\contrib\legacy_seq2seq\python\ops\seq2seq.py in embedding_attention_seq2seq(encoder_inputs, decoder_inputs, cell, num_encoder_symbols, num_decoder_symbols, embedding_size, num_heads, output_projection, feed_previous, dtype, scope, initial_state_attention)
852 embedding_size=embedding_size)
853 encoder_outputs, encoder_state = rnn.static_rnn(
--> 854 encoder_cell, encoder_inputs, dtype=dtype)
855
856 # First calculate a concatenation of encoder outputs to put attention on.
C:\Users\lenovo\Anaconda3\envs\py35\lib\site-packages\tensorflow\python\ops\rnn.py in static_rnn(cell, inputs, initial_state, dtype, sequence_length, scope)
1210 state_size=cell.state_size)
1211 else:
--> 1212 (output, state) = call_cell()
1213
1214 outputs.append(output)
C:\Users\lenovo\Anaconda3\envs\py35\lib\site-packages\tensorflow\python\ops\rnn.py in <lambda>()
1197 varscope.reuse_variables()
1198 # pylint: disable=cell-var-from-loop
--> 1199 call_cell = lambda: cell(input_, state)
1200 # pylint: enable=cell-var-from-loop
1201 if sequence_length is not None:
C:\Users\lenovo\Anaconda3\envs\py35\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py in __call__(self, inputs, state, scope)
178 with vs.variable_scope(vs.get_variable_scope(),
179 custom_getter=self._rnn_get_variable):
--> 180 return super(RNNCell, self).__call__(inputs, state)
181
182 def _rnn_get_variable(self, getter, *args, **kwargs):
C:\Users\lenovo\Anaconda3\envs\py35\lib\site-packages\tensorflow\python\layers\base.py in __call__(self, inputs, *args, **kwargs)
439 # Check input assumptions set after layer building, e.g. input shape.
440 self._assert_input_compatibility(inputs)
--> 441 outputs = self.call(inputs, *args, **kwargs)
442
443 # Apply activity regularization.
C:\Users\lenovo\Anaconda3\envs\py35\lib\site-packages\tensorflow\contrib\rnn\python\ops\core_rnn_cell.py in call(self, inputs, state)
112 "embedding", [self._embedding_classes, self._embedding_size],
113 initializer=initializer,
--> 114 dtype=data_type)
115 embedded = embedding_ops.embedding_lookup(embedding,
116 array_ops.reshape(inputs, [-1]))
C:\Users\lenovo\Anaconda3\envs\py35\lib\site-packages\tensorflow\python\ops\variable_scope.py in get_variable(name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)
1063 collections=collections, caching_device=caching_device,
1064 partitioner=partitioner, validate_shape=validate_shape,
--> 1065 use_resource=use_resource, custom_getter=custom_getter)
1066 get_variable_or_local_docstring = (
1067 """%s
C:\Users\lenovo\Anaconda3\envs\py35\lib\site-packages\tensorflow\python\ops\variable_scope.py in get_variable(self, var_store, name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)
960 collections=collections, caching_device=caching_device,
961 partitioner=partitioner, validate_shape=validate_shape,
--> 962 use_resource=use_resource, custom_getter=custom_getter)
963
964 def _get_partitioned_variable(self,
C:\Users\lenovo\Anaconda3\envs\py35\lib\site-packages\tensorflow\python\ops\variable_scope.py in get_variable(self, name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)
358 reuse=reuse, trainable=trainable, collections=collections,
359 caching_device=caching_device, partitioner=partitioner,
--> 360 validate_shape=validate_shape, use_resource=use_resource)
361 else:
362 return _true_getter(
C:\Users\lenovo\Anaconda3\envs\py35\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py in _rnn_get_variable(self, getter, *args, **kwargs)
181
182 def _rnn_get_variable(self, getter, *args, **kwargs):
--> 183 variable = getter(*args, **kwargs)
184 trainable = (variable in tf_variables.trainable_variables() or
185 (isinstance(variable, tf_variables.PartitionedVariable) and
C:\Users\lenovo\Anaconda3\envs\py35\lib\site-packages\tensorflow\python\ops\variable_scope.py in _true_getter(name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource)
350 trainable=trainable, collections=collections,
351 caching_device=caching_device, validate_shape=validate_shape,
--> 352 use_resource=use_resource)
353
354 if custom_getter is not None:
C:\Users\lenovo\Anaconda3\envs\py35\lib\site-packages\tensorflow\python\ops\variable_scope.py in _get_single_variable(self, name, shape, dtype, initializer, regularizer, partition_info, reuse, trainable, collections, caching_device, validate_shape, use_resource)
662 " Did you mean to set reuse=True in VarScope? "
663 "Originally defined at:\n\n%s" % (
--> 664 name, "".join(traceback.format_list(tb))))
665 found_var = self._vars[name]
666 if not shape.is_compatible_with(found_var.get_shape()):
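The traceback bottoms out in _get_single_variable's "variable already exists" check, whose message ends with "Did you mean to set reuse=True in VarScope?". In TF 1.x this typically happens when the seq2seq graph is built more than once in the same default graph (for example, by re-running the notebook cell that calls get_model()), so EmbeddingWrapper tries to create its "embedding" variable a second time. Below is a minimal sketch of the two usual workarounds, assuming TF 1.x with tf.contrib available; the vocabulary sizes, cell type, and sequence length are placeholders for illustration, not the values used in the issue's get_model():

```python
import tensorflow as tf

# Hypothetical sizes for illustration only; the issue does not show the real
# vocabulary sizes, cell size, or sequence length used in get_model().
NUM_ENCODER_SYMBOLS = 1000
NUM_DECODER_SYMBOLS = 1000
SIZE = 128
SEQ_LEN = 10


def build_model(reuse=None):
    # Building the graph inside an explicit variable scope lets a second call
    # pass reuse=True instead of recreating variables such as the "embedding"
    # weight created by EmbeddingWrapper, which is what raises the ValueError
    # in the traceback above.
    with tf.variable_scope("seq2seq", reuse=reuse):
        cell = tf.contrib.rnn.GRUCell(SIZE)
        encoder_inputs = [tf.placeholder(tf.int32, [None], name="enc%d" % i)
                          for i in range(SEQ_LEN)]
        decoder_inputs = [tf.placeholder(tf.int32, [None], name="dec%d" % i)
                          for i in range(SEQ_LEN)]
        outputs, states = tf.contrib.legacy_seq2seq.embedding_attention_seq2seq(
            encoder_inputs, decoder_inputs, cell,
            num_encoder_symbols=NUM_ENCODER_SYMBOLS,
            num_decoder_symbols=NUM_DECODER_SYMBOLS,
            embedding_size=SIZE,
            feed_previous=False,
            dtype=tf.float32)
    return encoder_inputs, decoder_inputs, outputs


# Resetting the default graph before rebuilding avoids stale variables from an
# earlier run of the cell; later builds in the same graph pass reuse=True.
tf.reset_default_graph()
enc, dec, out = build_model()                # first build creates the variables
enc2, dec2, out2 = build_model(reuse=True)   # later builds reuse them
```

Calling tf.reset_default_graph() before rebuilding is the simplest fix when experimenting in a notebook; wrapping construction in a variable scope and passing reuse=True (or tf.AUTO_REUSE on TF 1.4+) is the route the error message itself suggests when the same weights really should be shared across calls.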