我设计了我的神经网络定制层,但我从我的代码得到一个错误。
我想要做的是在论文中描述一个关注层: SAGAN 。 而原来的TF码
class AttentionLayer(Layer):
def __init__(self, **kwargs):
super(AttentionLayer, self).__init__(**kwargs)
def build(self, input_shape):
input_dim = input_shape[-1]
filters_f_g = input_dim // 8
filters_h = input_dim
kernel_shape_f_g = (1, 1) + (input_dim, filters_f_g)
kernel_shape_h = (1, 1) + (input_dim, filters_h)
# Create a trainable weight variable for this layer:
self.gamma = self.add_weight(name='gamma', shape=[1], initializer='zeros', trainable=True)
self.kernel_f = self.add_weight(shape=kernel_shape_f_g,
initializer='glorot_uniform',
name='kernel')
self.kernel_g = self.add_weight(shape=kernel_shape_f_g,
initializer='glorot_uniform',
name='kernel')
self.kernel_h = self.add_weight(shape=kernel_shape_h,
initializer='glorot_uniform',
name='kernel')
self.bias_f = self.add_weight(shape=(filters_f_g,),
initializer='zeros',
name='bias')
self.bias_g = self.add_weight(shape=(filters_f_g,),
initializer='zeros',
name='bias')
self.bias_h = self.add_weight(shape=(filters_h,),
initializer='zeros',
name='bias')
super(AttentionLayer, self).build(input_shape)
def call(self, x):
def hw_flatten(x):
return K.reshape(x, shape=[x.shape[0], x.shape[1]*x.shape[2], x.shape[-1]])
f = K.conv2d(x, kernel=self.kernel_f, strides=(1, 1), padding='same') # [bs, h, w, c']
f = K.bias_add(f, self.bias_f)
g = K.conv2d(x, kernel=self.kernel_g, strides=(1, 1), padding='same') # [bs, h, w, c']
g = K.bias_add(g, self.bias_g)
h = K.conv2d(x, kernel=self.kernel_h, strides=(1, 1), padding='same') # [bs, h, w, c]
h = K.bias_add(h, self.bias_h)
# N = h * w
flatten_g = hw_flatten(g)
flatten_f = hw_flatten(f)
s = K.batch_dot(flatten_g, flatten_f, axes=1) # # [bs, N, N]
beta = K.softmax(s, axis=-1) # attention map
o = K.batch_dot(beta, hw_flatten(h)) # [bs, N, C]
o = K.reshape(o, shape=x.shape) # [bs, h, w, C]
x = self.gamma * o + x
return x
当我将在我的模型这一层,我得到一个错误:
TypeError: Expected binary or unicode string, got Dimension(None)
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-5-9d4e83945ade> in <module>()
5 X = Conv2D(64, kernel_size=5, strides=1, name='conv1')(X)
6 X = Activation('relu')(X)
----> 7 X = AttentionLayer()(X)
8 X = Flatten(name='flatten2')(X)
9 X = Dense(1000, activation='relu')(X)
/anaconda3/envs/pycharm/lib/python3.6/site-packages/keras/engine/topology.py in __call__(self, inputs, **kwargs)
617
618 # Actually call the layer, collecting output(s), mask(s), and shape(s).
--> 619 output = self.call(inputs, **kwargs)
620 output_mask = self.compute_mask(inputs, previous_mask)
621
~/Projects/inpainting/models/attention.py in call(self, x)
49
50 # N = h * w
---> 51 flatten_g = hw_flatten(g)
52 flatten_f = hw_flatten(f)
53 s = K.batch_dot(flatten_g, flatten_f, axes=1) # # [bs, N, N]
~/Projects/inpainting/models/attention.py in hw_flatten(x)
39 def call(self, x):
40 def hw_flatten(x):
---> 41 return K.reshape(x, shape=[x.shape[0], x.shape[1]*x.shape[2], x.shape[-1]])
42
43 f = K.conv2d(x, kernel=self.kernel_f, strides=(1, 1), padding='same') # [bs, h, w, c']
/anaconda3/envs/pycharm/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py in reshape(x, shape)
1896 A tensor.
1897 """
-> 1898 return tf.reshape(x, shape)
1899
1900
/anaconda3/envs/pycharm/lib/python3.6/site-packages/tensorflow/python/ops/gen_array_ops.py in reshape(tensor, shape, name)
6111 if _ctx is None or not _ctx._eager_context.is_eager:
6112 _, _, _op = _op_def_lib._apply_op_helper(
-> 6113 "Reshape", tensor=tensor, shape=shape, name=name)
6114 _result = _op.outputs[:]
6115 _inputs_flat = _op.inputs
/anaconda3/envs/pycharm/lib/python3.6/site-packages/tensorflow/python/framework/op_def_library.py in _apply_op_helper(self, op_type_name, name, **keywords)
511 except TypeError as err:
512 if dtype is None:
--> 513 raise err
514 else:
515 raise TypeError(
/anaconda3/envs/pycharm/lib/python3.6/site-packages/tensorflow/python/framework/op_def_library.py in _apply_op_helper(self, op_type_name, name, **keywords)
508 dtype=dtype,
509 as_ref=input_arg.is_ref,
--> 510 preferred_dtype=default_dtype)
511 except TypeError as err:
512 if dtype is None:
/anaconda3/envs/pycharm/lib/python3.6/site-packages/tensorflow/python/framework/ops.py in internal_convert_to_tensor(value, dtype, name, as_ref, preferred_dtype, ctx)
1102
1103 if ret is None:
-> 1104 ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
1105
1106 if ret is NotImplemented:
/anaconda3/envs/pycharm/lib/python3.6/site-packages/tensorflow/python/framework/constant_op.py in _constant_tensor_conversion_function(v, dtype, name, as_ref)
233 as_ref=False):
234 _ = as_ref
--> 235 return constant(v, dtype=dtype, name=name)
236
237
/anaconda3/envs/pycharm/lib/python3.6/site-packages/tensorflow/python/framework/constant_op.py in constant(value, dtype, shape, name, verify_shape)
212 tensor_value.tensor.CopyFrom(
213 tensor_util.make_tensor_proto(
--> 214 value, dtype=dtype, shape=shape, verify_shape=verify_shape))
215 dtype_value = attr_value_pb2.AttrValue(type=tensor_value.tensor.dtype)
216 const_tensor = g.create_op(
/anaconda3/envs/pycharm/lib/python3.6/site-packages/tensorflow/python/framework/tensor_util.py in make_tensor_proto(values, dtype, shape, verify_shape)
519 raise TypeError("Failed to convert object of type %s to Tensor. "
520 "Contents: %s. Consider casting elements to a "
--> 521 "supported type." % (type(values), values))
522 tensor_proto.string_val.extend(str_values)
523 return tensor_proto
TypeError: Failed to convert object of type <class 'list'> to Tensor. Contents: [Dimension(None), Dimension(64), Dimension(8)]. Consider casting elements to a supported type.
我试图让x_shape = x.shape.as_list()
在hw_flatten功能,但它没有工作,我不知道如何调试这个错误。