Network Optimization
import tensorflow as tf
import tensorflow.contrib.slim as slim

def max_pool_2x2(x):
    return tf.nn.max_pool(x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')

# end_points, batch_norm_params and weight_decay come from the surrounding
# EAST-style model definition (backbone feature maps and training hyper-parameters).
with slim.arg_scope([slim.conv2d],
                    activation_fn=tf.nn.relu,
                    normalizer_fn=slim.batch_norm,
                    normalizer_params=batch_norm_params,
                    weights_regularizer=slim.l2_regularizer(weight_decay)):
    f = [end_points['pool5'], end_points['pool4'],
         end_points['pool3'], end_points['pool2']]

    # Bottom-up branch: downsample the shallow features, fuse them with 1x1
    # convolutions, then apply 3x3 depthwise-separable convolutions.
    F2, C, H2 = {}, {}, {}
    F2[3] = max_pool_2x2(f[3])
    C[3] = slim.conv2d(tf.concat([F2[3], f[3]], axis=-1), 64, 1)
    H2[2] = tf.keras.layers.SeparableConv2D(64, (3, 3), strides=(1, 1),
                                            padding='valid', depth_multiplier=1)(C[3])
    F2[2] = max_pool_2x2(H2[2])
    C[2] = slim.conv2d(tf.concat([F2[2], f[2]], axis=-1), 128, 1)
    H2[1] = tf.keras.layers.SeparableConv2D(128, (3, 3), strides=(1, 1),
                                            padding='valid', depth_multiplier=1)(C[2])

    for i in range(4):
        print('Shape of f_{} {}'.format(i, f[i].shape))

    # Top-down merging branch (EAST feature merging), with the 3x3 convolution
    # replaced by a depthwise-separable convolution.
    g = [None, None, None, None]
    h = [None, None, None, None]
    num_outputs = [None, 128, 64, 32]
    for i in range(4):
        if i == 0:
            h[i] = f[i]
        else:
            c1_1 = slim.conv2d(tf.concat([g[i - 1], f[i]], axis=-1), num_outputs[i], 1)
            h[i] = tf.keras.layers.SeparableConv2D(num_outputs[i], (3, 3), strides=(1, 1),
                                                   padding='valid', depth_multiplier=1)(c1_1)
        # Augment the shallow feature maps with the bottom-up branch outputs.
        if i == 2:
            f[2] = tf.concat([F2[3], f[2]], axis=-1)
        if i == 3:
            f[2] = tf.concat([F2[2], f[1]], axis=-1)
            f[2] = tf.concat([H2[1], f[1]], axis=-1)
        if i <= 2:
            g[i] = unpool(h[i])
        else:
            g[i] = slim.conv2d(h[i], num_outputs[i], 3)
        print('Shape of h_{} {}, g_{} {}'.format(i, h[i].shape, i, g[i].shape))
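The merging loop above calls an unpool helper that is not defined in this snippet. A minimal sketch, assuming the EAST-style implementation that simply upsamples a feature map to twice its spatial size with bilinear resizing:

    # Assumed helper (not shown in the original snippet): EAST-style unpool,
    # i.e. 2x bilinear upsampling of the feature map.
    def unpool(inputs):
        return tf.image.resize_bilinear(
            inputs, size=[tf.shape(inputs)[1] * 2, tf.shape(inputs)[2] * 2])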
Structure: