// We"ll add the feature maps to a collection. In the paper they use
// one of vgg16"s layers as a feature map, so we start by adding it.
tf.add_to_collection("FEATURE_MAPS", base_net_endpoints[
scope + "/vgg_16/conv4/conv4_3"]
)
# TODO: check that the usage of `padding="VALID"` is correct
# TODO: check that the 1x1 convs actually use relu
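For context, here is a minimal sketch of how the FEATURE_MAPS collection populated above would later be read back on the consumer side, assuming TF 1.x graph collections (the consumer code is not part of this diff):

import tensorflow as tf

# Hypothetical consumer side: fetch every feature map registered so far.
feature_maps = tf.get_collection("FEATURE_MAPS")
for fmap in feature_maps:
    print(fmap.name, fmap.get_shape())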
After Change
# As in the paper, conv4_3 is L2-normalized before being used by the
# predictors.
with tf.variable_scope(vgg_conv4_3_name + "_norm"):
    inputs_shape = vgg_conv4_3.get_shape()
    inputs_rank = inputs_shape.ndims
    dtype = vgg_conv4_3.dtype.base_dtype
    # Normalize along the last (channels) dimension only.
    norm_dim = tf.range(inputs_rank - 1, inputs_rank)
    params_shape = inputs_shape[-1:]

    vgg_conv4_3_norm = tf.nn.l2_normalize(
        vgg_conv4_3, norm_dim, epsilon=1e-12
    )

    # Post scaling: one learnable scale per channel.
    # (`variables` and `init_ops` are assumed to be tf.contrib.framework's
    # variables module and tensorflow.python.ops.init_ops, imported
    # elsewhere in this file.)
    scale = variables.model_variable(
        "gamma", shape=params_shape, dtype=dtype,
        initializer=init_ops.ones_initializer()
    )
    vgg_conv4_3_norm = tf.multiply(vgg_conv4_3_norm, scale)

tf.add_to_collection("FEATURE_MAPS", vgg_conv4_3_norm)
# TODO: check that the usage of `padding="VALID"` is correct
# TODO: check that the 1x1 convs actually use relu
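For reference, the L2-normalization-plus-learned-scale step above can also be written as a small standalone helper using only core TF 1.x ops. This is an illustrative sketch: the function name and the use of tf.get_variable / tf.ones_initializer in place of slim's variables.model_variable are assumptions, not part of the actual change.

import tensorflow as tf

def l2_normalize_with_scale(feature_map, scope="l2_norm_scale"):
    # L2-normalize each spatial position's feature vector along the
    # channels axis, then rescale it with a per-channel learnable gamma
    # (initialized to ones, matching the change above).
    with tf.variable_scope(scope):
        normalized = tf.nn.l2_normalize(feature_map, -1, epsilon=1e-12)
        gamma = tf.get_variable(
            "gamma",
            shape=feature_map.get_shape()[-1:],
            dtype=feature_map.dtype.base_dtype,
            initializer=tf.ones_initializer(),
        )
        return normalized * gamma

With such a helper, the conv4_3 handling above would reduce to something like vgg_conv4_3_norm = l2_normalize_with_scale(vgg_conv4_3, vgg_conv4_3_name + "_norm").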
# Modifications to vgg16