        # Each position differs only in how `upconv` is constructed.
        # For the outermost block, the special part is the final `tanh`.
        if outermost:
            upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc,
                                        kernel_size=4, stride=2, padding=1)
            downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4,
                                 stride=2, padding=1)
            down = [downconv]
            up = [uprelu, upconv, nn.Tanh()]
            model = down + [submodule] + up
        # For the innermost block, upconv takes `inner_nc` channels instead of `inner_nc * 2`.
        elif innermost:
            upconv = InceptionUp(inner_nc, outer_nc)  # was: nn.ConvTranspose2d(inner_nc, outer_nc, kernel_size=4, stride=2, padding=1)
            down = [downrelu, downconv]  # the innermost block has no submodule, and the norm layer is dropped
            up = [uprelu, upconv, upnorm]
            model = down + up
        # Otherwise, the regular intermediate block.
        else:
            upconv = InceptionUp(inner_nc * 3, outer_nc)  # was: nn.ConvTranspose2d(inner_nc * 2, outer_nc, kernel_size=4, stride=2, padding=1)
            down = [downrelu, downconv, downnorm]
            up = [uprelu, innerCosBefore, shift, upconv, upnorm]
            if use_dropout:
                model = down + [submodule] + up + [nn.Dropout(0.5)]
            else:
                model = down + [submodule] + up
        self.model = nn.Sequential(*model)
    def forward(self, x):
        if self.outermost:  # the outermost block passes the input straight through
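
The `InceptionUp` module used above is not defined in this snippet. Below is a minimal sketch of such a multi-branch upsampling block, assuming it concatenates a transposed-convolution branch with a resize-then-convolve branch; the class name and exact branch layout are hypothetical, not taken from the repository.

import torch
import torch.nn as nn

class InceptionUpSketch(nn.Module):
    # Hypothetical stand-in for `InceptionUp`: two parallel 2x-upsampling branches
    # whose outputs are concatenated along the channel dimension.
    def __init__(self, in_nc, out_nc):
        super().__init__()
        self.branch_deconv = nn.ConvTranspose2d(in_nc, out_nc // 2,
                                                kernel_size=4, stride=2, padding=1)
        self.branch_resize = nn.Sequential(
            nn.Upsample(scale_factor=2, mode='nearest'),
            nn.Conv2d(in_nc, out_nc - out_nc // 2, kernel_size=3, padding=1))

    def forward(self, x):
        # both branches double the spatial size; together they produce out_nc channels
        return torch.cat([self.branch_deconv(x), self.branch_resize(x)], dim=1)
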
After Change
        if input_nc is None:
            input_nc = outer_nc
        downconv = spectral_norm(nn.Conv2d(input_nc, inner_nc, kernel_size=4,
                                           stride=2, padding=1), use_spectral_norm)
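        # `spectral_norm` here is assumed to be a small project-level helper that applies
        # torch's `nn.utils.spectral_norm` to the module only when `use_spectral_norm` is
        # enabled, roughly: `return nn.utils.spectral_norm(m) if use_sn else m`.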
        downrelu = nn.LeakyReLU(0.2, True)
        downnorm = norm_layer(inner_nc)
        uprelu = nn.ReLU(True)
        upnorm = norm_layer(outer_nc)
        # The downconv layer takes outer_nc channels in and produces inner_nc channels out,
        # so the shift layer is defined as follows:
        shift = InnerShiftTriple(opt.shift_sz, opt.stride, opt.mask_thred,
                                 opt.triple_weight, layer_to_last=layer_to_last)
        shift.set_mask(mask_global)
        shift_list.append(shift)
        # Add the latent constraint and register it in the constraint-layer list.
        innerCos = InnerCos(strength=opt.strength, skip=opt.skip, layer_to_last=layer_to_last)
        innerCos.set_mask(mask_global)  # the mask must be set for the InnerCos layer as well
        innerCos_list.append(innerCos)
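        # Note: `shift_list` and `innerCos_list` are assumed to be collected by the network
        # builder so that the mask and constraint state of these layers can be updated from
        # outside the nn.Sequential container.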
        # Each position differs only in how `upconv` is constructed.
        # For the outermost block, the special part is the final `tanh`.
        if outermost:
            upconv = spectral_norm(nn.ConvTranspose2d(inner_nc * 2, outer_nc,
                                                      kernel_size=4, stride=2,
                                                      padding=1), use_spectral_norm)
            down = [downconv]
            up = [uprelu, upconv, nn.Tanh()]
            model = down + [submodule] + up
        # For the innermost block, upconv takes `inner_nc` channels instead of `inner_nc * 2`.
        elif innermost:
            upconv = spectral_norm(nn.ConvTranspose2d(inner_nc, outer_nc,
                                                      kernel_size=4, stride=2,
                                                      padding=1), use_spectral_norm)
            down = [downrelu, downconv]  # the innermost block has no submodule, and the norm layer is dropped
            up = [uprelu, upconv, upnorm]
            model = down + up
        # Otherwise, the regular intermediate block.
        else:
            # The shift triple differs here: upconv takes `inner_nc * 3` channels, not `inner_nc * 2`.
            upconv = spectral_norm(nn.ConvTranspose2d(inner_nc * 3, outer_nc,
                                                      kernel_size=4, stride=2,
                                                      padding=1), use_spectral_norm)
            down = [downrelu, downconv, downnorm]
            # The shift layer must be placed after uprelu.
            # NB: innerCos is placed before shift, so the latent gradient is added
            # to the part of the network that precedes it.
            up = [uprelu, innerCos, shift, upconv, upnorm]
            model = down + [submodule] + up
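            # Channel bookkeeping (inferred from the `inner_nc * 3` above, not stated in the source):
            # the submodule returns its decoder features concatenated with the skip features
            # (2 * inner_nc), and the shift layer appends another inner_nc of shifted features,
            # so upconv sees 3 * inner_nc input channels.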
        self.model = nn.Sequential(*model)
    def forward(self, x):
        if self.outermost:  # the outermost block passes the input straight through
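
The snippet cuts off here. In the standard pix2pix-style UnetSkipConnectionBlock pattern that this code follows, the rest of `forward` typically looks like the sketch below; this is an assumption based on that common pattern, not code taken from this change.

    def forward(self, x):
        if self.outermost:
            # the outermost block returns the model output directly
            return self.model(x)
        else:
            # inner blocks concatenate the skip connection with the submodule output
            return torch.cat([x, self.model(x)], 1)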