#!/usr/bin/env python3
# -*- coding:utf-8 -*-
#############################################################
# File: Generator.py
# Created Date: Sunday January 16th 2022
# Author: Chen Xuanhong
# Email: chenxuanhongzju@outlook.com
# Last Modified: Sunday, 16th January 2022 11:42:14 pm
# Modified By: Chen Xuanhong
# Copyright (c) 2022 Shanghai Jiao Tong University
#############################################################

import torch
from torch import nn


class InstanceNorm(nn.Module):
    def __init__(self, epsilon=1e-8):
        """
        @notice: avoid in-place ops.
        https://discuss.pytorch.org/t/encounter-the-runtimeerror-one-of-the-variables-needed-for-gradient-computation-has-been-modified-by-an-inplace-operation/836/3
        """
        super(InstanceNorm, self).__init__()
        self.epsilon = epsilon

    def forward(self, x):
        # Normalize each channel of each sample over its spatial dimensions;
        # epsilon keeps rsqrt numerically stable.
        x = x - torch.mean(x, (2, 3), True)
        tmp = torch.mul(x, x)  # or x ** 2
        tmp = torch.rsqrt(torch.mean(tmp, (2, 3), True) + self.epsilon)
        return x * tmp


class ApplyStyle(nn.Module):
    """
    @ref: https://github.com/lernapparat/lernapparat/blob/master/style_gan/pytorch_style_gan.ipynb
    """
    def __init__(self, latent_size, channels):
        super(ApplyStyle, self).__init__()
        self.linear = nn.Linear(latent_size, channels * 2)

    def forward(self, x, latent):
        style = self.linear(latent)    # style => [batch_size, n_channels*2]
        shape = [-1, 2, x.size(1), 1, 1]
        style = style.view(shape)      # [batch_size, 2, n_channels, 1, 1]
        # Per-channel affine modulation: predicted scale (offset by 1) and bias.
        x = x * (style[:, 0] + 1.) + style[:, 1]
        return x


class ResnetBlock_Adain(nn.Module):
    def __init__(self, dim, latent_size, padding_type, activation=nn.ReLU(True)):
        super(ResnetBlock_Adain, self).__init__()

        p = 0
        conv1 = []
        if padding_type == 'reflect':
            conv1 += [nn.ReflectionPad2d(1)]
        elif padding_type == 'replicate':
            conv1 += [nn.ReplicationPad2d(1)]
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError('padding [%s] is not implemented' % padding_type)
        conv1 += [nn.Conv2d(dim, dim, kernel_size=3, padding=p), InstanceNorm()]
        self.conv1 = nn.Sequential(*conv1)
        self.style1 = ApplyStyle(latent_size, dim)
        self.act1 = activation

        p = 0
        conv2 = []
        if padding_type == 'reflect':
            conv2 += [nn.ReflectionPad2d(1)]
        elif padding_type == 'replicate':
            conv2 += [nn.ReplicationPad2d(1)]
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError('padding [%s] is not implemented' % padding_type)
        conv2 += [nn.Conv2d(dim, dim, kernel_size=3, padding=p), InstanceNorm()]
        self.conv2 = nn.Sequential(*conv2)
        self.style2 = ApplyStyle(latent_size, dim)

    def forward(self, x, dlatents_in_slice):
        y = self.conv1(x)
        y = self.style1(y, dlatents_in_slice)
        y = self.act1(y)
        y = self.conv2(y)
        y = self.style2(y, dlatents_in_slice)
        out = x + y
        return out
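
# Illustrative sketch (not part of the original file): the modules above
# compose into an AdaIN-style residual block. InstanceNorm whitens each
# channel over its spatial extent, then ApplyStyle re-scales and re-shifts
# it from a latent code. Every shape and size below is an assumption chosen
# for the demo, not a value prescribed by this file.
def _adain_block_demo():
    block = ResnetBlock_Adain(dim=64, latent_size=512, padding_type='reflect')
    feat = torch.randn(2, 64, 32, 32)   # [batch, channels, H, W] feature map
    latent = torch.randn(2, 512)        # one conditioning vector per sample
    out = block(feat, latent)
    assert out.shape == feat.shape      # the residual block preserves shape
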
class Generator(nn.Module):
    def __init__(self, **kwargs):
        super().__init__()
        chn = kwargs["g_conv_dim"]
        k_size = kwargs["g_kernel_size"]
        res_num = kwargs["res_num"]
        padding_size = int((k_size - 1) / 2)
        padding_type = 'reflect'
        activation = nn.ReLU(True)

        self.first_layer = nn.Sequential(
            nn.Conv2d(3, 64, kernel_size=3, padding=1),
            nn.BatchNorm2d(64),
            activation
        )

        ### downsample
        self.down1 = nn.Sequential(
            nn.Conv2d(64, 128, kernel_size=3, stride=2, padding=1),
            nn.BatchNorm2d(128),
            activation
        )
        self.down2 = nn.Sequential(
            nn.Conv2d(128, 256, kernel_size=3, stride=2, padding=1),
            nn.BatchNorm2d(256),
            activation
        )
        self.down3 = nn.Sequential(
            nn.Conv2d(256, 512, kernel_size=3, stride=2, padding=1),
            nn.BatchNorm2d(512),
            activation
        )
        self.down4 = nn.Sequential(
            nn.Conv2d(512, 512, kernel_size=3, stride=2, padding=1),
            nn.BatchNorm2d(512),
            activation
        )

        ### resnet blocks
        BN = []
        for i in range(res_num):
            BN += [ResnetBlock_Adain(512, latent_size=chn,
                                     padding_type=padding_type,
                                     activation=activation)]
        self.BottleNeck = nn.Sequential(*BN)

        ### upsample
        self.up4 = nn.Sequential(
            nn.Upsample(scale_factor=2, mode='bilinear'),
            nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(512),
            activation
        )
        self.up3 = nn.Sequential(
            nn.Upsample(scale_factor=2, mode='bilinear'),
            nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(256),
            activation
        )
        self.up2 = nn.Sequential(
            nn.Upsample(scale_factor=2, mode='bilinear'),
            nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(128),
            activation
        )
        self.up1 = nn.Sequential(
            nn.Upsample(scale_factor=2, mode='bilinear'),
            nn.Conv2d(128, 64, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(64),
            activation
        )
        self.last_layer = nn.Sequential(nn.Conv2d(64, 3, kernel_size=3, padding=1))

    def forward(self, input, id):
        x = input  # 3*224*224
        skip1 = self.first_layer(x)
        skip2 = self.down1(skip1)
        skip3 = self.down2(skip2)
        skip4 = self.down3(skip3)
        x = self.down4(skip4)
        # Chain the bottleneck blocks, conditioning each one on the same
        # identity latent `id`.
        for i in range(len(self.BottleNeck)):
            x = self.BottleNeck[i](x, id)
        x = self.up4(x)
        x = self.up3(x)
        x = self.up2(x)
        x = self.up1(x)
        x = self.last_layer(x)
        return x
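
if __name__ == "__main__":
    # Minimal smoke test; a sketch, not part of the original file. The kwarg
    # names match what __init__ reads, but the concrete values are assumptions:
    # g_conv_dim doubles as the latent size handed to every ResnetBlock_Adain,
    # so the identity code below is 512-dimensional.
    net = Generator(g_conv_dim=512, g_kernel_size=3, res_num=9).eval()
    img = torch.randn(1, 3, 224, 224)   # forward() assumes a 3*224*224 input
    latent = torch.randn(1, 512)        # identity code `id`
    with torch.no_grad():
        out = net(img, latent)
    # Four stride-2 downsamplings are undone by four 2x upsamplings.
    print(out.shape)  # torch.Size([1, 3, 224, 224])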