chenxuanhong
2022-01-24 19:01:00 +08:00
parent 0f8c2f929e
commit 94534e2e30
10 changed files with 765 additions and 79 deletions
+25 -56
@@ -5,7 +5,7 @@
 # Created Date: Sunday January 16th 2022
 # Author: Chen Xuanhong
 # Email: chenxuanhongzju@outlook.com
-# Last Modified: Thursday, 20th January 2022 10:51:02 pm
+# Last Modified: Monday, 24th January 2022 6:47:22 pm
 # Modified By: Chen Xuanhong
 # Copyright (c) 2022 Shanghai Jiao Tong University
 #############################################################
@@ -15,17 +15,16 @@ from torch import nn
 from torch.nn import init
 from torch.nn import functional as F
-class InstanceNorm(nn.Module):
+class Demodule(nn.Module):
     def __init__(self, epsilon=1e-8):
         """
             @notice: avoid in-place ops.
             https://discuss.pytorch.org/t/encounter-the-runtimeerror-one-of-the-variables-needed-for-gradient-computation-has-been-modified-by-an-inplace-operation/836/3
         """
-        super(InstanceNorm, self).__init__()
+        super(Demodule, self).__init__()
         self.epsilon = epsilon
     def forward(self, x):
         x = x - torch.mean(x, (2, 3), True)
         tmp = torch.mul(x, x) # or x ** 2
         tmp = torch.rsqrt(torch.mean(tmp, (2, 3), True) + self.epsilon)
         return x * tmp
@@ -46,9 +45,22 @@ class ApplyStyle(nn.Module):
         x = x * (style[:, 0] * 1 + 1.) + style[:, 1] * 1
         return x
-class ResnetBlock_Adain(nn.Module):
+class Modulation(nn.Module):
+    def __init__(self, latent_size, channels):
+        super(Modulation, self).__init__()
+        self.linear = nn.Linear(latent_size, channels)
+    def forward(self, x, latent):
+        style = self.linear(latent)  # style => [batch_size, n_channels]
+        shape = [-1, x.size(1), 1, 1]
+        style = style.view(shape)    # => [batch_size, n_channels, 1, 1]
+        #x = x * (style[:, 0] + 1.) + style[:, 1]
+        x = x * style
+        return x
+class ResnetBlock_Modulation(nn.Module):
     def __init__(self, dim, latent_size, padding_type, activation=nn.ReLU(True)):
-        super(ResnetBlock_Adain, self).__init__()
+        super(ResnetBlock_Modulation, self).__init__()
         p = 0
         conv1 = []
@@ -60,9 +72,9 @@ class ResnetBlock_Adain(nn.Module):
             p = 1
         else:
             raise NotImplementedError('padding [%s] is not implemented' % padding_type)
-        conv1 += [nn.Conv2d(dim, dim, kernel_size=3, padding = p), InstanceNorm()]
+        conv1 += [nn.Conv2d(dim, dim, kernel_size=3, padding = p), Demodule()]
         self.conv1 = nn.Sequential(*conv1)
-        self.style1 = ApplyStyle(latent_size, dim)
+        self.style1 = Modulation(latent_size, dim)
         self.act1 = activation
         p = 0
@@ -75,52 +87,9 @@ class ResnetBlock_Adain(nn.Module):
             p = 1
         else:
             raise NotImplementedError('padding [%s] is not implemented' % padding_type)
-        conv2 += [nn.Conv2d(dim, dim, kernel_size=3, padding=p), InstanceNorm()]
+        conv2 += [nn.Conv2d(dim, dim, kernel_size=3, padding=p), Demodule()]
         self.conv2 = nn.Sequential(*conv2)
-        self.style2 = ApplyStyle(latent_size, dim)
-    def forward(self, x, dlatents_in_slice):
-        y = self.conv1(x)
-        y = self.style1(y, dlatents_in_slice)
-        y = self.act1(y)
-        y = self.conv2(y)
-        y = self.style2(y, dlatents_in_slice)
-        out = x + y
-        return out
-class newres(nn.Module):
-    def __init__(self, dim, latent_size, padding_type, activation=nn.ReLU(True)):
-        super(ResnetBlock_Adain, self).__init__()
-        p = 0
-        conv1 = []
-        if padding_type == 'reflect':
-            conv1 += [nn.ReflectionPad2d(1)]
-        elif padding_type == 'replicate':
-            conv1 += [nn.ReplicationPad2d(1)]
-        elif padding_type == 'zero':
-            p = 1
-        else:
-            raise NotImplementedError('padding [%s] is not implemented' % padding_type)
-        conv1 += [nn.Conv2d(dim, dim, kernel_size=1), InstanceNorm()]
-        self.conv1 = nn.Sequential(*conv1)
-        self.style1 = ApplyStyle(latent_size, dim)
-        self.act1 = activation
-        p = 0
-        conv2 = []
-        if padding_type == 'reflect':
-            conv2 += [nn.ReflectionPad2d(1)]
-        elif padding_type == 'replicate':
-            conv2 += [nn.ReplicationPad2d(1)]
-        elif padding_type == 'zero':
-            p = 1
-        else:
-            raise NotImplementedError('padding [%s] is not implemented' % padding_type)
-        conv2 += [nn.Conv2d(dim, dim, kernel_size=3, padding=p), InstanceNorm()]
-        self.conv2 = nn.Sequential(*conv2)
-        self.style2 = ApplyStyle(latent_size, dim)
+        self.style2 = Modulation(latent_size, dim)
     def forward(self, x, dlatents_in_slice):
@@ -148,7 +117,7 @@ class Generator(nn.Module):
         activation = nn.ReLU(True)
-        self.first_layer = nn.Sequential(nn.Conv2d(3, 64, kernel_size=3, padding=1),
+        self.stem = nn.Sequential(nn.Conv2d(3, 64, kernel_size=3, padding=1),
                                          nn.BatchNorm2d(64), activation)
         ### downsample
         self.down1 = nn.Sequential(nn.Conv2d(64, 128, kernel_size=3, stride=2, padding=1),
@@ -167,7 +136,7 @@ class Generator(nn.Module):
         BN = []
         for i in range(res_num):
             BN += [
-                ResnetBlock_Adain(512, latent_size=chn, padding_type=padding_type, activation=activation)]
+                ResnetBlock_Modulation(512, latent_size=chn, padding_type=padding_type, activation=activation)]
         self.BottleNeck = nn.Sequential(*BN)
         self.up4 = nn.Sequential(
@@ -210,7 +179,7 @@ class Generator(nn.Module):
     def forward(self, input, id):
         x = input # 3*224*224
-        skip1 = self.first_layer(x)
+        skip1 = self.stem(x)
         skip2 = self.down1(skip1)
         skip3 = self.down2(skip2)
         skip4 = self.down3(skip3)
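
For reference, the renamed pair implements a modulate/demodulate pattern: Demodule subtracts the per-channel spatial mean and divides by the per-channel standard deviation (an instance-wise normalization), and Modulation multiplies each channel by a style vector projected from the identity latent; in the bottleneck block each conv ends with Demodule and is then followed by Modulation. Below is a minimal standalone sketch, not part of this commit; the batch size, latent size, and 28x28 feature-map size are illustrative assumptions.

import torch
from torch import nn

class Demodule(nn.Module):
    def __init__(self, epsilon=1e-8):
        super(Demodule, self).__init__()
        self.epsilon = epsilon
    def forward(self, x):
        # zero-mean over spatial dims, then scale by the inverse RMS per channel
        x = x - torch.mean(x, (2, 3), True)
        return x * torch.rsqrt(torch.mean(x * x, (2, 3), True) + self.epsilon)

class Modulation(nn.Module):
    def __init__(self, latent_size, channels):
        super(Modulation, self).__init__()
        self.linear = nn.Linear(latent_size, channels)
    def forward(self, x, latent):
        # project the latent to one scale per channel and broadcast over H x W
        style = self.linear(latent).view(-1, x.size(1), 1, 1)
        return x * style

# shape check with assumed sizes
feat = torch.randn(2, 512, 28, 28)   # bottleneck feature map
latent = torch.randn(2, 512)         # identity latent
out = Modulation(512, 512)(Demodule()(feat), latent)
print(out.shape)                     # torch.Size([2, 512, 28, 28])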