1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
|
# Wrap several layers into a reusable block; note that spectral normalization (SN) is applied here.
def block(input, output_channel, training=False):
    """Residual up-sampling block with spectral normalization (SN).

    Main path:  BN -> ReLU -> SN(conv-transpose) -> BN -> ReLU -> SN(conv-transpose).
    Shortcut:   a single SN(conv-transpose) projection so the two paths have
    matching shapes after up-sampling.  The two paths are summed.

    Args:
        input: input feature-map tensor.  (NOTE(review): the parameter name
            shadows the `input` builtin; kept for caller compatibility.)
        output_channel: number of output channels for every conv-transpose.
        training: forwarded to BatchNormalization so it uses batch statistics
            during training and moving averages at inference.

    Returns:
        The element-wise sum of the main path and the shortcut path.
    """
    # --- main path ---
    x = BatchNormalization()(input, training=training)
    x = RELU()(x)
    # stride-2 transposed conv doubles the spatial resolution; SN wraps the
    # layer to constrain its spectral norm (GAN training stability).
    x = SN(conv2dT(output_channel, 3, 2, padding='same'))(x)
    x = BatchNormalization()(x, training=training)
    x = RELU()(x)
    x = SN(conv2dT(output_channel, 3, 2, padding='same'))(x)
    # --- shortcut path (projection, also up-sampled so shapes match) ---
    shortcut = SN(conv2dT(output_channel, 3, 2, padding='same'))(input)
    return layers.add([x, shortcut])
def get_generator(num_classer, gf_dim=16, training=False):
    """Build the conditional GAN generator as a functional Model.

    The noise vector ``z`` is concatenated with the one-hot class label,
    projected through a spectrally-normalized dense layer, run through
    up-sampling residual blocks with a self-attention layer between them,
    and finished with BN -> ReLU -> SN(conv) to produce the image.

    NOTE(review): this reconstructs a sketch; ``...`` below marks details
    the original notes elided (units, kernel sizes, reshape) — confirm
    against the full model definition.

    Args:
        num_classer: number of classes (name kept as-is for compatibility;
            presumably intended to be ``num_classes``).
        gf_dim: base channel multiplier for the generator feature maps.
        training: forwarded to BatchNormalization.

    Returns:
        A Model mapping (noise, class label) -> generated image.
    """
    z = Input(name='noisy')                 # noise-vector input
    condition_label = Input()               # integer class-label input
    one_hot_label = onehot(condition_label, num_classer)
    # Condition the noise on the class by concatenation.
    x = layers.Concatenate()([z, one_hot_label])
    # Spectrally-normalized dense projection (details elided in the notes).
    x = SN(layers.Dense(...))(x)
    x = block(x, gf_dim * 8, training=training)
    # Self-attention between the up-sampling blocks (SAGAN-style).
    x = Attention_layer()(x)
    x = block(x, gf_dim, training=training)
    x = BatchNormalization()(x, training=training)
    x = RELU()(x)
    # Final spectrally-normalized conv produces the output image.
    conv = layers.conv2d(...)
    output = SN(conv)(x)
    return Model([z, condition_label], output)
|