 from paddle.trainer_config_helpers import *

 mode = get_config_arg("mode", str, "generator")
-assert mode in set(["generator",
-                    "discriminator",
-                    "generator_training",
-                    "discriminator_training"])
+assert mode in set([
+    "generator", "discriminator", "generator_training", "discriminator_training"
+])

 is_generator_training = mode == "generator_training"
 is_discriminator_training = mode == "discriminator_training"
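Note: this one config file is parsed in four different modes. The two *_training modes build the full generator-discriminator stack but freeze the opposing network's parameters through ParamAttr(is_static=True), which is how the alternating GAN updates are expressed here. A minimal pure-Python sketch of that mode-to-freeze mapping (illustration only, not PaddlePaddle API):

    def static_flags(mode):
        # Mirrors the is_static wiring in the config: while the generator
        # trains, the discriminator's parameters are held fixed, and vice
        # versa; the plain inference modes freeze nothing.
        is_generator_training = mode == "generator_training"
        is_discriminator_training = mode == "discriminator_training"
        return {"discriminator_frozen": is_generator_training,
                "generator_frozen": is_discriminator_training}

    for m in ("generator", "discriminator",
              "generator_training", "discriminator_training"):
        print(m, static_flags(m))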
 settings(
     batch_size=128,
     learning_rate=1e-4,
-    learning_method=AdamOptimizer(beta1=0.5)
-)
+    learning_method=AdamOptimizer(beta1=0.5))
+

 def discriminator(sample):
     """
@@ -50,70 +49,87 @@ def discriminator(sample):
     of the sample is from real data.
     """
     param_attr = ParamAttr(is_static=is_generator_training)
-    bias_attr = ParamAttr(is_static=is_generator_training,
-                          initial_mean=1.0,
-                          initial_std=0)
-
-    hidden = fc_layer(input=sample, name="dis_hidden", size=hidden_dim,
-                      bias_attr=bias_attr,
-                      param_attr=param_attr,
-                      act=ReluActivation())
-
-    hidden2 = fc_layer(input=hidden, name="dis_hidden2", size=hidden_dim,
-                       bias_attr=bias_attr,
-                       param_attr=param_attr,
-                       act=LinearActivation())
-
-    hidden_bn = batch_norm_layer(hidden2,
-                                 act=ReluActivation(),
-                                 name="dis_hidden_bn",
-                                 bias_attr=bias_attr,
-                                 param_attr=ParamAttr(is_static=is_generator_training,
-                                                      initial_mean=1.0,
-                                                      initial_std=0.02),
-                                 use_global_stats=False)
-
-    return fc_layer(input=hidden_bn, name="dis_prob", size=2,
-                    bias_attr=bias_attr,
-                    param_attr=param_attr,
-                    act=SoftmaxActivation())
+    bias_attr = ParamAttr(
+        is_static=is_generator_training, initial_mean=1.0, initial_std=0)
+
+    hidden = fc_layer(
+        input=sample,
+        name="dis_hidden",
+        size=hidden_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=ReluActivation())
+
+    hidden2 = fc_layer(
+        input=hidden,
+        name="dis_hidden2",
+        size=hidden_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=LinearActivation())
+
+    hidden_bn = batch_norm_layer(
+        hidden2,
+        act=ReluActivation(),
+        name="dis_hidden_bn",
+        bias_attr=bias_attr,
+        param_attr=ParamAttr(
+            is_static=is_generator_training, initial_mean=1.0,
+            initial_std=0.02),
+        use_global_stats=False)
+
+    return fc_layer(
+        input=hidden_bn,
+        name="dis_prob",
+        size=2,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=SoftmaxActivation())
+

 def generator(noise):
     """
     generator generates a sample given noise
     """
     param_attr = ParamAttr(is_static=is_discriminator_training)
-    bias_attr = ParamAttr(is_static=is_discriminator_training,
-                          initial_mean=1.0,
-                          initial_std=0)
-
-    hidden = fc_layer(input=noise,
-                      name="gen_layer_hidden",
-                      size=hidden_dim,
-                      bias_attr=bias_attr,
-                      param_attr=param_attr,
-                      act=ReluActivation())
-
-    hidden2 = fc_layer(input=hidden, name="gen_hidden2", size=hidden_dim,
-                       bias_attr=bias_attr,
-                       param_attr=param_attr,
-                       act=LinearActivation())
-
-    hidden_bn = batch_norm_layer(hidden2,
-                                 act=ReluActivation(),
-                                 name="gen_layer_hidden_bn",
-                                 bias_attr=bias_attr,
-                                 param_attr=ParamAttr(is_static=is_discriminator_training,
-                                                      initial_mean=1.0,
-                                                      initial_std=0.02),
-                                 use_global_stats=False)
-
-    return fc_layer(input=hidden_bn,
-                    name="gen_layer1",
-                    size=sample_dim,
-                    bias_attr=bias_attr,
-                    param_attr=param_attr,
-                    act=LinearActivation())
+    bias_attr = ParamAttr(
+        is_static=is_discriminator_training, initial_mean=1.0, initial_std=0)
+
+    hidden = fc_layer(
+        input=noise,
+        name="gen_layer_hidden",
+        size=hidden_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=ReluActivation())
+
+    hidden2 = fc_layer(
+        input=hidden,
+        name="gen_hidden2",
+        size=hidden_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=LinearActivation())
+
+    hidden_bn = batch_norm_layer(
+        hidden2,
+        act=ReluActivation(),
+        name="gen_layer_hidden_bn",
+        bias_attr=bias_attr,
+        param_attr=ParamAttr(
+            is_static=is_discriminator_training,
+            initial_mean=1.0,
+            initial_std=0.02),
+        use_global_stats=False)
+
+    return fc_layer(
+        input=hidden_bn,
+        name="gen_layer1",
+        size=sample_dim,
+        bias_attr=bias_attr,
+        param_attr=param_attr,
+        act=LinearActivation())
+

 if is_generator_training:
     noise = data_layer(name="noise", size=noise_dim)
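The hunk below reflows the cost wiring. dis_prob is a two-way softmax over (fake, real), and cross_entropy with the 0/1 label is the usual GAN objective for both training modes. A numpy sketch of what that cost computes for one sample (an illustration of the math, not the layer's implementation):

    import numpy as np

    def two_way_cross_entropy(logits, label):
        # Softmax over the two dis_prob outputs, then the negative
        # log-likelihood of the real/fake label.
        p = np.exp(logits - logits.max())
        p = p / p.sum()
        return -np.log(p[label])

    print(two_way_cross_entropy(np.array([0.3, 1.2]), label=1))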
@@ -126,7 +142,8 @@ def generator(noise):
     label = data_layer(name="label", size=1)
     prob = discriminator(sample)
     cost = cross_entropy(input=prob, label=label)
-    classification_error_evaluator(input=prob, label=label, name=mode + '_error')
+    classification_error_evaluator(
+        input=prob, label=label, name=mode + '_error')
     outputs(cost)

 if is_generator:
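In the plain "generator" and "discriminator" modes the config emits only the forward network, which the driver script can use for sampling and evaluation with weights learned in the *_training modes. In the v1 GAN demo the driver parses this same file once per mode, roughly as follows (a sketch; the import path and call are from the v1 API and may differ elsewhere, and sample_conf is a hypothetical variable name):

    from paddle.trainer.config_parser import parse_config

    # One parse per mode; each yields a model proto with the appropriate
    # sub-network frozen (is_static=True) or reduced to inference only.
    gen_conf = parse_config("gan_conf.py", "mode=generator_training")
    dis_conf = parse_config("gan_conf.py", "mode=discriminator_training")
    sample_conf = parse_config("gan_conf.py", "mode=generator")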