@@ -40,21 +40,24 @@ def __init__(self, n_layers, transfer_function=tf.nn.softplus, optimizer=tf.trai
 
     def _initialize_weights(self):
         all_weights = dict()
+        initializer = tf.contrib.layers.xavier_initializer()
         # Encoding network weights
         encoder_weights = []
         for layer in range(len(self.n_layers)-1):
             w = tf.Variable(
-                autoencoder.Utils.xavier_init(self.n_layers[layer],
-                                              self.n_layers[layer + 1]))
-            b = tf.Variable(tf.zeros([self.n_layers[layer + 1]], dtype=tf.float32))
+                initializer((self.n_layers[layer], self.n_layers[layer + 1]),
+                            dtype=tf.float32))
+            b = tf.Variable(
+                tf.zeros([self.n_layers[layer + 1]], dtype=tf.float32))
             encoder_weights.append({'w': w, 'b': b})
         # Recon network weights
         recon_weights = []
         for layer in range(len(self.n_layers)-1, 0, -1):
             w = tf.Variable(
-                autoencoder.Utils.xavier_init(self.n_layers[layer],
-                                              self.n_layers[layer - 1]))
-            b = tf.Variable(tf.zeros([self.n_layers[layer - 1]], dtype=tf.float32))
+                initializer((self.n_layers[layer], self.n_layers[layer - 1]),
+                            dtype=tf.float32))
+            b = tf.Variable(
+                tf.zeros([self.n_layers[layer - 1]], dtype=tf.float32))
             recon_weights.append({'w': w, 'b': b})
         all_weights['encode'] = encoder_weights
         all_weights['recon'] = recon_weights
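
For reference, a minimal standalone sketch of the new initialization pattern, assuming TensorFlow 1.x (where tf.contrib.layers.xavier_initializer is available); the n_layers sizes below are illustrative and not taken from the repository:

import tensorflow as tf

# Illustrative layer sizes: 784-dim input, two hidden layers.
n_layers = [784, 256, 64]

# xavier_initializer() returns a callable that takes a shape (and dtype)
# and produces an initialized tensor, so a custom helper such as
# autoencoder.Utils.xavier_init is no longer needed.
initializer = tf.contrib.layers.xavier_initializer()

encoder_weights = []
for layer in range(len(n_layers) - 1):
    w = tf.Variable(
        initializer((n_layers[layer], n_layers[layer + 1]), dtype=tf.float32))
    b = tf.Variable(tf.zeros([n_layers[layer + 1]], dtype=tf.float32))
    encoder_weights.append({'w': w, 'b': b})

# Reconstruction (decoder) weights walk the same layer sizes in reverse.
recon_weights = []
for layer in range(len(n_layers) - 1, 0, -1):
    w = tf.Variable(
        initializer((n_layers[layer], n_layers[layer - 1]), dtype=tf.float32))
    b = tf.Variable(tf.zeros([n_layers[layer - 1]], dtype=tf.float32))
    recon_weights.append({'w': w, 'b': b})

# Variables are materialized once the graph is initialized in a session.
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())

Note that tf.contrib was removed in TensorFlow 2.x; the closest equivalent there is tf.keras.initializers.GlorotUniform.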