remove ConvolutionAwareInitializer from layers, because lr-dropout does the same thing.

iperov 2021-07-15 00:50:11 +04:00
parent 2edac3df8c
commit f044c99ddc
3 changed files with 6 additions and 6 deletions
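The "lr-dropout" mentioned in the commit message is the learning-rate dropout option in the training optimizers: at each step a random subset of per-parameter updates is zeroed, and the commit treats that regularizing effect as making the convolution-aware initialization redundant. The snippet below is only a minimal NumPy sketch of that idea, not DeepFaceLab's optimizer code; the function name and the keep_prob default are invented for illustration.

import numpy as np

def lr_dropout_step(weights, grads, lr=1e-4, keep_prob=0.3, rng=np.random):
    # One plain SGD step where each individual weight is updated only with
    # probability keep_prob (independent Bernoulli mask per parameter).
    for w, g in zip(weights, grads):
        mask = rng.binomial(1, keep_prob, size=w.shape).astype(w.dtype)
        w -= lr * g * mask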

@@ -55,8 +55,8 @@ class Conv2D(nn.LayerBase):
             if kernel_initializer is None:
                 kernel_initializer = tf.initializers.random_normal(0, 1.0, dtype=self.dtype)
-        if kernel_initializer is None:
-            kernel_initializer = nn.initializers.ca()
+        #if kernel_initializer is None:
+        #    kernel_initializer = nn.initializers.ca()
         self.weight = tf.get_variable("weight", (self.kernel_size,self.kernel_size,self.in_ch,self.out_ch), dtype=self.dtype, initializer=kernel_initializer, trainable=self.trainable )
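The same two-line change is applied in all three layers, so the resulting fallback logic is sketched once here. This is a simplified, hypothetical helper that mirrors the branches visible in the diff, not code from the repository: with the convolution-aware fallback commented out, a layer created without an explicit kernel_initializer and without use_wscale now hands initializer=None to tf.get_variable, which then uses its own default (glorot_uniform in TF1-style graphs).

import tensorflow.compat.v1 as tf  # assumption: TF1-style API, matching the surrounding code

def pick_kernel_initializer(kernel_initializer, use_wscale, dtype=tf.float32):
    # wscale path keeps its explicit N(0, 1) initializer (rescaled at runtime)
    if use_wscale and kernel_initializer is None:
        kernel_initializer = tf.initializers.random_normal(0, 1.0, dtype=dtype)
    # removed fallback:
    # if kernel_initializer is None:
    #     kernel_initializer = nn.initializers.ca()
    return kernel_initializer  # None -> tf.get_variable picks its default initializer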

@@ -38,8 +38,8 @@ class Conv2DTranspose(nn.LayerBase):
             if kernel_initializer is None:
                 kernel_initializer = tf.initializers.random_normal(0, 1.0, dtype=self.dtype)
-        if kernel_initializer is None:
-            kernel_initializer = nn.initializers.ca()
+        #if kernel_initializer is None:
+        #    kernel_initializer = nn.initializers.ca()
         self.weight = tf.get_variable("weight", (self.kernel_size,self.kernel_size,self.out_ch,self.in_ch), dtype=self.dtype, initializer=kernel_initializer, trainable=self.trainable )
         if self.use_bias:

@@ -68,8 +68,8 @@ class DepthwiseConv2D(nn.LayerBase):
             if kernel_initializer is None:
                 kernel_initializer = tf.initializers.random_normal(0, 1.0, dtype=self.dtype)
-        if kernel_initializer is None:
-            kernel_initializer = nn.initializers.ca()
+        #if kernel_initializer is None:
+        #    kernel_initializer = nn.initializers.ca()
         self.weight = tf.get_variable("weight", (self.kernel_size,self.kernel_size,self.in_ch,self.depth_multiplier), dtype=self.dtype, initializer=kernel_initializer, trainable=self.trainable )
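Only the implicit fallback is removed; the kernel_initializer argument itself is untouched, so an explicit initializer can still be supplied per layer. A usage sketch, assuming the constructor keyword names match the attributes shown in the diff:

# hypothetical call; argument names are assumed from the diff, not verified here
conv = nn.Conv2D(in_ch=3, out_ch=64, kernel_size=3,
                 kernel_initializer=tf.initializers.random_normal(0, 0.02))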