diff --git a/parakeet/models/waveflow.py b/parakeet/models/waveflow.py
index 8b18c92..d58127b 100644
--- a/parakeet/models/waveflow.py
+++ b/parakeet/models/waveflow.py
@@ -24,7 +24,7 @@ def fold(x, n_group):
 
     Returns
     ---------
-    Tensor : [shape=(`*, time_steps // n_group, group)]
+    Tensor : [shape=(\*, time_steps // n_group, group)]
        Folded tensor.
     """
     *spatial_shape, time_steps = x.shape
@@ -230,7 +230,7 @@ class ResidualBlock(nn.Layer):
         res : Tensor [shape=(batch_size, channel, 1, width)]
             A row of the the residual output.
 
-        res : Tensor [shape=(batch_size, channel, 1, width)]
+        skip : Tensor [shape=(batch_size, channel, 1, width)]
             A row of the skip output.
         """
         x_row_in = x_row
@@ -349,7 +349,7 @@ class ResidualNet(nn.LayerList):
         res : Tensor [shape=(batch_size, channel, 1, width)]
             A row of the the residual output.
 
-        res : Tensor [shape=(batch_size, channel, 1, width)]
+        skip : Tensor [shape=(batch_size, channel, 1, width)]
             A row of the skip output.
         """
         skip_connections = []
@@ -364,8 +364,8 @@ class Flow(nn.Layer):
     """A bijection (Reversable layer) that transform a density of latent
     variables p(Z) into a complex data distribution p(X).
 
-    It's an auto regressive flow. The `forward` method implements the
-    probability density estimation. The `inverse` method implements the
+    It's an auto regressive flow. The ``forward`` method implements the
+    probability density estimation. The ``inverse`` method implements the
     sampling.
 
     Parameters
diff --git a/parakeet/models/wavenet.py b/parakeet/models/wavenet.py
index c81c948..8e6f272 100644
--- a/parakeet/models/wavenet.py
+++ b/parakeet/models/wavenet.py
@@ -350,7 +350,7 @@ class ResidualNet(nn.LayerList):
 
     def start_sequence(self):
         """Prepare the ResidualNet to generate a new sequence. This method
-        should be called before starting calling `add_input` multiple times.
+        should be called before starting calling ``add_input`` multiple times.
         """
         for block in self:
             block.start_sequence()
@@ -372,7 +372,7 @@ class ResidualNet(nn.LayerList):
         Returns
         ----------
         Tensor [shape=(B, C)]
-            T he skip connection for a step. This output is accumulated with
+            The skip connection for a step. This output is accumulated with
             that of other ResidualBlocks.
         """
         for i, func in enumerate(self):
@@ -514,7 +514,7 @@ class WaveNet(nn.Layer):
         Returns
         --------
         Tensor: [shape=(B, C_output)]
-            A steo of the parameters of the output distributions.
+            A step of the parameters of the output distributions.
         """
         # Causal Conv
         if self.loss_type == "softmax":
@@ -714,7 +714,7 @@ class WaveNet(nn.Layer):
         Parameters
         ----------
         y : Tensor [shape=(B, T, C_output)]
-            The parameterd of the output distribution.
+            The parameters of the output distribution.
 
         t : Tensor [shape=(B, T)]
             The target audio.
diff --git a/setup.py b/setup.py
index 6e95807..ee5f215 100644
--- a/setup.py
+++ b/setup.py
@@ -69,6 +69,7 @@ setup_info = dict(
         'g2p_en',
         'g2pM',
         'yacs',
+        'tensorboardX',
     ],
     extras_require={
         'doc': ["sphinx", "sphinx-rtd-theme", "numpydoc"],
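
Aside (not part of the patch): a minimal sketch of the folding behavior that the corrected fold() docstring describes, assuming Paddle is installed. fold_like below is a hypothetical stand-in for parakeet.models.waveflow.fold, reshaping the trailing time axis into (time_steps // n_group, n_group) as the docstring states.

# Hedged sketch, assuming time_steps is evenly divisible by n_group.
import paddle

def fold_like(x, n_group):
    # (*, time_steps) -> (*, time_steps // n_group, n_group)
    *spatial_shape, time_steps = x.shape
    return paddle.reshape(x, spatial_shape + [time_steps // n_group, n_group])

x = paddle.arange(12, dtype="float32").reshape([2, 6])  # (batch=2, time_steps=6)
print(fold_like(x, 3).shape)  # expected: [2, 2, 3]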