1. fix typos;
2. add tensorboardX to install requirements.
This commit is contained in:
parent
aa205fd7bb
commit
f31643b33c
|
@ -24,7 +24,7 @@ def fold(x, n_group):
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
---------
|
---------
|
||||||
Tensor : [shape=(`*, time_steps // n_group, group)]
|
Tensor : [shape=(\*, time_steps // n_group, group)]
|
||||||
Folded tensor.
|
Folded tensor.
|
||||||
"""
|
"""
|
||||||
*spatial_shape, time_steps = x.shape
|
*spatial_shape, time_steps = x.shape
|
||||||
|
@ -230,7 +230,7 @@ class ResidualBlock(nn.Layer):
|
||||||
res : Tensor [shape=(batch_size, channel, 1, width)]
|
res : Tensor [shape=(batch_size, channel, 1, width)]
|
||||||
A row of the residual output.
|
A row of the residual output.
|
||||||
|
|
||||||
res : Tensor [shape=(batch_size, channel, 1, width)]
|
skip : Tensor [shape=(batch_size, channel, 1, width)]
|
||||||
A row of the skip output.
|
A row of the skip output.
|
||||||
"""
|
"""
|
||||||
x_row_in = x_row
|
x_row_in = x_row
|
||||||
|
@ -349,7 +349,7 @@ class ResidualNet(nn.LayerList):
|
||||||
res : Tensor [shape=(batch_size, channel, 1, width)]
|
res : Tensor [shape=(batch_size, channel, 1, width)]
|
||||||
A row of the residual output.
|
A row of the residual output.
|
||||||
|
|
||||||
res : Tensor [shape=(batch_size, channel, 1, width)]
|
skip : Tensor [shape=(batch_size, channel, 1, width)]
|
||||||
A row of the skip output.
|
A row of the skip output.
|
||||||
"""
|
"""
|
||||||
skip_connections = []
|
skip_connections = []
|
||||||
|
@ -364,8 +364,8 @@ class Flow(nn.Layer):
|
||||||
"""A bijection (Reversable layer) that transform a density of latent
|
"""A bijection (Reversable layer) that transform a density of latent
|
||||||
variables p(Z) into a complex data distribution p(X).
|
variables p(Z) into a complex data distribution p(X).
|
||||||
|
|
||||||
It's an auto regressive flow. The `forward` method implements the
|
It's an auto regressive flow. The ``forward`` method implements the
|
||||||
probability density estimation. The `inverse` method implements the
|
probability density estimation. The ``inverse`` method implements the
|
||||||
sampling.
|
sampling.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
|
|
|
@ -350,7 +350,7 @@ class ResidualNet(nn.LayerList):
|
||||||
|
|
||||||
def start_sequence(self):
|
def start_sequence(self):
|
||||||
"""Prepare the ResidualNet to generate a new sequence. This method
|
"""Prepare the ResidualNet to generate a new sequence. This method
|
||||||
should be called before starting calling `add_input` multiple times.
|
should be called before starting calling ``add_input`` multiple times.
|
||||||
"""
|
"""
|
||||||
for block in self:
|
for block in self:
|
||||||
block.start_sequence()
|
block.start_sequence()
|
||||||
|
@ -372,7 +372,7 @@ class ResidualNet(nn.LayerList):
|
||||||
Returns
|
Returns
|
||||||
----------
|
----------
|
||||||
Tensor [shape=(B, C)]
|
Tensor [shape=(B, C)]
|
||||||
T he skip connection for a step. This output is accumulated with
|
The skip connection for a step. This output is accumulated with
|
||||||
that of other ResidualBlocks.
|
that of other ResidualBlocks.
|
||||||
"""
|
"""
|
||||||
for i, func in enumerate(self):
|
for i, func in enumerate(self):
|
||||||
|
@ -514,7 +514,7 @@ class WaveNet(nn.Layer):
|
||||||
Returns
|
Returns
|
||||||
--------
|
--------
|
||||||
Tensor: [shape=(B, C_output)]
|
Tensor: [shape=(B, C_output)]
|
||||||
A steo of the parameters of the output distributions.
|
A step of the parameters of the output distributions.
|
||||||
"""
|
"""
|
||||||
# Causal Conv
|
# Causal Conv
|
||||||
if self.loss_type == "softmax":
|
if self.loss_type == "softmax":
|
||||||
|
@ -714,7 +714,7 @@ class WaveNet(nn.Layer):
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
y : Tensor [shape=(B, T, C_output)]
|
y : Tensor [shape=(B, T, C_output)]
|
||||||
The parameterd of the output distribution.
|
The parameters of the output distribution.
|
||||||
|
|
||||||
t : Tensor [shape=(B, T)]
|
t : Tensor [shape=(B, T)]
|
||||||
The target audio.
|
The target audio.
|
||||||
|
|
Loading…
Reference in New Issue