import math

import paddle.fluid as fluid
import paddle.fluid.dygraph as dg
import paddle.fluid.layers as layers

from parakeet.modules.customized import Conv1D

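# Position-wise feed-forward block in the Transformer style: two 1-D
# convolutions over time with a ReLU in between, followed by dropout,
# a residual connection, and layer normalization.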
class PositionwiseFeedForward(dg.Layer):
    ''' A two-feed-forward-layer module '''

    def __init__(self, d_in, num_hidden, filter_size, padding=0, use_cudnn=True, dropout=0.1):
        super(PositionwiseFeedForward, self).__init__()
        self.num_hidden = num_hidden
        self.use_cudnn = use_cudnn
        self.dropout = dropout

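        # Bias bound k = sqrt(1 / fan_in): the conv biases below are drawn from
        # U(-k, k), the usual fan-in-scaled uniform range for conv biases,
        # while the weights use Xavier initialization.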
        k = math.sqrt(1 / d_in)
        self.w_1 = Conv1D(num_channels=d_in,
                          num_filters=num_hidden,
                          filter_size=filter_size,
                          padding=padding,
                          param_attr=fluid.ParamAttr(initializer=fluid.initializer.XavierInitializer()),
                          bias_attr=fluid.ParamAttr(initializer=fluid.initializer.Uniform(low=-k, high=k)),
                          use_cudnn=use_cudnn)
        k = math.sqrt(1 / num_hidden)
        self.w_2 = Conv1D(num_channels=num_hidden,
                          num_filters=d_in,
                          filter_size=filter_size,
                          padding=padding,
                          param_attr=fluid.ParamAttr(initializer=fluid.initializer.XavierInitializer()),
                          bias_attr=fluid.ParamAttr(initializer=fluid.initializer.Uniform(low=-k, high=k)),
                          use_cudnn=use_cudnn)
        self.layer_norm = dg.LayerNorm(d_in)

    def forward(self, input):
        """
        Feed-forward network.

        Args:
            input (Variable): shape (B, T, C), dtype float32. The input value.

        Returns:
            output (Variable): shape (B, T, C). The result after the FFN,
                residual connection, and layer normalization.
        """
        x = layers.transpose(input, [0, 2, 1])

        # FFN network: conv1d -> relu -> conv1d
        x = self.w_2(layers.relu(self.w_1(x)))

        # dropout
        x = layers.dropout(x, self.dropout)

        # transpose back to (B, T, C)
        x = layers.transpose(x, [0, 2, 1])

        # residual connection
        x = x + input

        # layer normalization
        output = self.layer_norm(x)

        return output
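

# A minimal usage sketch (the shapes and hyperparameters below are illustrative
# assumptions, not values from any model config): push a random (B, T, C) batch
# through the FFN and check that the shape is preserved.
if __name__ == "__main__":
    import numpy as np

    with dg.guard():
        ffn = PositionwiseFeedForward(d_in=256, num_hidden=1024, filter_size=1)
        x = dg.to_variable(np.random.randn(8, 16, 256).astype("float32"))
        y = ffn(x)
        print(y.shape)  # [8, 16, 256]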