From 7e049a774463672b214f10b288bffe157ee2062d Mon Sep 17 00:00:00 2001
From: chenfeiyu
Date: Fri, 25 Jun 2021 01:01:30 +0800
Subject: [PATCH] add expanduser for preprocess, update backward testing for pwg

---
 examples/parallelwave_gan/baker/preprocess.py |  4 +-
 tests/test_pwg.py                             | 54 +++++++++++++------
 2 files changed, 40 insertions(+), 18 deletions(-)

diff --git a/examples/parallelwave_gan/baker/preprocess.py b/examples/parallelwave_gan/baker/preprocess.py
index 09f2004..3ae461b 100644
--- a/examples/parallelwave_gan/baker/preprocess.py
+++ b/examples/parallelwave_gan/baker/preprocess.py
@@ -232,8 +232,8 @@ def main():
     print(vars(args))
     print(C)
 
-    root_dir = Path(args.rootdir)
-    dumpdir = Path(args.dumpdir)
+    root_dir = Path(args.rootdir).expanduser()
+    dumpdir = Path(args.dumpdir).expanduser()
     dumpdir.mkdir(parents=True, exist_ok=True)
 
     wav_files = sorted(list((root_dir / "Wave").rglob("*.wav")))
diff --git a/tests/test_pwg.py b/tests/test_pwg.py
index 41027b5..0978714 100644
--- a/tests/test_pwg.py
+++ b/tests/test_pwg.py
@@ -71,8 +71,8 @@ def test_convin_upsample_net():
     print(out2.data.cpu().numpy()[0])
 
     print("backward check")
-    print(net.conv_in.weight.numpy()[0])
-    print(net2.conv_in.weight.data.cpu().numpy()[0])
+    print(net.conv_in.weight.grad.numpy()[0])
+    print(net2.conv_in.weight.grad.data.cpu().numpy()[0])
 
 
 def test_residual_block():
@@ -87,23 +87,40 @@ def test_residual_block():
     c = paddle.randn([4, 80, 180])
     res, skip = net(x, c)
     res2, skip2 = net2(torch.as_tensor(x.numpy()), torch.as_tensor(c.numpy()))
+
+    print("forward:")
     print(res.numpy()[0])
     print(res2.data.cpu().numpy()[0])
     print(skip.numpy()[0])
     print(skip2.data.cpu().numpy()[0])
 
+    (res.sum() + skip.sum()).backward()
+    (res2.sum() + skip2.sum()).backward()
+
+    print("backward:")
+    print(net.conv.weight.grad.numpy().squeeze()[0])
+    print(net2.conv.weight.grad.data.cpu().numpy().squeeze()[0])
+
 
 def test_pwg_generator():
     net = PWGGenerator(
+        layers=9,
+        stacks=3,
+        upsample_scales=[4, 4, 4, 4],
         nonlinear_activation="LeakyReLU",
-        nonlinear_activation_params={"negative_slope": 0.2})
-    net2 = pwgan.ParallelWaveGANGenerator(upsample_params={
-        "upsample_scales": [4, 4, 4, 4],
-        "nonlinear_activation": "LeakyReLU",
-        "nonlinear_activation_params": {
-            "negative_slope": 0.2
-        }
-    }).to(device)
+        nonlinear_activation_params={"negative_slope": 0.5},
+        use_weight_norm=True)
+    net2 = pwgan.ParallelWaveGANGenerator(
+        layers=9,
+        stacks=3,
+        upsample_params={
+            "upsample_scales": [4, 4, 4, 4],
+            "nonlinear_activation": "LeakyReLU",
+            "nonlinear_activation_params": {
+                "negative_slope": 0.5
+            }
+        },
+        use_weight_norm=True).to(device)
     summary(net)
     summary(net2)
     for k, v in net2.named_parameters():
@@ -112,8 +129,8 @@ def test_pwg_generator():
             p.set_value(v.data.cpu().numpy().reshape([-1]))
         else:
            p.set_value(v.data.cpu().numpy())
-    x = paddle.randn([4, 1, 180 * 256])
-    c = paddle.randn([4, 80, 180 + 4])
+    x = paddle.randn([4, 1, 80 * 256])
+    c = paddle.randn([4, 80, 80 + 4])
 
     synchronize()
     with timer(unit='s') as t:
@@ -147,8 +164,13 @@ def test_pwg_generator():
     print(out2.data.cpu().numpy()[0])
 
     print("test backward:")
-    print(net.first_conv.weight.numpy()[0])
-    print(net2.first_conv.weight.data.cpu().numpy()[0])
+    print("wv")
+    print(net.first_conv.weight_v.grad.numpy().squeeze())
+    print(net2.first_conv.weight_v.grad.data.cpu().numpy().squeeze())
+
+    print("wg")
+    print(net.first_conv.weight_g.grad.numpy().squeeze())
+    print(net2.first_conv.weight_g.grad.data.cpu().numpy().squeeze())
 
     # print(out.shape)
 
@@ -195,8 +217,8 @@ def test_pwg_discriminator():
     print(y2.data.cpu().numpy()[0])
 
     print("test backward:")
-    print(net.conv_layers[0].weight.numpy()[0])
-    print(net2.conv_layers[0].weight.data.cpu().numpy()[0])
+    print(net.conv_layers[0].weight_v.grad.numpy().squeeze())
+    print(net2.conv_layers[0].weight_v.grad.data.cpu().numpy().squeeze())
 
 
 def test_residual_pwg_discriminator():
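
Note on the backward checks (not part of the patch itself): with use_weight_norm=True, both Paddle and PyTorch reparameterize each convolution's weight into a magnitude parameter (weight_g) and a direction parameter (weight_v), with weight = weight_g * weight_v / ||weight_v||. Gradients therefore accumulate on weight_g/weight_v rather than on the derived weight tensor, which is why the tests above compare weight_v.grad and weight_g.grad instead of weight. A minimal PyTorch sketch of this behavior (illustrative layer and shapes only, not code from the patch):

    import torch
    import torch.nn as nn

    # weight_norm splits `weight` into magnitude (weight_g) and direction
    # (weight_v): weight = weight_g * weight_v / ||weight_v||
    conv = nn.utils.weight_norm(nn.Conv1d(1, 4, kernel_size=3))

    x = torch.randn(2, 1, 16)
    conv(x).sum().backward()

    # Gradients land on the reparameterized tensors, not on the derived
    # `weight`, so a gradient-equivalence test must read these:
    print(conv.weight_g.grad.shape)  # torch.Size([4, 1, 1])
    print(conv.weight_v.grad.shape)  # torch.Size([4, 1, 3])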