PaddlePaddle / models

Officially maintained and supported by PaddlePaddle, covering CV, NLP, Speech, Recommendation, Time Series, large models, and more.
Apache License 2.0

[Paper Reproduction] BigGAN SA module grad error #4794

Open yxhpy opened 4 years ago

yxhpy commented 4 years ago
import paddle.fluid as fluid
from paddle.fluid import layers
import paddle.fluid.dygraph as dg

class SoftMax(dg.Layer):
  """Thin Layer wrapper around layers.softmax so kwargs (e.g. axis) persist."""
  def __init__(self, **kwargs):
    super().__init__()
    self.kwargs = kwargs

  def forward(self, x):
    return layers.softmax(x, **self.kwargs)

class SpectralNorm(dg.SpectralNorm):
  """Wraps a layer; on every forward, its weight is re-normalized via spectral norm."""
  def __init__(self, module, weight_name='weight', power_iterations=1, **kwargs):
    weight_shape = getattr(module, weight_name).shape
    if 'dim' not in kwargs:
      # Choose which weight axis spectral norm treats as the matrix rows:
      # output channels (0) for conv layers, axis 1 otherwise.
      if isinstance(module, ( # dg.Conv1D, dg.Conv1DTranspose,
                          dg.Conv2D, dg.Conv2DTranspose,
                          dg.Conv3D, dg.Conv3DTranspose)):
          kwargs['dim'] = 0
      else:
          kwargs['dim'] = 1
    kwargs['power_iters'] = power_iterations
    kwargs.pop('weight_shape', None)
    super().__init__(weight_shape, **kwargs)
    self.weight = getattr(module, weight_name)

    # Detach the raw weight from the wrapped module; forward() re-sets the
    # normalized weight before delegating to the module.
    del module._parameters[weight_name]
    self.module = module
    self.weight_name = weight_name

  def forward(self, *args, **kwargs):
    weight_norm = super().forward(self.weight)
    setattr(self.module, self.weight_name, weight_norm)
    return self.module(*args, **kwargs)

class SelfAttention(dg.Layer):
  """SAGAN-style self-attention block, as used in BigGAN."""
  def __init__(self, in_dim, activation=layers.relu):
    super().__init__()
    self.channel_in = in_dim
    self.activation = activation

    # 1x1 convolutions for the query/key/value projections, all spectrally normalized.
    self.theta = SpectralNorm(dg.Conv2D(in_dim, in_dim // 8, 1, bias_attr=False))
    self.phi = SpectralNorm(dg.Conv2D(in_dim, in_dim // 8, 1, bias_attr=False))
    self.pool = dg.Pool2D(2, 'max', 2)
    self.g = SpectralNorm(dg.Conv2D(in_dim, in_dim // 2, 1, bias_attr=False))
    self.o_conv = SpectralNorm(dg.Conv2D(in_dim // 2, in_dim, 1, bias_attr=False))
    # Residual gate, initialized to 0 so the block starts out as the identity.
    self.gamma = self.create_parameter([1], default_initializer=fluid.initializer.Constant(0.0))

    self.softmax = SoftMax(axis=-1)

  def forward(self, x):
    # NCHW input; the names width/height are swapped relative to the actual
    # layout, but they are used consistently below.
    m_batchsize, C, width, height = x.shape
    N = height * width

    theta = self.theta(x)
    phi = self.pool(self.phi(x))  # keys are max-pooled 2x, covering N // 4 positions
    phi = layers.reshape(phi, (m_batchsize, -1, N // 4))
    theta = layers.reshape(theta, (m_batchsize, -1, N))
    theta = layers.transpose(theta, (0, 2, 1))
    attention = self.softmax(layers.bmm(theta, phi))  # (B, N, N // 4)
    g = layers.reshape(self.pool(self.g(x)), (m_batchsize, -1, N // 4))
    attn_g = layers.reshape(layers.bmm(g, layers.transpose(attention, (0, 2, 1))),
                            (m_batchsize, -1, width, height))
    out = self.o_conv(attn_g)
    return self.gamma * out + x

# Repro: one forward pass through the attention block, then request dy/dx.
with fluid.dygraph.guard():
    x = fluid.layers.uniform_random(shape=(10, 1024, 28, 28))
    x = fluid.dygraph.to_variable(x)
    x.stop_gradient = False
    y = SelfAttention(in_dim=1024)(x)
    grads = fluid.dygraph.grad(y, [x])[0]  # raises EnforceNotMet on Paddle 1.8.3
    print(grads)

---------------------------------------------------------------------------
EnforceNotMet                             Traceback (most recent call last)
<ipython-input-6-eb997cf843ba> in <module>
      4     x.stop_gradient=False
      5     y = SelfAttention(in_dim=1024)(x)
----> 6     grads = fluid.dygraph.grad(y, [x])[0]
      7     print(grads)
</opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/decorator.py:decorator-gen-153> in grad(outputs, inputs, grad_outputs, retain_graph, create_graph, only_inputs, allow_unused, no_grad_vars, backward_strategy)
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/wrapped_decorator.py in __impl__(func, *args, **kwargs)
     23     def __impl__(func, *args, **kwargs):
     24         wrapped_func = decorator_func(func)
---> 25         return wrapped_func(*args, **kwargs)
     26 
     27     return __impl__
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/framework.py in __impl__(*args, **kwargs)
    214         assert in_dygraph_mode(
    215         ), "We Only support %s in imperative mode, please use fluid.dygraph.guard() as context to run it in imperative Mode" % func.__name__
--> 216         return func(*args, **kwargs)
    217 
    218     return __impl__
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/base.py in grad(outputs, inputs, grad_outputs, retain_graph, create_graph, only_inputs, allow_unused, no_grad_vars, backward_strategy)
    487     return core.dygraph_partial_grad(
    488         inputs, outputs, grad_outputs, no_grad_vars, place, backward_strategy,
--> 489         create_graph, retain_graph, allow_unused, only_inputs)
    490 
    491 
EnforceNotMet: 

--------------------------------------------
C++ Call Stacks (More useful to developers):
--------------------------------------------
0   std::string paddle::platform::GetTraceBackString<std::string const&>(std::string const&, char const*, int)
1   paddle::platform::EnforceNotMet::EnforceNotMet(std::string const&, char const*, int)
2   paddle::imperative::ReadyGradVarInfoMap::GetTarget(paddle::imperative::VariableWrapper const*) const
3   paddle::imperative::PartialGradTask::CreateResult()
4   paddle::imperative::PartialGradTask::Run()
5   paddle::imperative::PartialGradEngine::Execute()

----------------------
Error Message Summary:
----------------------
PermissionDeniedError: Target var tmp_0@GRAD should not be nullptr
  [Hint: iter->second should not be null.] at (/paddle/paddle/fluid/imperative/partial_grad_engine.cc:501)
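
Until a fixed build is available, one workaround to try (a sketch, untested here, assuming the failure is specific to the partial-grad engine behind fluid.dygraph.grad that the C++ stack above points at) is to run a full backward pass and read the gradient off the input instead. Since dygraph.grad defaults grad_outputs to all ones, the gradient of sum(y) with respect to x is the same quantity:

import numpy as np
import paddle.fluid as fluid
from paddle.fluid import layers

with fluid.dygraph.guard():
    x = fluid.dygraph.to_variable(
        np.random.uniform(size=(10, 1024, 28, 28)).astype('float32'))
    x.stop_gradient = False
    y = SelfAttention(in_dim=1024)(x)
    loss = layers.reduce_sum(y)  # scalar target for backward()
    loss.backward()              # uses the regular backward engine, not partial-grad
    print(x.gradient())          # numpy array holding d(sum(y))/dx
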
saxon-zh commented 4 years ago

Hello, please hold on a moment; our @shippingwang will follow up and take a look.

wzzju commented 4 years ago

Hello! I tested this: the problem occurs in Paddle 1.8.3, but it has already been fixed on the current develop branch. There are two solutions for now (see the version-check sketch after this list):

  1. Wait for the PaddlePaddle 1.8.4 release.
  2. Compile Paddle from source yourself; for details see https://www.paddlepaddle.org.cn/documentation/docs/zh/install/compile/compile_Ubuntu.html
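
Before retrying, a quick sanity check of which build is actually installed (a minimal sketch; paddle.version.commit is assumed present in this build and is handy for verifying a from-source compile):

import paddle
import paddle.version

print(paddle.__version__)     # '1.8.3' reproduces the bug; a 1.8.4 / develop build should not
print(paddle.version.commit)  # git commit of the installed build
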
yxhpy commented 4 years ago


OK, I'll try the second option then.