HIPS / autograd

Efficiently computes derivatives of NumPy code.

np.meshgrid Support #457


anhhuyalex commented 5 years ago

I am trying to run the following:

from __future__ import absolute_import
from __future__ import print_function
import matplotlib.pyplot as plt

import autograd
import autograd.numpy as np
import autograd.numpy.random as npr
import autograd.scipy.stats.multivariate_normal as mvn
import autograd.scipy.stats.norm as norm

from autograd import grad
from autograd.misc.optimizers import adam

def f(x, t):
    X, Y = np.meshgrid(x, x)
    return np.exp(X)

gradient = grad(f)
par = np.array([1, 2, 3, 4], dtype=float)
adam(gradient, par, step_size=0.1, num_iters=10)

Running this, I get the following error:

KeyError                                  Traceback (most recent call last)
<ipython-input-11-f3de8583064a> in <module>
      1 par = np.array([1,2,3,4], dtype = float)
----> 2 adam(gradient, par, step_size=0.1, num_iters=10)

/anaconda3/envs/stan_env/lib/python3.6/site-packages/autograd/misc/optimizers.py in _optimize(grad, x0, callback, *args, **kwargs)
     26         else:
     27             _callback = None
---> 28         return unflatten(optimize(_grad, _x0, _callback, *args, **kwargs))
     29 
     30     return _optimize

/anaconda3/envs/stan_env/lib/python3.6/site-packages/autograd/misc/optimizers.py in adam(grad, x, callback, num_iters, step_size, b1, b2, eps)
     62     v = np.zeros(len(x))
     63     for i in range(num_iters):
---> 64         g = grad(x, i)
     65         if callback: callback(x, i, g)
     66         m = (1 - b1) * g      + b1 * m  # First  moment estimate.

/anaconda3/envs/stan_env/lib/python3.6/site-packages/autograd/misc/optimizers.py in <lambda>(x, i)
     21     def _optimize(grad, x0, callback=None, *args, **kwargs):
     22         _x0, unflatten = flatten(x0)
---> 23         _grad = lambda x, i: flatten(grad(unflatten(x), i))[0]
     24         if callback:
     25             _callback = lambda x, i, g: callback(unflatten(x), i, unflatten(g))

/anaconda3/envs/stan_env/lib/python3.6/site-packages/autograd/wrap_util.py in nary_f(*args, **kwargs)
     18             else:
     19                 x = tuple(args[i] for i in argnum)
---> 20             return unary_operator(unary_f, x, *nary_op_args, **nary_op_kwargs)
     21         return nary_f
     22     return nary_operator

/anaconda3/envs/stan_env/lib/python3.6/site-packages/autograd/differential_operators.py in grad(fun, x)
     22     arguments as `fun`, but returns the gradient instead. The function `fun`
     23     should be scalar-valued. The gradient has the same type as the argument."""
---> 24     vjp, ans = _make_vjp(fun, x)
     25     if not vspace(ans).size == 1:
     26         raise TypeError("Grad only applies to real scalar-output functions. "

/anaconda3/envs/stan_env/lib/python3.6/site-packages/autograd/core.py in make_vjp(fun, x)
      8 def make_vjp(fun, x):
      9     start_node = VJPNode.new_root(x)
---> 10     end_value, end_node =  trace(start_node, fun, x)
     11     if end_node is None:
     12         def vjp(g): return vspace(x).zeros()

/anaconda3/envs/stan_env/lib/python3.6/site-packages/autograd/tracer.py in trace(start_node, fun, x)
      8     with trace_stack.new_trace() as t:
      9         start_box = new_box(x, t, start_node)
---> 10         end_box = fun(start_box)
     11         if isbox(end_box) and end_box._trace == start_box._trace:
     12             return end_box._value, end_box._node

/anaconda3/envs/stan_env/lib/python3.6/site-packages/autograd/wrap_util.py in unary_f(x)
     13                 else:
     14                     subargs = subvals(args, zip(argnum, x))
---> 15                 return fun(*subargs, **kwargs)
     16             if isinstance(argnum, int):
     17                 x = args[argnum]

<ipython-input-9-edaeb31e00b9> in f(x, t)
      1 def f(x, t):
----> 2     X, Y = np.meshgrid(x, x)
      3     return np.exp(X)

/anaconda3/envs/stan_env/lib/python3.6/site-packages/autograd/tracer.py in f_wrapped(*args, **kwargs)
     43             argnums = tuple(argnum    for argnum, _   in boxed_args)
     44             ans = f_wrapped(*argvals, **kwargs)
---> 45             node = node_constructor(ans, f_wrapped, argvals, kwargs, argnums, parents)
     46             return new_box(ans, trace, node)
     47         else:

/anaconda3/envs/stan_env/lib/python3.6/site-packages/autograd/core.py in __init__(self, value, fun, args, kwargs, parent_argnums, parents)
     28     def __init__(self, value, fun, args, kwargs, parent_argnums, parents):
     29         self.parents = parents
---> 30         self.vjp = primitive_vjps[fun](parent_argnums, value, args, kwargs)
     31 
     32     def initialize_root(self, value):

KeyError: <function primitive.<locals>.f_wrapped at 0x1123052f0>

It seems to me that I'm doing something reasonable, so I was wondering whether this is an as-yet-unsupported feature, or whether what I'm doing doesn't make sense.

Thank you in advance.

duvenaud commented 5 years ago

We never implemented the gradient of np.meshgrid, unfortunately. I think it wouldn't be too hard to add. However, a missing gradient should normally produce a clear error message, so I'm not sure what went wrong here.
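
For anyone who wants to try adding it, here is a rough sketch of what a VJP for the two-argument, 'xy'-indexed case might look like, using autograd.extend.primitive and defvjp. The name meshgrid2 is hypothetical, and the tuple-cotangent convention is an assumption borrowed from autograd's existing np.linalg.eigh VJP, so treat this as untested:

import numpy as onp  # plain NumPy, for the forward pass
import autograd.numpy as np
from autograd.extend import primitive, defvjp

@primitive
def meshgrid2(x, y):
    # Two-argument, 'xy'-indexed case only: X[i, j] = x[j], Y[i, j] = y[i].
    return onp.meshgrid(x, y)

# X broadcasts x along rows and Y broadcasts y along columns, so each
# adjoint just sums the incoming cotangent over the broadcast axis.
# The cotangent g is assumed to arrive as a pair (gX, gY) matching the
# two outputs, as in the np.linalg.eigh VJP.
defvjp(meshgrid2,
       lambda ans, x, y: lambda g: np.sum(g[0], axis=0),  # w.r.t. x
       lambda ans, x, y: lambda g: np.sum(g[1], axis=1))  # w.r.t. y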
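
In the meantime, a workaround is to build the grids out of operations that do have gradients, e.g. np.tile. Note that grad also requires a scalar output, so this sketch sums the result before differentiating; the code is illustrative, not tested:

import autograd.numpy as np
from autograd import grad

def f(x, t):
    n = len(x)
    X = np.tile(x, (n, 1))           # X[i, j] == x[j], like np.meshgrid(x, x)[0]
    Y = np.tile(x[:, None], (1, n))  # Y[i, j] == x[i], like np.meshgrid(x, x)[1]
    return np.sum(np.exp(X))         # grad needs a scalar-valued function

par = np.array([1., 2., 3., 4.])
print(grad(f)(par, 0))  # should equal len(par) * np.exp(par)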