Longer error message:
C:\envs\nutpie_debug\lib\site-packages\pymc\util.py:501: FutureWarning: The tag attribute observations is deprecated. Use model.rvs_to_values[rv] instead
warnings.warn(
Backend TkAgg is interactive backend. Turning interactive mode on.
Traceback (most recent call last):
File "C:\envs\nutpie_debug\lib\runpy.py", line 196, in _run_module_as_main
return _run_code(code, main_globals, None,
File "C:\envs\nutpie_debug\lib\runpy.py", line 86, in _run_code
exec(code, run_globals)
File "c:\.vscode\extensions\ms-python.python-2023.2.0\pythonFiles\lib\python\debugpy\__main__.py", line 39, in
<module>
cli.main()
File "c:\.vscode\extensions\ms-python.python-2023.2.0\pythonFiles\lib\python\debugpy/..\debugpy\server\cli.py", line 430, in main
run()
File "c:\.vscode\extensions\ms-python.python-2023.2.0\pythonFiles\lib\python\debugpy/..\debugpy\server\cli.py", line 284, in run_file
runpy.run_path(target, run_name="__main__")
File "c:\.vscode\extensions\ms-python.python-2023.2.0\pythonFiles\lib\python\debugpy\_vendored\pydevd\_pydevd_bundle\pydevd_runpy.py", line 321, in run_path
return _run_module_code(code, init_globals, run_name,
File "c:\.vscode\extensions\ms-python.python-2023.2.0\pythonFiles\lib\python\debugpy\_vendored\pydevd\_pydevd_bundle\pydevd_runpy.py", line 135, in _run_module_code
_run_code(code, mod_globals, init_globals,
File "c:\.vscode\extensions\ms-python.python-2023.2.0\pythonFiles\lib\python\debugpy\_vendored\pydevd\_pydevd_bundle\pydevd_runpy.py", line 124, in _run_code
exec(code, run_globals)
File "C:\Dev\python\trame-htbctool\nutpie_bug_example.py", line 57, in <module>
compiled_model = nutpie.compile_pymc_model(model)
File "C:\envs\nutpie_debug\lib\site-packages\nutpie\compile_pymc.py", line 121, in compile_pymc_model
logp_numba = numba.cfunc(c_sig, **kwargs)(logp_numba_raw)
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\decorators.py", line 282, in wrapper
res.compile()
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\compiler_lock.py", line 35, in _acquire_compile_lock
return func(*args, **kwargs)
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\ccallback.py", line 67, in compile
cres = self._compile_uncached()
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\ccallback.py", line 81, in _compile_uncached
return self._compiler.compile(sig.args, sig.return_type)
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\dispatcher.py", line 129, in compile
raise retval
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\dispatcher.py", line 139, in _compile_cached
retval = self._compile_core(args, return_type)
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\dispatcher.py", line 152, in _compile_core
cres = compiler.compile_extra(self.targetdescr.typing_context,
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\compiler.py", line 716, in compile_extra
return pipeline.compile_extra(func)
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\compiler.py", line 452, in compile_extra
return self._compile_bytecode()
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\compiler.py", line 520, in _compile_bytecode
return self._compile_core()
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\compiler.py", line 499, in _compile_core
raise e
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\compiler.py", line 486, in _compile_core
pm.run(self.state)
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\compiler_machinery.py", line 368, in run
raise patched_exception
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\compiler_machinery.py", line 356, in run
self._runPass(idx, pass_inst, state)
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\compiler_lock.py", line 35, in _acquire_compile_lock
return func(*args, **kwargs)
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\compiler_machinery.py", line 311, in _runPass
mutated |= check(pss.run_pass, internal_state)
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\compiler_machinery.py", line 273, in check
mangled = func(compiler_state)
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\typed_passes.py", line 105, in run_pass
typemap, return_type, calltypes, errs = type_inference_stage(
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\typed_passes.py", line 83, in type_inference_stage
errs = infer.propagate(raise_errors=raise_errors)
File "C:\envs\nutpie_debug\lib\site-packages\numba\core\typeinfer.py", line 1086, in propagate
raise errors[0]
numba.core.errors.TypingError: Failed in nopython mode pipeline (step: nopython frontend)
Failed in nopython mode pipeline (step: nopython frontend)
No implementation of function Function(<function numba_funcify_Elemwise.<locals>.elemwise at 0x0000022D579439A0>) found for signature:
>>> elemwise(readonly array(float64, 0d, C), float64, readonly array(float64, 0d, C), float64, readonly array(float64, 0d, C), readonly array(float64, 0d, C), readonly array(float32, 0d, C), readonly array(float64, 0d, C), float64, array(float64, 0d, C))
There are 2 candidate implementations:
- Of which 2 did not match due to:
Overload in function 'numba_funcify_Elemwise.<locals>.ov_elemwise': File: pytensor\link\numba\dispatch\elemwise.py: Line 687.
With argument(s): '(readonly array(float64, 0d, C), float64, readonly array(float64, 0d, C), float64, readonly array(float64, 0d, C), readonly array(float64, 0d, C), readonly array(float32, 0d, C), readonly array(float64, 0d, C), float64, array(float64, 0d, C))':
Rejected as the implementation raised a specific error:
TypingError: Failed in nopython mode pipeline (step: nopython frontend)
No implementation of function Function(<intrinsic _vectorized>) found for signature:
>>> _vectorized(type(CPUDispatcher(<function numba_funcified_fgraph at 0x0000022D57941E10>)), Literal[str](gASVDgAAAAAAAAAoKSkpKSkpKSkpKXSULg==
), Literal[str](gASVBAAAAAAAAAAphZQu
), Literal[str](gASVDQAAAAAAAACMB2Zsb2F0NjSUhZQu
), Literal[str](gASVCQAAAAAAAABLAEsIhpSFlC4=
), StarArgTuple(readonly array(float64, 0d, C), float64, readonly array(float64, 0d, C), float64, readonly array(float64, 0d, C), readonly array(float64, 0d, C), readonly array(float32, 0d, C), readonly array(float64, 0d, C), float64, array(float64, 0d, C)))
There are 2 candidate implementations:
- Of which 1 did not match due to:
Intrinsic in function '_vectorized': File: pytensor\link\numba\dispatch\elemwise.py: Line 466.
With argument(s): '(type(CPUDispatcher(<function numba_funcified_fgraph at 0x0000022D57941E10>)), unicode_type, unicode_type, unicode_type, unicode_type, StarArgTuple(readonly array(float64, 0d, C), float64, readonly array(float64, 0d, C), float64, readonly array(float64, 0d, C), readonly array(float64, 0d, C), readonly array(float32, 0d, C), readonly array(float64, 0d, C), float64, array(float64, 0d, C)))':
Rejected as the implementation raised a specific error:
TypingError: input_bc_patterns must be literal.
raised from C:\envs\nutpie_debug\lib\site-packages\pytensor\link\numba\dispatch\elemwise.py:486
- Of which 1 did not match due to:
Intrinsic in function '_vectorized': File: pytensor\link\numba\dispatch\elemwise.py: Line 466.
With argument(s): '(type(CPUDispatcher(<function numba_funcified_fgraph at 0x0000022D57941E10>)), Literal[str](gASVDgAAAAAAAAAoKSkpKSkpKSkpKXSULg==
), Literal[str](gASVBAAAAAAAAAAphZQu
), Literal[str](gASVDQAAAAAAAACMB2Zsb2F0NjSUhZQu
), Literal[str](gASVCQAAAAAAAABLAEsIhpSFlC4=
), StarArgTuple(readonly array(float64, 0d, C), float64, readonly array(float64, 0d, C), float64, readonly array(float64, 0d, C), readonly array(float64, 0d, C), readonly array(float32, 0d, C), readonly array(float64, 0d, C), float64, array(float64, 0d, C)))':
Rejected as the implementation raised a specific error:
TypingError: Inputs to elemwise must be arrays.
raised from C:\envs\nutpie_debug\lib\site-packages\pytensor\link\numba\dispatch\elemwise.py:514
During: resolving callee type: Function(<intrinsic _vectorized>)
During: typing of call at C:\envs\nutpie_debug\lib\site-packages\pytensor\link\numba\dispatch\elemwise.py (648)
File "..\..\..\envs\nutpie_debug\lib\site-packages\pytensor\link\numba\dispatch\elemwise.py", line 648:
def elemwise_wrapper(*inputs):
return _vectorized(
^
raised from C:\envs\nutpie_debug\lib\site-packages\numba\core\typeinfer.py:1086
During: resolving callee type: Function(<function numba_funcify_Elemwise.<locals>.elemwise at 0x0000022D579439A0>)
During: typing of call at C:\AppData\Local\Temp\tmpgsgit4yd (445)
File "..\..\..\AppData\Local\Temp\tmpgsgit4yd", line 445:
def numba_funcified_fgraph(_joined_variables):
<source elided>
# Elemwise{Composite{((i0 + Switch(AND(GE(i1, i2), LE(i3, i4)), i5, i6)) - ((i7 * scalar_softplus(i8)) + i9))}}[(0, 8)](TensorConstant{5.991464547107982}, InplaceDimShuffle{}.0, TensorConstant{-200.0}, InplaceDimShuffle{}.0, TensorConstant{200.0}, TensorConstant{-5.991464547107982}, TensorConstant{-inf}, TensorConstant{2.0}, InplaceDimShuffle{}.0, Reshape{0}.0)
tensor_variable_186 = elemwise_102(tensor_constant_31, tensor_variable_126, tensor_constant_32, tensor_variable_126, tensor_constant_33, tensor_constant_34, tensor_constant_25, tensor_constant_35, tensor_variable_91, tensor_variable_18)
^
During: resolving callee type: type(CPUDispatcher(<function numba_funcified_fgraph at 0x0000022D58135BD0>))
During: typing of call at C:\envs\nutpie_debug\lib\site-packages\nutpie\compile_pymc.py (265)
During: resolving callee type: type(CPUDispatcher(<function numba_funcified_fgraph at 0x0000022D58135BD0>))
During: typing of call at C:\envs\nutpie_debug\lib\site-packages\nutpie\compile_pymc.py (265)
File "..\..\..\envs\nutpie_debug\lib\site-packages\nutpie\compile_pymc.py", line 265:
def extract_shared(x, user_data_):
return inner(x)
^
Thank you for reporting this.
It is indeed a problem in pytensor. Shorter reproducer:
import numpy as np
import pymc as pm
import pytensor

with pm.Model() as model:
    pm.Uniform("x", 0., 1., shape=1)

# Fails
func = pytensor.function(model.value_vars, model.logp(), mode="NUMBA")
func(np.zeros(1))

# Works
# func = pytensor.function(model.value_vars, model.logp())
# func(np.zeros(1))
In the meantime you can work around this by defining params as an array in the first place:

params_arr = pm.Uniform("params", -perturbations, perturbations, shape=len(perturbations))
Or, if you want individual distributions for each parameter, you can define those as scalars and then stack them:

params = [
    pm.Uniform(f"params{idx}", -param, param)
    for idx, param in enumerate(perturbations)
]
params_arr = pm.math.stack(params, axis=0)
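A sketch of that second workaround in context (the bounds and the mix of distribution families are hypothetical, just to show that the stacked entries are scalars):

import pymc as pm

with pm.Model():
    params = [
        pm.Uniform("params0", -0.1, 0.1),  # scalar, default shape=()
        pm.Normal("params1", 0.0, 1.0),    # scalars can come from different families
    ]
    params_arr = pm.math.stack(params, axis=0)  # rank-1 tensor of length 2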
Should we open a pytensor issue?
Hi, thanks for the quick responses. I can't use the shape-based workaround, as not all input params will be uniform.
The pm.math.stack(params, axis=0) approach does not work for me:
ValueError: Size length is incompatible with batched dimensions of parameter 1 thermalmodel:
len(size) = 1, len(batched dims thermalmodel) = 2. Size length must be 0 or >= 2
Is there any way to debug and see what the current value of params_arr is?
If I do:

params_arr = pm.math.stack(params, axis=0).reshape(-1, 1)

the model starts compiling, but then I get the same TypingError as before.
@twiecki
Should we open a pytensor issue? I opened a PR already...
@giiyms
That sounds to me like some random variable in params still has rank 1 (i.e. you are passing shape=1). The default is shape=(), which is different. Could you double check if that's the case?
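For illustration, a quick way to check the rank of each variable (a sketch; the variable names are hypothetical):

import pymc as pm

with pm.Model():
    x1 = pm.Uniform("x1", 0.0, 1.0)           # default shape=(), rank 0 (scalar)
    x2 = pm.Uniform("x2", 0.0, 1.0, shape=1)  # rank 1, a length-1 vector

print(x1.ndim, x2.ndim)  # prints: 0 1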
@aseyboldt you are correct, thank you. It works again. Appreciate the help.
Should I close this now or do you want to close it once the pytensor bug is fixed?
Hello,
I am getting a vectorized typing error from numba.
Any ideas how to fix this?
Environment: