Hm, this oddly seems like an issue with numba compiling the code. Can you use %debug to make sure the inputs make sense? Otherwise, this might be something to report to spikeinterface.
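For example (just a sketch of what I mean, assuming you're running this in a Jupyter/IPython session), in the cell right after the one that raised the error:

    %debug
    # at the ipdb prompt, walk up the stack with `u` until you reach the
    # spikeinterface frame (e.g. _detect_peaks_chunk in peak_detection.py),
    # then inspect the inputs: traces.dtype, traces.shape, the thresholds, etc.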
The spikeinterface folks pointed out I was not using the latest version of numba, so I upgraded it and now get a new error. This looks like it might be related to the recent int/float data type change:
Running spike sorting on {'nwb_file_name': 'CH101_20210624_.nwb', 'sort_group_id': 11, 'sort_interval_name': 'raw data valid times', 'preproc_params_name': 'franklab_tetrode_hippocampus', 'team_name': 'mcoulter section', 'sorter': 'clusterless_thresholder', 'sorter_params_name': 'clusterless_fixed_2', 'artifact_removed_interval_list_name': 'CH101_20210624_.nwb_raw data valid times_11_franklab_tetrode_hippocampus_ampl_100_2ms_artifact_removed_valid_times'}...
detect peaks: 0%
0/2683 [00:00<?, ?it/s]
/home/mcoulter/spikeinterface/src/spikeinterface/sortingcomponents/peak_detection.py:442: NumbaWarning:
Compilation is falling back to object mode WITH looplifting enabled because Function _numba_detect_peak_neg failed at nopython mode lowering due to: LLVM IR parsing error
<string>:1455:19: error: invalid cast opcode for cast from 'i16' to 'float'
%".949" = fpext i16 %".890" to float
^
@numba.jit(parallel=False)
20-Apr-23 09:29:48 finding looplift candidates
/home/mcoulter/spikeinterface/src/spikeinterface/sortingcomponents/peak_detection.py:442: NumbaWarning:
Compilation is falling back to object mode WITHOUT looplifting enabled because Function "_numba_detect_peak_neg" failed type inference due to: Cannot determine Numba type of <class 'numba.core.dispatcher.LiftedLoop'>
File "../../spikeinterface/src/spikeinterface/sortingcomponents/peak_detection.py", line 446:
def _numba_detect_peak_neg(traces, traces_center, peak_mask, exclude_sweep_size,
<source elided>
num_chans = traces_center.shape[1]
for chan_ind in range(num_chans):
^
@numba.jit(parallel=False)
/home/mcoulter/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/object_mode_passes.py:151: NumbaWarning: Function "_numba_detect_peak_neg" was compiled in object mode without forceobj=True, but has lifted loops.
File "../../spikeinterface/src/spikeinterface/sortingcomponents/peak_detection.py", line 445:
def _numba_detect_peak_neg(traces, traces_center, peak_mask, exclude_sweep_size,
<source elided>
abs_threholds, peak_sign, neighbours_mask):
num_chans = traces_center.shape[1]
^
warnings.warn(errors.NumbaWarning(warn_msg,
/home/mcoulter/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/object_mode_passes.py:161: NumbaDeprecationWarning:
Fall-back from the nopython compilation path to the object mode compilation path has been detected, this is deprecated behaviour.
For more information visit https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit
File "../../spikeinterface/src/spikeinterface/sortingcomponents/peak_detection.py", line 445:
def _numba_detect_peak_neg(traces, traces_center, peak_mask, exclude_sweep_size,
<source elided>
abs_threholds, peak_sign, neighbours_mask):
num_chans = traces_center.shape[1]
^
warnings.warn(errors.NumbaDeprecationWarning(msg,
/home/mcoulter/spikeinterface/src/spikeinterface/sortingcomponents/peak_detection.py:442: NumbaWarning:
Compilation is falling back to object mode WITHOUT looplifting enabled because Function _numba_detect_peak_neg failed at nopython mode lowering due to: LLVM IR parsing error
<string>:1416:19: error: invalid cast opcode for cast from 'i16' to 'float'
%".930" = fpext i16 %".871" to float
^
@numba.jit(parallel=False)
---------------------------------------------------------------------------
CompilerError Traceback (most recent call last)
Cell In [3], line 54
52 position_info_param_name = "default_decoding"
53 # want to skip tet 100 and 101,
---> 54 populate_spike_threshold(recording_keys[2:3],
55 mark_param_name=mark_param_name,
56 position_info_param_name=position_info_param_name )
Cell In [3], line 14, in populate_spike_threshold(spikesorting_selection_keys, mark_param_name, position_info_param_name)
1 def populate_spike_threshold(
2 spikesorting_selection_keys: list,
3 mark_param_name='default',
(...)
8 # either do spike sorting or marks - not both
9 ## Populate spike sorting
10 SpikeSortingSelection().insert(
11 spikesorting_selection_keys,
12 skip_duplicates=True,
13 )
---> 14 SpikeSorting.populate(spikesorting_selection_keys)
16 ## Skip any curation
17 curation_keys = [Curation.insert_curation(
18 key) for key in spikesorting_selection_keys]
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/datajoint/autopopulate.py:230, in AutoPopulate.populate(self, suppress_errors, return_exception_objects, reserve_jobs, order, limit, max_calls, display_progress, processes, make_kwargs, *restrictions)
226 if processes == 1:
227 for key in (
228 tqdm(keys, desc=self.__class__.__name__) if display_progress else keys
229 ):
--> 230 error = self._populate1(key, jobs, **populate_kwargs)
231 if error is not None:
232 error_list.append(error)
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/datajoint/autopopulate.py:281, in AutoPopulate._populate1(self, key, jobs, suppress_errors, return_exception_objects, make_kwargs)
279 self.__class__._allow_insert = True
280 try:
--> 281 make(dict(key), **(make_kwargs or {}))
282 except (KeyboardInterrupt, SystemExit, Exception) as error:
283 try:
File ~/spyglass/src/spyglass/spikesorting/spikesorting_sorting.py:203, in SpikeSorting.make(self, key)
200 sorter_params.pop("whiten", None)
202 # Detect peaks for clusterless decoding
--> 203 detected_spikes = detect_peaks(recording, **sorter_params)
204 sorting = si.NumpySorting.from_times_labels(
205 times_list=detected_spikes["sample_ind"],
206 labels_list=np.zeros(len(detected_spikes), dtype=np.int),
207 sampling_frequency=recording.get_sampling_frequency(),
208 )
209 else:
File ~/spikeinterface/src/spikeinterface/sortingcomponents/peak_detection.py:103, in detect_peaks(recording, method, pipeline_nodes, gather_mode, folder, names, **kwargs)
99 init_args = (recording, method, method_args, extra_margin, pipeline_nodes)
100 processor = ChunkRecordingExecutor(recording, func, init_func, init_args,
101 gather_func=gather_func, job_name='detect peaks',
102 mp_context=mp_context, **job_kwargs)
--> 103 processor.run()
105 outs = gather_func.finalize_buffers(squeeze_output=squeeze_output)
106 return outs
File ~/spikeinterface/src/spikeinterface/core/job_tools.py:344, in ChunkRecordingExecutor.run(self)
342 worker_ctx = self.init_func(*self.init_args)
343 for segment_index, frame_start, frame_stop in all_chunks:
--> 344 res = self.func(segment_index, frame_start, frame_stop, worker_ctx)
345 if self.handle_returns:
346 returns.append(res)
File ~/spikeinterface/src/spikeinterface/sortingcomponents/peak_detection.py:154, in _detect_peaks_chunk(segment_index, start_frame, end_frame, worker_ctx)
151 trace_detection = traces
153 # TODO: handle waveform returns
--> 154 peak_sample_ind, peak_chan_ind = method_class.detect_peaks(trace_detection, *method_args)
156 if extra_margin > 0:
157 peak_sample_ind += extra_margin
File ~/spikeinterface/src/spikeinterface/sortingcomponents/peak_detection.py:357, in DetectPeakLocallyExclusive.detect_peaks(cls, traces, peak_sign, abs_threholds, exclude_sweep_size, neighbours_mask)
354 peak_mask_pos = peak_mask.copy()
356 peak_mask = traces_center < -abs_threholds[None, :]
--> 357 peak_mask = _numba_detect_peak_neg(traces, traces_center, peak_mask, exclude_sweep_size,
358 abs_threholds, peak_sign, neighbours_mask)
360 if peak_sign == 'both':
361 peak_mask = peak_mask | peak_mask_pos
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/dispatcher.py:487, in _DispatcherBase._compile_for_args(self, *args, **kws)
485 e.patch_message('\n'.join((str(e).rstrip(), help_msg)))
486 # ignore the FULL_TRACEBACKS config, this needs reporting!
--> 487 raise e
488 finally:
489 self._types_active_call = []
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/dispatcher.py:420, in _DispatcherBase._compile_for_args(self, *args, **kws)
418 return_val = None
419 try:
--> 420 return_val = self.compile(tuple(argtypes))
421 except errors.ForceLiteralArg as e:
422 # Received request for compiler re-entry with the list of arguments
423 # indicated by e.requested_args.
424 # First, check if any of these args are already Literal-ized
425 already_lit_pos = [i for i in e.requested_args
426 if isinstance(args[i], types.Literal)]
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/dispatcher.py:1197, in LiftedCode.compile(self, sig)
1191 ev_details = dict(
1192 dispatcher=self,
1193 args=args,
1194 return_type=return_type,
1195 )
1196 with ev.trigger_event("numba:compile", data=ev_details):
-> 1197 cres = compiler.compile_ir(typingctx=self.typingctx,
1198 targetctx=self.targetctx,
1199 func_ir=cloned_func_ir,
1200 args=args,
1201 return_type=return_type,
1202 flags=flags, locals=self.locals,
1203 lifted=(),
1204 lifted_from=self.lifted_from,
1205 is_lifted_loop=True,)
1207 # Check typing error if object mode is used
1208 if (cres.typing_error is not None and
1209 not flags.enable_pyobject):
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/compiler.py:754, in compile_ir(typingctx, targetctx, func_ir, args, return_type, flags, locals, lifted, lifted_from, is_lifted_loop, library, pipeline_class)
750 return pipeline.compile_ir(func_ir=the_ir, lifted=lifted,
751 lifted_from=lifted_from)
753 # compile with rewrites off, IR shouldn't be mutated irreparably
--> 754 norw_cres = compile_local(func_ir.copy(), norw_flags)
756 # try and compile with rewrites on if no_rewrites was not set in the
757 # original flags, IR might get broken but we've got a CompileResult
758 # that's usable from above.
759 rw_cres = None
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/compiler.py:750, in compile_ir.<locals>.compile_local(the_ir, the_flags)
747 def compile_local(the_ir, the_flags):
748 pipeline = pipeline_class(typingctx, targetctx, library,
749 args, return_type, the_flags, locals)
--> 750 return pipeline.compile_ir(func_ir=the_ir, lifted=lifted,
751 lifted_from=lifted_from)
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/compiler.py:462, in CompilerBase.compile_ir(self, func_ir, lifted, lifted_from)
459 self.state.nargs = self.state.func_ir.arg_count
461 FixupArgs().run_pass(self.state)
--> 462 return self._compile_ir()
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/compiler.py:527, in CompilerBase._compile_ir(self)
523 """
524 Populate and run pipeline for IR input
525 """
526 assert self.state.func_ir is not None
--> 527 return self._compile_core()
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/compiler.py:499, in CompilerBase._compile_core(self)
497 self.state.status.fail_reason = e
498 if is_final_pipeline:
--> 499 raise e
500 else:
501 raise CompilerError("All available pipelines exhausted")
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/compiler.py:486, in CompilerBase._compile_core(self)
484 res = None
485 try:
--> 486 pm.run(self.state)
487 if self.state.cr is not None:
488 break
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/compiler_machinery.py:368, in PassManager.run(self, state)
365 msg = "Failed in %s mode pipeline (step: %s)" % \
366 (self.pipeline_name, pass_desc)
367 patched_exception = self._patch_error(msg, e)
--> 368 raise patched_exception
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/compiler_machinery.py:356, in PassManager.run(self, state)
354 pass_inst = _pass_registry.get(pss).pass_inst
355 if isinstance(pass_inst, CompilerPass):
--> 356 self._runPass(idx, pass_inst, state)
357 else:
358 raise BaseException("Legacy pass in use")
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/compiler_lock.py:35, in _CompilerLock.__call__.<locals>._acquire_compile_lock(*args, **kwargs)
32 @functools.wraps(func)
33 def _acquire_compile_lock(*args, **kwargs):
34 with self:
---> 35 return func(*args, **kwargs)
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/compiler_machinery.py:318, in PassManager._runPass(self, index, pss, internal_state)
315 # Check that if the pass is an instance of a FunctionPass that it hasn't
316 # emitted ir.Dels.
317 if isinstance(pss, FunctionPass):
--> 318 enforce_no_dels(internal_state.func_ir)
320 if self._ENFORCING:
321 # TODO: Add in self consistency enforcement for
322 # `func_ir._definitions` etc
323 if _pass_registry.get(pss.__class__).mutates_CFG:
File ~/anaconda3/envs/spyglass3/lib/python3.9/site-packages/numba/core/ir_utils.py:2194, in enforce_no_dels(func_ir)
2192 if dels:
2193 msg = "Illegal IR, del found at: %s" % dels[0]
-> 2194 raise CompilerError(msg, loc=dels[0].loc)
CompilerError: Failed in object mode pipeline (step: remove phis nodes)
Illegal IR, del found at: del $66for_iter.3
File "../../spikeinterface/src/spikeinterface/sortingcomponents/peak_detection.py", line 450:
def _numba_detect_peak_neg(traces, traces_center, peak_mask, exclude_sweep_size,
<source elided>
continue
for neighbour in range(num_chans):
^
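For context, my read of the dtype angle (this is an assumption on my part, not something I've confirmed in the spikeinterface internals): the numba kernel gets specialized for whatever dtype the traces arrive in, and the LLVM error above is an int16 value being cast to float inside the jitted function. A toy example of the same pattern, doing the cast before the jitted call instead:

    import numpy as np
    import numba

    @numba.njit
    def count_above(traces, threshold):
        # toy stand-in for the peak-detection loop: compare each sample to a float threshold
        n = 0
        for i in range(traces.shape[0]):
            if traces[i] > threshold:
                n += 1
        return n

    raw = np.array([1, 5, -3, 8], dtype=np.int16)    # recording traces are int16 here
    print(count_above(raw.astype(np.float32), 2.5))  # all-float inside the kernel, no int->float cast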
@MichaelCoulter can you pull from this repo and reinstall your environment and try again? Alternatively, you could just try downgrading numpy to something below 1.24.
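If it helps, a quick way to check what you currently have, plus the pin I mean:

    # run inside the spyglass3 environment
    import numpy, numba
    print(numpy.__version__, numba.__version__)

    # to downgrade numpy:
    #   pip install "numpy<1.24"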
Got this error when trying to run spikesorting.populate() for the clusterless thresholder.