SpikeInterface / spikeinterface

A Python-based module for creating flexible and robust spike sorting pipelines.
https://spikeinterface.readthedocs.io
MIT License

Unable to import SpikeGLX Neuropixels recordings with certain names, already found the bug! #3323

Closed: DaohanZhang closed this issue 2 months ago

DaohanZhang commented 2 months ago

Hey! I found another minor bug which I did not encounter in the old version: I can only load recordings whose file names include the _g0_t0 part (e.g. 240719_g0_t0.imec0.ap.bin), which confused me for a while. I tracked the bug down to site-packages/neo/rawio/spikeglxrawio.py:444; a minimal sketch of the failing parse is below, followed by my full script and the traceback.
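
Here is a minimal sketch (my own, not code copied from neo) of why the parse fails: for a stem like Pt02.imec0.ap that has no _gN_tN part, the regex shown in the traceback only defines two capture groups, yet three names are unpacked from the match.

import re

fname = "Pt02.imec0.ap"  # stem of Pt02.imec0.ap.meta; note there is no _g0_t0 part
# the same regex that appears at spikeglxrawio.py:442 in the traceback below
re_else_nidq = re.findall(r"(\S*)\.(\S*)", fname)
print(re_else_nidq)  # [('Pt02.imec0', 'ap')] -- each match is a 2-tuple
# parse_spikeglx_fname then unpacks three values from that tuple:
# run_name, device, stream_kind = re_else_nidq[0]
# -> ValueError: not enough values to unpack (expected 3, got 2)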

import os
# os.system('newgrp docker') and os.system('export ...') only affect a throwaway
# subshell, not this Python process, so set the environment variable directly
os.environ['KILOSORT2_5_PATH'] = '/home/zhangdaohan20h/Kilosort-kilosort25/'
import numpy as np
import matplotlib.pyplot as plt
import spikeinterface.full as si  # imports the full spikeinterface namespace
import spikeinterface.extractors as se
import spikeinterface.preprocessing as spre
import spikeinterface.sorters as ss
import spikeinterface.postprocessing as spost
import spikeinterface.qualitymetrics as sqm
import spikeinterface.comparison as sc
import spikeinterface.exporters as sexp
import spikeinterface.curation as scur
import spikeinterface.widgets as sw
from spikeinterface.sortingcomponents.peak_detection import detect_peaks
from spikeinterface.sortingcomponents.peak_localization import localize_peaks
print('#')
import sys
sys.path.append('/share/home/zhangdaohan20h/dredge-main/python')
from dredge.dredge_ap import register
from dredge.dredge_lfp import register_online_lfp
# this has some helpers for plotting
import dredge.motion_util as mu
/home/zhangdaohan20h/.conda/envs/kilosort4/lib/python3.9/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html
  from .autonotebook import tqdm as notebook_tqdm

#
# preprocessing parameters
# brain surface cutoff point in microns
cutoff_um = None
# start and end times in seconds
t_start = 10.0
t_end = 40.0
# load up the spikeglx recording
#lfprec = si.read_spikeglx("/home/zhangdaohan20h/public_data/NPX_examples/240719_g0/240719_g0_imec0", stream_id="imec0.lf")
## this recording, with files named 240719_g0_t0.imec0.ap.bin, loads smoothly
lfpraw = se.read_spikeglx("/home/zhangdaohan20h/public_data/NPX_examples/MGH_Pt02/", load_sync_channel=False, 
                          stream_id="imec0.lf")
## this folder contains files named Pt02.imec0.ap.bin & Pt02.imec0.ap.meta
lfpraw
/home/zhangdaohan20h/public_data/NPX_examples/MGH_Pt02/Pt02.imec0.ap.meta

---------------------------------------------------------------------------

ValueError                                Traceback (most recent call last)

Cell In[7], line 7
      1 # here, we do a preprocessing which downsamples to 250Hz for registration purposes
      2 # below, when interpolating, we will not downsample.
      3 
      4 # load up the spikeglx recording
      5 #lfprec = si.read_spikeglx("/home/zhangdaohan20h/public_data/NPX_examples/240719_g0/240719_g0_imec0", stream_id="imec0.lf")
----> 7 lfpraw = se.read_spikeglx("/home/zhangdaohan20h/public_data/NPX_examples/MGH_Pt02/", load_sync_channel=False, 
      8                           stream_id="imec0.lf")
      9 lfpraw

File ~/.conda/envs/kilosort4/lib/python3.9/site-packages/spikeinterface/extractors/neoextractors/spikeglx.py:58, in SpikeGLXRecordingExtractor.__init__(self, folder_path, load_sync_channel, stream_id, stream_name, all_annotations, use_names_as_ids)
     48 def __init__(
     49     self,
     50     folder_path,
   (...)
     55     use_names_as_ids: bool = False,
     56 ):
     57     neo_kwargs = self.map_to_neo_kwargs(folder_path, load_sync_channel=load_sync_channel)
---> 58     NeoBaseRecordingExtractor.__init__(
     59         self,
     60         stream_id=stream_id,
     61         stream_name=stream_name,
     62         all_annotations=all_annotations,
     63         use_names_as_ids=use_names_as_ids,
     64         **neo_kwargs,
     65     )
     67     # open the corresponding stream probe for LF and AP
     68     # if load_sync_channel=False
     69     if "nidq" not in self.stream_id and not load_sync_channel:

File ~/.conda/envs/kilosort4/lib/python3.9/site-packages/spikeinterface/extractors/neoextractors/neobaseextractor.py:188, in NeoBaseRecordingExtractor.__init__(self, stream_id, stream_name, block_index, all_annotations, use_names_as_ids, **neo_kwargs)
    158 def __init__(
    159     self,
    160     stream_id: Optional[str] = None,
   (...)
    165     **neo_kwargs: Dict[str, Any],
    166 ) -> None:
    167     """
    168     Initialize a NeoBaseRecordingExtractor instance.
    169 
   (...)
    185 
    186     """
--> 188     _NeoBaseExtractor.__init__(self, block_index, **neo_kwargs)
    190     kwargs = dict(all_annotations=all_annotations)
    191     if block_index is not None:

File ~/.conda/envs/kilosort4/lib/python3.9/site-packages/spikeinterface/extractors/neoextractors/neobaseextractor.py:27, in _NeoBaseExtractor.__init__(self, block_index, **neo_kwargs)
     23 def __init__(self, block_index, **neo_kwargs):
     24 
     25     # Avoids double initiation of the neo reader if it was already done in the __init__ of the child class
     26     if not hasattr(self, "neo_reader"):
---> 27         self.neo_reader = self.get_neo_io_reader(self.NeoRawIOClass, **neo_kwargs)
     29     if self.neo_reader.block_count() > 1 and block_index is None:
     30         raise Exception(
     31             "This dataset is multi-block. Spikeinterface can load one block at a time. "
     32             "Use 'block_index' to select the block to be loaded."
     33         )

File ~/.conda/envs/kilosort4/lib/python3.9/site-packages/spikeinterface/extractors/neoextractors/neobaseextractor.py:66, in _NeoBaseExtractor.get_neo_io_reader(cls, raw_class, **neo_kwargs)
     64 neoIOclass = getattr(rawio_module, raw_class)
     65 neo_reader = neoIOclass(**neo_kwargs)
---> 66 neo_reader.parse_header()
     68 return neo_reader

File ~/.conda/envs/kilosort4/lib/python3.9/site-packages/neo/rawio/baserawio.py:189, in BaseRawIO.parse_header(self)
    176 """
    177 Parses the header of the file(s) to allow for faster computations
    178 for all other functions
    179 
    180 """
    181 # this must create
    182 # self.header['nb_block']
    183 # self.header['nb_segment']
   (...)
    186 # self.header['spike_channels']
    187 # self.header['event_channels']
--> 189 self._parse_header()
    190 self._check_stream_signal_channel_characteristics()
    191 self.is_header_parsed = True

File ~/.conda/envs/kilosort4/lib/python3.9/site-packages/neo/rawio/spikeglxrawio.py:119, in SpikeGLXRawIO._parse_header(self)
    118 def _parse_header(self):
--> 119     self.signals_info_list = scan_files(self.dirname)
    121     # sort stream_name by higher sampling rate first
    122     srates = {info["stream_name"]: info["sampling_rate"] for info in self.signals_info_list}

File ~/.conda/envs/kilosort4/lib/python3.9/site-packages/neo/rawio/spikeglxrawio.py:351, in scan_files(dirname)
    349 meta = read_meta_file(meta_filename)
    350 print(meta_filename)################################################
--> 351 info = extract_stream_info(meta_filename, meta)
    353 info["meta_file"] = str(meta_filename)
    354 info["bin_file"] = str(bin_filename)

File ~/.conda/envs/kilosort4/lib/python3.9/site-packages/neo/rawio/spikeglxrawio.py:495, in extract_stream_info(meta_file, meta)
    493     has_sync_trace = False
    494 fname = Path(meta_file).stem
--> 495 run_name, gate_num, trigger_num, device, stream_kind = parse_spikeglx_fname(fname)
    497 if "imec" in fname.split(".")[-2]:
    498     device = fname.split(".")[-2]

File ~/.conda/envs/kilosort4/lib/python3.9/site-packages/neo/rawio/spikeglxrawio.py:444, in parse_spikeglx_fname(fname)
    442 re_else_nidq = re.findall(r"(\S*)\.(\S*)", fname)
    443 if len(re_else) == 1:
--> 444     run_name, device, stream_kind = re_else_nidq[0]
    445     gate_num, trigger_num = None, None
    446 elif len(re_else_nidq) == 1:
    447     # easy case for nidaq, example: sglx_xxx.nidq

ValueError: not enough values to unpack (expected 3, got 2)
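
Until the parser handles this naming, one possible workaround (an untested sketch of my own; the renamed_g0 folder name is just an example) is to symlink the files to the canonical SpikeGLX names that include the _g0_t0 part, which neo parses fine, and read from that folder instead:

from pathlib import Path

src_dir = Path("/home/zhangdaohan20h/public_data/NPX_examples/MGH_Pt02")
dst_dir = src_dir / "renamed_g0"      # example folder to hold the symlinks
dst_dir.mkdir(exist_ok=True)
for f in src_dir.glob("Pt02.imec0.*"):
    # e.g. Pt02.imec0.ap.bin -> Pt02_g0_t0.imec0.ap.bin
    target = dst_dir / f.name.replace("Pt02.", "Pt02_g0_t0.", 1)
    if not target.exists():
        target.symlink_to(f)

# lfpraw = se.read_spikeglx(str(dst_dir), load_sync_channel=False, stream_id="imec0.lf")
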
chrishalcrow commented 2 months ago

Hi @DaohanZhang , this is actually a problem with the neo package, not spikeinterface. Could you open an issue over there please (https://github.com/NeuralEnsemble/python-neo/issues)? Thanks!
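
For reference, one possible direction for a neo-side fix (a sketch of my own, not the actual patch, and not covering every SpikeGLX naming variant such as nidq or tcat files): try the canonical _gN_tN pattern first, then fall back to a gate/trigger-less form so stems like Pt02.imec0.ap still yield a device and stream kind.

import re

def parse_fname_with_fallback(fname):
    # canonical SpikeGLX stem, e.g. 240719_g0_t0.imec0.ap
    m = re.match(r"(\S*)_g(\d+)_t(\d+)\.(\S*)\.(ap|lf)$", fname)
    if m is not None:
        run_name, gate_num, trigger_num, device, stream_kind = m.groups()
        return run_name, int(gate_num), int(trigger_num), device, stream_kind
    # stems without gate/trigger, e.g. Pt02.imec0.ap
    m = re.match(r"(\S*)\.(\S*)\.(ap|lf)$", fname)
    if m is not None:
        run_name, device, stream_kind = m.groups()
        return run_name, None, None, device, stream_kind
    raise ValueError(f"cannot parse SpikeGLX file name: {fname}")

print(parse_fname_with_fallback("240719_g0_t0.imec0.ap"))  # ('240719', 0, 0, 'imec0', 'ap')
print(parse_fname_with_fallback("Pt02.imec0.ap"))          # ('Pt02', None, None, 'imec0', 'ap')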