f-hamidlab / nuclearpy

MIT License
0 stars 0 forks source link

Analyzer UMAP and show cells #26

Open Marcel-Salier opened 2 years ago

Marcel-Salier commented 2 years ago

I got new data from other experiments using NGN2, and I cannot find the induced neurons anymore. The UMAP is not helping as it was before. The first part of filtering nuclei is fine, but it seems that the ICC markers are not strong enough to cluster the cells. We should discuss this a bit. When everything else failed, we also used to resort to hand-picking the nuclei of iNs.

I pulled the changes, but calling showCells after the UMAP now raises an error:


ValueError Traceback (most recent call last) Input In [29], in <cell line: 1>() ----> 1 obj.showCells(RGB_contrasts=[4,3,4], n=5, ch2show={'red':'lamB', 'green':'b3'}, filter = "leiden == '2'")

File c:\users\bb_lab\documents\github2\ng_tools\ngtools\analyzer.py:743, in Analyzor.showCells(self, n, ch2show, order_by, ascending, fig_height, fig_width, show_nucleus, RGB_contrasts, uniqID, cells, filter) 740 df = df.loc[cells,] 742 if type(filter) is str: --> 743 obj.filterCells(filter = filter) 745 show_cell(df, order_by, fig_height, fig_width, show_nucleus, RGB_contrasts, uniqID, ch2show, n, 746 obj.meta['channels'], ascending, cells)

File c:\users\bb_lab\documents\github2\ng_tools\ngtools\analyzer.py:910, in Analyzor.filterCells(self, filter, data_type, cells, inplace) 908 self.data['raw'] = self.data['raw'].loc[cells,] 909 self.data['norm'] = self.data['norm'].loc[cells,] --> 910 self.updateAData() 911 else: 912 dat = self.copy()

File c:\users\bb_lab\documents\github2\ng_tools\ngtools\analyzer.py:515, in Analyzor.updateAData(self) 513 def updateAData(self): 514 self.buildAData(self.excfeat) --> 515 self.normAData()

File c:\users\bb_lab\documents\github2\ng_tools\ngtools\analyzer.py:1023, in Analyzor.normAData(self, method) 1013 def normAData(self, method = "standardscaler"): 1014 """ 1015 Normalize data for dimensional reduction 1016 (...) 1020 1021 """ -> 1023 self.adata.X = _normalise_data(self.adata.X, method = method) 1024 sc.pp.scale(self.adata, max_value=10)

File c:\users\bb_lab\documents\github2\ng_tools\ngtools\analyzer.py:130, in _normalise_data(X, method, copy) 127 X = X.copy() if copy else X 129 if method.lower() == "standardscaler": --> 130 X = StandardScaler().fit_transform(X) 131 elif method.lower() == "minmaxscaler": 132 X = MinMaxScaler().fit_transform(X)

File ~\anaconda3\envs\ngtools\lib\site-packages\sklearn\base.py:867, in TransformerMixin.fit_transform(self, X, y, fit_params) 863 # non-optimized default implementation; override when a better 864 # method is possible for a given clustering algorithm 865 if y is None: 866 # fit method of arity 1 (unsupervised transformation) --> 867 return self.fit(X, fit_params).transform(X) 868 else: 869 # fit method of arity 2 (supervised transformation) 870 return self.fit(X, y, **fit_params).transform(X)

File ~\anaconda3\envs\ngtools\lib\site-packages\sklearn\preprocessing_data.py:809, in StandardScaler.fit(self, X, y, sample_weight) 807 # Reset internal state before fitting 808 self._reset() --> 809 return self.partial_fit(X, y, sample_weight)

File ~\anaconda3\envs\ngtools\lib\site-packages\sklearn\preprocessing_data.py:844, in StandardScaler.partial_fit(self, X, y, sample_weight) 812 """Online computation of mean and std on X for later scaling. 813 814 All of X is processed as a single batch. This is intended for cases (...) 841 Fitted scaler. 842 """ 843 first_call = not hasattr(self, "n_samplesseen") --> 844 X = self._validate_data( 845 X, 846 accept_sparse=("csr", "csc"), 847 dtype=FLOAT_DTYPES, 848 force_all_finite="allow-nan", 849 reset=first_call, 850 ) 851 n_features = X.shape[1] 853 if sample_weight is not None:

File ~\anaconda3\envs\ngtools\lib\site-packages\sklearn\base.py:577, in BaseEstimator._validate_data(self, X, y, reset, validate_separately, check_params) 575 raise ValueError("Validation should be done on X, y or both.") 576 elif not no_val_X and no_val_y: --> 577 X = check_array(X, input_name="X", check_params) 578 out = X 579 elif no_val_X and not no_val_y:

File ~\anaconda3\envs\ngtools\lib\site-packages\sklearn\utils\validation.py:909, in check_array(array, accept_sparse, accept_large_sparse, dtype, order, copy, force_all_finite, ensure_2d, allow_nd, ensure_min_samples, ensure_min_features, estimator, input_name) 907 n_samples = _num_samples(array) 908 if n_samples < ensure_min_samples: --> 909 raise ValueError( 910 "Found array with %d sample(s) (shape=%s) while a" 911 " minimum of %d is required%s." 912 % (n_samples, array.shape, ensure_min_samples, context) 913 ) 915 if ensure_min_features > 0 and array.ndim == 2: 916 n_features = array.shape[1]

ValueError: Found array with 0 sample(s) (shape=(0, 19)) while a minimum of 1 is required by StandardScaler.