Starlitnightly / omicverse

A Python library for multi-omics analysis, including bulk, single-cell, and spatial RNA-seq.
https://starlitnightly.github.io/omicverse/
GNU General Public License v3.0

Bugs that occur when running VIA #211

Open Zstark11 opened 1 week ago

Zstark11 commented 1 week ago
#run code
import omicverse as ov
import scanpy as sc
import matplotlib.pyplot as plt
ov.utils.ov_plot_set()
adata = ov.read('VIA-test-P.h5ad')
adata
AnnData object with n_obs × n_vars = 12017 × 3000
    obs: 'sample_batch', 'initial_size_unspliced', 'initial_size_spliced', 'initial_size', 'batch', 'nUMIs', 'mito_perc', 'detected_genes', 'cell_complexity', 'n_genes', 'doublet_score', 'predicted_doublet', 'passing_mt', 'passing_nUMIs', 'passing_ngenes', 'leiden_res1', 'scsa_celltype_cellmarker-1'
    var: 'gene_ids', 'feature_types', 'Accession', 'Chromosome', 'End', 'Start', 'Strand', 'mt', 'n_cells', 'percent_cells', 'robust', 'highly_variable_rank', 'means', 'variances', 'variances_norm', 'highly_variable_nbatches', 'highly_variable_features'
    uns: 'batch_colors', 'hvg', 'layers_counts', 'leiden_res1', 'leiden_res1_colors', 'log1p', 'neighbors', 'pca', 'rank_genes_groups', 'scaled|original|cum_sum_eigenvalues', 'scaled|original|pca_var_ratios', 'scrublet', 'scsa_celltype_cellmarker-1_colors', 'umap'
    obsm: 'X_harmony', 'X_mde_harmony', 'X_mde_pca', 'X_pca', 'X_umap', 'scaled|original|X_pca'
    varm: 'PCs', 'scaled|original|pca_loadings'
    layers: 'ambiguous', 'counts', 'lognorm', 'matrix', 'scaled', 'spliced', 'unspliced'
    obsp: 'connectivities', 'distances'
v0 = ov.single.pyVIA(adata=adata,adata_key='X_pca',adata_ncomps=80, basis='X_umap',
                         clusters='scsa_celltype_cellmarker-1',knn=30,random_seed=4,root_user=None,)
v0.run()
2024-11-11 09:09:19.117907  Running VIA over input data of 12017 (samples) x 80 (features)
2024-11-11 09:09:19.118006  Knngraph has 30 neighbors
2024-11-11 09:09:22.741861  Finished global pruning of 30-knn graph used for clustering at level of 0.15. Kept 41.5 % of edges. 
2024-11-11 09:09:22.798817  Number of connected components used for clustergraph  is 1
2024-11-11 09:09:23.462553  Commencing community detection
2024-11-11 09:09:24.544915  Finished running Leiden algorithm. Found 445 clusters.
2024-11-11 09:09:24.550619  Merging 418 very small clusters (<10)
2024-11-11 09:09:24.557405  Finished detecting communities. Found 27 communities
2024-11-11 09:09:24.558368  Making cluster graph. Global cluster graph pruning level: 0.15
2024-11-11 09:09:24.592127  Graph has 1 connected components before pruning
2024-11-11 09:09:24.594545  Graph has 2 connected components after pruning
2024-11-11 09:09:24.595619  Graph has 1 connected components after reconnecting
2024-11-11 09:09:24.596126  0.0% links trimmed from local pruning relative to start
2024-11-11 09:09:24.596154  68.0% links trimmed from global pruning relative to start
2024-11-11 09:09:24.599008  Starting make edgebundle viagraph...
2024-11-11 09:09:24.599032  Make via clustergraph edgebundle
---------------------------------------------------------------------------
ImportError                               Traceback (most recent call last)
File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/dask/dataframe/__init__.py:59
     58 from dask.base import compute
---> 59 from dask.dataframe import backends, dispatch, methods, rolling
     60 from dask.dataframe._testing import test_dataframe

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/dask/dataframe/backends.py:14
     13 from dask.backends import CreationDispatch, DaskBackendEntrypoint
---> 14 from dask.dataframe._compat import PANDAS_GE_220, is_any_real_numeric_dtype
     15 from dask.dataframe.core import DataFrame, Index, Scalar, Series, _Frame

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/dask/dataframe/_compat.py:9
      7 from dask._compatibility import import_optional_dependency
----> 9 import_optional_dependency("pandas")
     10 import_optional_dependency("numpy")

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/dask/_compatibility.py:139, in import_optional_dependency(name, extra, min_version, errors)
    138 elif errors == "raise":
--> 139     raise ImportError(msg)
    140 else:

ImportError: Dask requires version '2.0.0' or newer of 'pandas' (version '1.5.3' currently installed).

The above exception was the direct cause of the following exception:

ImportError                               Traceback (most recent call last)
Cell In[7], line 1
----> 1 v0.run()

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/omicverse/single/_via.py:146, in pyVIA.run(self)
    142 def run(self):
    143     """calculate the via graph and pseudotime
    144     
    145     """
--> 146     self.model.run_VIA()

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/omicverse/via/core.py:3141, in VIA.run_VIA(self)
   3139 self.knn_struct = _construct_knn(self.data, knn=self.knn, distance=self.distance, num_threads=self.num_threads)
   3140 st = time.time()
-> 3141 self.run_subPARC()
   3142 run_time = time.time() - st
   3143 print(f'{datetime.now()}\tTime elapsed {round(run_time,1)} seconds')

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/omicverse/via/core.py:2104, in VIA.run_subPARC(self)
   2101 if self.edgebundle_pruning_twice ==False:
   2102     #print('creating bundle with single round of global pruning at a level of', self.edgebundle_pruning)
   2103     print(f"{datetime.now()}\tStarting make edgebundle viagraph...")
-> 2104     self.hammerbundle_cluster = make_edgebundle_viagraph(layout,g_layout)
   2106 # globally trimmed link
   2107 self.edgelist_unique = set(tuple(sorted(l)) for l in zip(*locallytrimmed_sparse_vc.nonzero()))  # keep only one of (0,1) and (1,0)

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/omicverse/via/utils_via.py:863, in make_edgebundle_viagraph(layout, graph, initial_bandwidth, decay, edgebundle_pruning, via_object)
    860 edges = pd.DataFrame([e.tuple for e in graph.es], columns=['source', 'target'])
    862 edges['weight'] = graph.es['weight']
--> 863 from datashader.bundling import connect_edges, hammer_bundle
    864 hb = hammer_bundle(nodes, edges, weight='weight',initial_bandwidth = initial_bandwidth, decay=decay) #default bw=0.05, dec=0.7
    865 print(f'{datetime.now()}\tHammer dims: Nodes shape: {nodes.shape} Edges shape: {edges.shape}')

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/datashader/__init__.py:9
      5 import param
      6 __version__ = str(param.version.Version(fpath=__file__, archive_commit="$Format:%h$",
      7                                         reponame="datashader"))
----> 9 from .core import Canvas                                 # noqa (API import)
     10 from .reductions import *                                # noqa (API import)
     11 from .glyphs import Point                                # noqa (API import)

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/datashader/core.py:10
      8 import numpy as np
      9 import pandas as pd
---> 10 import dask.dataframe as dd
     11 import dask.array as da
     12 from packaging.version import Version

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/dask/dataframe/__init__.py:129
    122 except ImportError as e:
    123     msg = (
    124         "Dask dataframe requirements are not installed.\n\n"
    125         "Please either conda or pip install as follows:\n\n"
    126         "  conda install dask                     # either conda install\n"
    127         '  python -m pip install "dask[dataframe]" --upgrade  # or python -m pip install'
    128     )
--> 129     raise ImportError(msg) from e
    132 if _dask_expr_enabled():
    133     try:

ImportError: Dask dataframe requirements are not installed.

Please either conda or pip install as follows:

  conda install dask                     # either conda install
  python -m pip install "dask[dataframe]" --upgrade  # or python -m pip install
import dask
print(dask.__version__)
2024.11.0

So Dask is installed (version 2024.11.0), yet the import above still fails.

My data contains 'spliced' and 'unspliced' layers, but there are no fields such as 'palantir_pseudotime', 'palantir_diff_potential', 'palantir_branchsprobs_cell_types', 'palantir_branchsprobs', or 'X_pca' like in the example data.
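
For reference, the root cause is visible from the two error messages above: dask 2024.11.0 requires pandas >= 2.0.0, but this environment still has pandas 1.5.3, so dask.dataframe (and therefore datashader, which VIA's edge bundling imports) cannot load. A minimal check sketch, assuming the packaging module is available:

import dask
import pandas as pd
from packaging.version import Version

print("dask:", dask.__version__, "| pandas:", pd.__version__)
# dask's own error above states it requires pandas >= 2.0.0
if Version(pd.__version__) < Version("2.0.0"):
    print("pandas is too old for dask.dataframe, so the datashader import in VIA will fail")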

Zstark11 commented 1 week ago

Solved: after upgrading pandas from 1.5.3 to 2.2.3, VIA ran successfully.
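
A quick way to confirm that the previously failing import chain now works (a sanity-check sketch, not part of the omicverse API):

import pandas as pd
import dask
print(pd.__version__, dask.__version__)   # pandas should now be >= 2.0

import dask.dataframe as dd               # previously raised ImportError
from datashader.bundling import connect_edges, hammer_bundle  # the import VIA was attempting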

Zstark11 commented 1 week ago

The subsequent code also raises an error:

fig,ax1,ax2=v0.plot_trajectory_gams(basis='X_umap',clusters='scsa_celltype_cellmarker-1',draw_all_curves=False)
plt.savefig('via_fig4.pdf',dpi=300,bbox_inches='tight')
plt.show(block=True)
0% (0 of 11) |                         | Elapsed Time: 0:00:00 ETA:  --:--:--
---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
Cell In[10], line 1
----> 1 fig,ax1,ax2=v0.plot_trajectory_gams(basis='X_umap',clusters='scsa_celltype_cellmarker-1',draw_all_curves=False)
      2 #fig.savefig('figures/via_fig3.png',dpi=300,bbox_inches = 'tight')
      3 plt.savefig('via_fig4.pdf',dpi=300,bbox_inches='tight')

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/omicverse/single/_via.py:330, in pyVIA.plot_trajectory_gams(self, clusters, basis, via_fine, idx, title_str, draw_all_curves, arrow_width_scale_factor, scatter_size, scatter_alpha, figsize, linewidth, marker_edgewidth, cmap_pseudotime, dpi, highlight_terminal_states, use_maxout_edgelist)
    328 self.adata.obs[clusters]=self.adata.obs[clusters].astype('category')
    329 embedding=self.adata.obsm[basis]
--> 330 fig,ax1,ax2 = draw_trajectory_gams_pyomic(adata=self.adata,clusters=clusters,via_object=self.model, 
    331                                     via_fine=via_fine, embedding=embedding, idx=idx,
    332                                     title_str=title_str, draw_all_curves=draw_all_curves, arrow_width_scale_factor=arrow_width_scale_factor,
    333                                     scatter_size=scatter_size, scatter_alpha=scatter_alpha,figsize=figsize,
    334                                     linewidth=linewidth, marker_edgewidth=marker_edgewidth, cmap_pseudotime=cmap_pseudotime,dpi=dpi,
    335                                     highlight_terminal_states=highlight_terminal_states, use_maxout_edgelist=use_maxout_edgelist)
    336 return fig,ax1,ax2

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/omicverse/single/_via.py:809, in draw_trajectory_gams_pyomic(adata, clusters, via_object, via_fine, embedding, idx, title_str, draw_all_curves, arrow_width_scale_factor, scatter_size, scatter_alpha, figsize, linewidth, marker_edgewidth, cmap_pseudotime, dpi, highlight_terminal_states, use_maxout_edgelist)
    806 y_val = y_val.reshape((len(y_val), -1))
    807 xp = np.linspace(minx, maxx, 500)
--> 809 gam50 = pg.LinearGAM(n_splines=4, spline_order=3, lam=10).gridsearch(x_val, y_val)
    810 XX = gam50.generate_X_grid(term=0, n=500)
    811 preds = gam50.predict(XX)

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/pygam/pygam.py:1894, in GAM.gridsearch(self, X, y, weights, return_scores, keep_best, objective, progress, **param_grids)
   1892         coef = models[-1].coef_
   1893         gam.set_params(coef_=coef, force=True, verbose=False)
-> 1894     gam.fit(X, y, weights)
   1896 except ValueError as error:
   1897     msg = str(error) + '\non model with params:\n' + str(param_grid)

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/pygam/pygam.py:920, in GAM.fit(self, X, y, weights)
    917 self.statistics_['m_features'] = X.shape[1]
    919 # optimize
--> 920 self._pirls(X, y, weights)
    921 # if self._opt == 0:
    922 #     self._pirls(X, y, weights)
    923 # if self._opt == 1:
    924 #     self._pirls_naive(X, y)
    925 return self

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/pygam/pygam.py:686, in GAM._pirls(self, X, Y, weights)
    669 def _pirls(self, X, Y, weights):
    670     """
    671     Performs stable PIRLS iterations to estimate GAM coefficients
    672 
   (...)
    684     None
    685     """
--> 686     modelmat = self._modelmat(X) # build a basis matrix for the GLM
    687     n, m = modelmat.shape
    689     # initialize GLM coefficients if model is not yet fitted

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/pygam/pygam.py:459, in GAM._modelmat(self, X, term)
    437 """
    438 Builds a model matrix, B, out of the spline basis for each feature
    439 
   (...)
    453     containing model matrix of the spline basis for selected features
    454 """
    455 X = check_X(X, n_feats=self.statistics_['m_features'],
    456             edge_knots=self.edge_knots_, dtypes=self.dtype,
    457             features=self.feature, verbose=self.verbose)
--> 459 return self.terms.build_columns(X, term=term)

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/pygam/terms.py:1719, in TermList.build_columns(self, X, term, verbose)
   1717 columns = []
   1718 for term_id in term:
-> 1719     columns.append(self._terms[term_id].build_columns(X, verbose=verbose))
   1720 return sp.sparse.hstack(columns, format='csc')

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/pygam/terms.py:784, in SplineTerm.build_columns(self, X, verbose)
    768 """construct the model matrix columns for the term
    769 
    770 Parameters
   (...)
    780 scipy sparse array with n rows
    781 """
    782 X[:, self.feature][:, np.newaxis]
--> 784 splines = b_spline_basis(X[:, self.feature],
    785                          edge_knots=self.edge_knots_,
    786                          spline_order=self.spline_order,
    787                          n_splines=self.n_splines,
    788                          sparse=True,
    789                          periodic=self.basis in ['cp'],
    790                          verbose=verbose)
    792 if self.by is not None:
    793     splines = splines.multiply(X[:, self.by][:, np.newaxis])

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/pygam/utils.py:649, in b_spline_basis(x, edge_knots, n_splines, spline_order, sparse, periodic, verbose)
    646 aug_knots[-1] += 1e-9 # want last knot inclusive
    648 # prepare Haar Basis
--> 649 bases = (x >= aug_knots[:-1]).astype(np.int) * \
    650         (x < aug_knots[1:]).astype(np.int)
    651 bases[-1] = bases[-2][::-1] # force symmetric bases at 0 and 1
    653 # do recursion from Hastie et al. vectorized

File ~/miniconda3/envs/ZGY-1/lib/python3.10/site-packages/numpy/__init__.py:324, in __getattr__(attr)
    319     warnings.warn(
    320         f"In the future `np.{attr}` will be defined as the "
    321         "corresponding NumPy scalar.", FutureWarning, stacklevel=2)
    323 if attr in __former_attrs__:
--> 324     raise AttributeError(__former_attrs__[attr])
    326 if attr == 'testing':
    327     import numpy.testing as testing

AttributeError: module 'numpy' has no attribute 'int'.
`np.int` was a deprecated alias for the builtin `int`. To avoid this error in existing code, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.
The aliases was originally deprecated in NumPy 1.20; for more details and guidance see the original release note at:
    https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations

Does numpy need to be downgraded to below 1.20.0?
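
For what it's worth, the np.int failure comes from pygam's b_spline_basis (see the frame above), not from omicverse itself, so downgrading numpy should not be necessary. A commonly used workaround (an assumption on my side, not an official fix) is to restore the alias before plotting, or to upgrade pygam to a newer release that no longer uses np.int:

import numpy as np

# Workaround sketch (assumption, not an official omicverse/pygam fix):
# restore the removed alias so pygam's `.astype(np.int)` call works again.
if not hasattr(np, "int"):
    np.int = int

fig, ax1, ax2 = v0.plot_trajectory_gams(
    basis='X_umap',
    clusters='scsa_celltype_cellmarker-1',
    draw_all_curves=False,
)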

ZHANGDAPA commented 1 day ago

Had the same problem (screenshot attached).

ZHANGDAPA commented 9 hours ago

When I downgraded numpy to 1.19.5, omicverse failed to import with the following error:

ImportError                               Traceback (most recent call last)
Cell In[1], line 1
----> 1 import omicverse as ov

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/omicverse/__init__.py:11
      8 from pkg_resources import get_distribution
      9 version = lambda name: get_distribution(name).version
---> 11 from . import bulk,single,utils,bulk2single,pp,space,pl,externel
     12 #usually
     13 from .utils._data import read

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/omicverse/bulk/__init__.py:8
      1 r"""
      2 bulk (A omic framework for bulk omic analysis)
      3 """
      5 #from Pyomic.bulk.DeGene import find_DEG,Density_norm,Plot_gene_expression,ID_mapping
      6 #from Pyomic.bulk.Gene_module import pywgcna
----> 8 from ._Gene_module import pyWGCNA,readWGCNA
      9 from ._Enrichment import pyGSEA,pyGSE,geneset_enrichment,geneset_plot,geneset_enrichment_GSEA,geneset_plot_multi,enrichment_multi_concat
     10 from ._network import pyPPI,string_interaction,string_map,generate_G

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/omicverse/bulk/_Gene_module.py:2
----> 2 from ._dynamicTree import cutreeHybrid
      3 import pandas as pd
      4 import matplotlib.pyplot as plt

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/omicverse/bulk/_dynamicTree.py:784
    758 results = dict(labels = OrdNumLabs-UnlabeledExist,
    759     cores = ordCoreLabels,
    760     smallLabels = SmallLabels,
    (...)
    768     RootBranch = RootBranch, isCluster = isCluster,
    769     nPoints = nMerge+1))
    771 return(results)
--> 784 import pandas as pd
    785 import numpy as np
    786 from scipy.cluster.hierarchy import to_tree

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/pandas/__init__.py:22
     19 del _hard_dependencies, _dependency, _missing_dependencies
     21 # numpy compat
---> 22 from pandas.compat import is_numpy_dev as _is_numpy_dev  # pyright: ignore # noqa:F401
     24 try:
     25     from pandas._libs import hashtable as _hashtable, lib as _lib, tslib as _tslib

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/pandas/compat/__init__.py:25
     17 from pandas.compat._constants import (
     18     IS64,
     19     PY39,
    (...)
     22     PYPY,
     23 )
     24 import pandas.compat.compressors
---> 25 from pandas.compat.numpy import (
     26     is_numpy_dev,
     27     np_version_under1p21,
     28 )
     29 from pandas.compat.pyarrow import (
     30     pa_version_under7p0,
     31     pa_version_under8p0,
     32     pa_version_under9p0,
     33     pa_version_under11p0,
     34 )
     37 def set_function_name(f: F, name: str, cls) -> F:

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/pandas/compat/numpy/__init__.py:25
     21 np_percentile_argname = "interpolation"
     24 if _nlv < Version(_min_numpy_ver):
---> 25     raise ImportError(
     26         f"this version of pandas is incompatible with numpy < {_min_numpy_ver}\n"
     27         f"your numpy version is {_np_version}.\n"
     28         f"Please upgrade numpy to >= {_min_numpy_ver} to use this pandas version"
     29     )
     32 __all__ = [
     33     "np",
     34     "_np_version",
     35     "is_numpy_dev",
     36 ]

ImportError: this version of pandas is incompatible with numpy < 1.20.3
your numpy version is 1.19.5.
Please upgrade numpy to >= 1.20.3 to use this pandas version

ZHANGDAPA commented 9 hours ago

Later, after reinstalling pandas, I encountered a new error:

TypeError                                 Traceback (most recent call last)
Cell In[1], line 1
----> 1 import omicverse as ov

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/omicverse/__init__.py:11
      8 from pkg_resources import get_distribution
      9 version = lambda name: get_distribution(name).version
---> 11 from . import bulk,single,utils,bulk2single,pp,space,pl,externel
     12 #usually
     13 from .utils._data import read

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/omicverse/bulk/__init__.py:8
      1 r"""
      2 bulk (A omic framework for bulk omic analysis)
      3 """
      5 #from Pyomic.bulk.DeGene import find_DEG,Density_norm,Plot_gene_expression,ID_mapping
      6 #from Pyomic.bulk.Gene_module import pywgcna
----> 8 from ._Gene_module import pyWGCNA,readWGCNA
      9 from ._Enrichment import pyGSEA,pyGSE,geneset_enrichment,geneset_plot,geneset_enrichment_GSEA,geneset_plot_multi,enrichment_multi_concat
     10 from ._network import pyPPI,string_interaction,string_map,generate_G

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/omicverse/bulk/_Gene_module.py:24
     21 from ..utils import pyomic_palette,plot_network
     22 import os
---> 24 from ..externel.PyWGCNA.wgcna import pyWGCNA
     25 from ..externel.PyWGCNA.utils import readWGCNA
     28 class pyWGCNA_old(object):

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/omicverse/externel/__init__.py:1
----> 1 from . import (scSLAT,CEFCON,mofapy2,GNTD,spaceflow,STT,
      2     tosica,STAGATE_pyG,STAligner,spatrio,PROST,cytotrace2,
      3     GraphST,commot,cnmf,starfysh,scMulan,flowsig,PyWGCNA,CAST,scanorama
      4     )

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/omicverse/externel/CAST/__init__.py:1
----> 1 from .main import CAST_STACK, CAST_MARK, CAST_PROJECT
      2 from .CAST_Stack import reg_params, region_detect, corr_dist, CAST_STACK_rough
      3 from .visualize import kmeans_plot_multiple, plot_mid, dsplot, plot_mid_v2

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/omicverse/externel/CAST/main.py:2
      1 from .CAST_Mark import *
----> 2 from .CAST_Stack import *
      3 from .CAST_Projection import *
      4 from .utils import *

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/omicverse/externel/CAST/CAST_Stack.py:14
      8 from .visualize import add_scale_bar
     10 #################### Registration ####################
     11 # Parameters class
     13 @dataclass
---> 14 class reg_params:
     15     dataname : str
     16     ### affine

File ~/anaconda3/envs/omicverse3.8/lib/python3.8/site-packages/omicverse/externel/CAST/CAST_Stack.py:19, in reg_params()
     17     theta_r1 : float = 0
     18     theta_r2 : float = 0
---> 19     d_list : list[float] = field(default_factory=list)
     20     translation_params : list[float] = None
     21     mirror_t : list[float] = None

TypeError: 'type' object is not subscriptable
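
For context, the failing line is d_list : list[float] = field(default_factory=list): subscripting the built-in list in a dataclass annotation is evaluated at class-definition time and only works on Python >= 3.9 (PEP 585), while the paths above show a Python 3.8 environment, hence the 'type' object is not subscriptable error. A minimal sketch of the difference (hypothetical class name, not omicverse code):

from dataclasses import dataclass, field
from typing import List

@dataclass
class RegParamsSketch:
    # Works on Python 3.8+: typing.List
    d_list: List[float] = field(default_factory=list)
    # The CAST module uses the built-in generic instead, which needs Python >= 3.9:
    # d_list: list[float] = field(default_factory=list)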

Zstark11 commented 9 hours ago


omicverse needs numpy >= 1.20.3 (per the pandas error above), so downgrading below 1.20 is not an option.
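
A quick check sketch for that constraint (the >= 1.20.3 number comes from pandas' compat error in the traceback above):

import numpy as np
from packaging.version import Version

# pandas' compat check in the traceback above requires numpy >= 1.20.3
assert Version(np.__version__) >= Version("1.20.3"), np.__version__
print("numpy", np.__version__)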