LTS4 / graph-learning

Collection of models for learning networks from signals.
BSD 3-Clause "New" or "Revised" License

Laplacian initialisation in glmm #10

Open iricchi opened 1 month ago

iricchi commented 1 month ago

Add to the glmm script the option of initialising the Laplacians (and actually use the initialised matrices in `_estimate_gauss_laplacian_parameters`).
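A minimal sketch of what this option could look like, assuming the attribute names that appear in the traceback further down (`laplacian_init`, `laplacians_`, `weights_`, `means_`); `GLMMSketch` and `_estimate_parameters` are hypothetical stand-ins for the real `GLMM` class and `_estimate_gauss_laplacian_parameters` in `graph_learn/clustering/glmm.py`, only to illustrate the control flow:

```python
import numpy as np


class GLMMSketch:
    """Toy stand-in for GLMM, only to illustrate the proposed laplacian_init option."""

    def __init__(self, n_components, avg_degree, delta=1.0, laplacian_init=None):
        self.n_components = n_components
        self.avg_degree = avg_degree
        self.delta = delta
        # None, or an array of Laplacians to start the EM iterations from.
        self.laplacian_init = laplacian_init

    def _initialize(self, x, resp):
        _n_samples, self.n_nodes = x.shape
        # In the real code this would be _estimate_gauss_laplacian_parameters(
        #     x, resp, self.avg_degree, self.delta, self.laplacian_init)
        # as in the edited version shown in the traceback below.
        weights, means, laplacians = self._estimate_parameters(x, resp)
        self.weights_, self.means_ = weights, means
        if self.laplacian_init is None:
            # No initialisation supplied: keep the freshly estimated Laplacians.
            self.laplacians_ = laplacians
        else:
            # Initialisation supplied: actually use it, broadcasting a single
            # (n_nodes, n_nodes) matrix to one matrix per component.
            init = np.asarray(self.laplacian_init, dtype=float)
            if init.ndim == 2:
                init = np.broadcast_to(init, (self.n_components, *init.shape)).copy()
            self.laplacians_ = init

    def _estimate_parameters(self, x, resp):
        # Placeholder estimates so the sketch runs on its own; the real values
        # come from _estimate_gauss_laplacian_parameters in glmm.py.
        n = x.shape[1]
        weights = resp.mean(axis=0)
        means = resp.T @ x / resp.sum(axis=0)[:, None]
        laplacians = np.stack([np.eye(n) * n - 1.0] * self.n_components)
        return weights, means, laplacians


# Tiny demo: a single (4, 4) Laplacian is broadcast to (2, 4, 4).
rng = np.random.default_rng(0)
x = rng.normal(size=(20, 4))
resp = rng.random((20, 2))
resp /= resp.sum(axis=1, keepdims=True)
model = GLMMSketch(n_components=2, avg_degree=2, laplacian_init=np.eye(4) * 4 - 1.0)
model._initialize(x, resp)
print(model.laplacians_.shape)  # (2, 4, 4)
```

The broadcast in the `else` branch is just one way to accept either a single matrix or one matrix per component; whether that is the intended behaviour is exactly the dimensionality question raised below.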

iricchi commented 1 month ago

I get an error when I pass a Laplacian initialisation as input, after adding the Laplacians as an argument to `_estimate_gauss_laplacian_parameters`, editing lines 131 and 158, and adding an `if` conditional for the case where `laplacians_init` is a NumPy array.

Dimensionality error: I am not sure whether the expected shape is Nnodes x Nnodes or Nnodes x Nnodes x n_components.
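A plausible (unverified) reading of the traceback below: `gsp_learn_graph_log_degrees` works on vectorised edge weights whose leading shape has to match `square_to_vec(sq_dist)`, i.e. one row of N*(N-1)/2 upper-triangular entries per component, which would point to a stack of one (Nnodes x Nnodes) Laplacian per mixture component rather than a single matrix. A numpy-only sketch of that shape relation, where `laplacians_to_edge_vec` is a hypothetical stand-in for `square_to_vec(-laplacian)` and the stack is stored as a `(n_components, n_nodes, n_nodes)` array:

```python
import numpy as np


def laplacians_to_edge_vec(laplacians):
    """Hypothetical helper (stand-in for square_to_vec(-laplacian)): map a
    (K, N, N) stack of Laplacians to a (K, N*(N-1)/2) array of edge weights
    (off-diagonal Laplacian entries are minus the edge weights)."""
    laplacians = np.asarray(laplacians, dtype=float)
    if laplacians.ndim == 2:
        laplacians = laplacians[None]  # single matrix -> add a component axis
    _, n_nodes, _ = laplacians.shape
    iu = np.triu_indices(n_nodes, k=1)
    return -laplacians[:, iu[0], iu[1]]


# Toy check: K=2 components, N=4 nodes.
rng = np.random.default_rng(0)
W = rng.random((2, 4, 4))
W = (W + W.transpose(0, 2, 1)) / 2             # symmetric edge weights
W[:, np.arange(4), np.arange(4)] = 0           # no self-loops
L = W.sum(axis=-1)[..., None] * np.eye(4) - W  # L = D - W, per component
print(laplacians_to_edge_vec(L).shape)         # (2, 6): one edge-weight row per component
```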

```
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
Cell In[31], line 3
      1 # delta doesn't change much ... amplitute but to check
      2 glmm = GLMM(n_components=K,avg_degree=ave_deg,laplacian_init=laplacians,init_params="k-means++",maxiter=10000)
----> 3 glmm.fit(X)  # -> no labels but means, laplacian glmm.means_ , glmm.laplacians_
      4 # # and then run probability to get gamma hats
      5 gamma_hats = glmm.predict_proba(X)

File ~/anaconda3/envs/python310/lib/python3.10/site-packages/sklearn/mixture/_base.py:181, in BaseMixture.fit(self, X, y)
    155 """Estimate model parameters with the EM algorithm.
    156
    157 The method fits the model n_init times and sets the parameters with
   (...)
    178     The fitted mixture.
    179 """
    180 # parameters are validated in fit_predict
--> 181 self.fit_predict(X, y)
    182 return self

File ~/anaconda3/envs/python310/lib/python3.10/site-packages/sklearn/base.py:1473, in _fit_context.<locals>.decorator.<locals>.wrapper(estimator, *args, **kwargs)
   1466     estimator._validate_params()
   1468 with config_context(
   1469     skip_parameter_validation=(
   1470         prefer_skip_nested_validation or global_skip_validation
   1471     )
   1472 ):
-> 1473     return fit_method(estimator, *args, **kwargs)

File ~/anaconda3/envs/python310/lib/python3.10/site-packages/sklearn/mixture/_base.py:235, in BaseMixture.fit_predict(self, X, y)
    232 self._print_verbose_msg_init_beg(init)
    234 if do_init:
--> 235     self._initialize_parameters(X, random_state)
    237 lower_bound = -np.inf if do_init else self.lower_bound_
    239 if self.max_iter == 0:

File ~/anaconda3/envs/python310/lib/python3.10/site-packages/sklearn/mixture/_base.py:140, in BaseMixture._initialize_parameters(self, X, random_state)
    133 _, indices = kmeans_plusplus(
    134     X,
    135     self.n_components,
    136     random_state=random_state,
    137 )
    138 resp[indices, np.arange(self.n_components)] = 1
--> 140 self._initialize(X, resp)

File ~/anaconda3/envs/python310/lib/python3.10/site-packages/graph_learn/clustering/glmm.py:133, in GLMM._initialize(self, x, resp)
    130 def _initialize(self, x, resp):
    131     _n_samples, self.n_nodes = x.shape
--> 133     self.weights_, self.means_, laplacians = _estimate_gauss_laplacian_parameters(
    134         x, resp, self.avg_degree, self.delta, self.laplacian_init
    135     )
    137     if self.laplacian_init is None:
    138         self.laplacians_ = laplacians

File ~/anaconda3/envs/python310/lib/python3.10/site-packages/graph_learn/clustering/glmm.py:35, in _estimate_gauss_laplacian_parameters(x, resp, avg_degree, delta, laplacian)
     32 sq_dist = np.sum((y[..., np.newaxis] - y[..., np.newaxis, :]) ** 2, axis=0)
     34 # theta = np.mean(sq_dist) / norm_par
---> 35 edge_weights = delta * gsp_learn_graph_log_degrees(
     36     square_to_vec(sq_dist) * [[get_theta(sqd, avg_degree)] for sqd in sq_dist],
     37     alpha=1,
     38     beta=1,
     39     edge_init=square_to_vec(-laplacian)
     40 )
     42 laplacians = laplacian_squareform_vec(edge_weights)
     44 return weights, means, laplacians

File ~/anaconda3/envs/python310/lib/python3.10/site-packages/graph_learn/smooth_learning.py:154, in gsp_learn_graph_log_degrees(distances, alpha, beta, edge_init, maxit, tol, step_size, edge_tol)
    152 else:
    153     edge_w = np.zeros_like(distances)
--> 154 d_n = sum_op @ edge_w
    156 for i in range(maxit):
    157     y_n = (1 - step_size * 2 * beta) * edge_w - step_size * (sum_op_t @ d_n)

File ~/anaconda3/envs/python310/lib/python3.10/site-packages/scipy/sparse/_base.py:695, in _spbase.__matmul__(self, other)
    692 if isscalarlike(other):
    693     raise ValueError("Scalar operands are not allowed, "
    694                      "use '*' instead")
--> 695 return self._matmul_dispatch(other)

File ~/anaconda3/envs/python310/lib/python3.10/site-packages/scipy/sparse/_base.py:642, in _spbase._matmul_dispatch(self, other)
    637 elif other.ndim == 2:
    638     ##
    639     # dense 2D array or matrix ("multivector")
    641     if other.shape[0] != N:
--> 642         raise ValueError('dimension mismatch')
    644     result = self._matmul_multivector(np.asarray(other))
    646     if isinstance(other, np.matrix):

ValueError: dimension mismatch
```
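For context, a hedged reading of the final error: scipy's sparse `@` just checks that the dense operand's leading axis matches the operator's number of columns, so `edge_w` (which comes from the user-supplied `edge_init`) apparently does not have the edge-vector shape that `sum_op` expects. A tiny, graph_learn-independent reproduction of the same failure mode, with a made-up 4-node / 6-edge operator standing in for `sum_op`:

```python
import numpy as np
import scipy.sparse as sp

# Hypothetical nodes-by-edges operator in the spirit of sum_op: each column
# touches the two endpoints of one edge of a 4-node complete graph.
n_nodes, n_edges = 4, 6
rows = [0, 1, 0, 2, 0, 3, 1, 2, 1, 3, 2, 3]
cols = np.repeat(np.arange(n_edges), 2)
sum_op = sp.csr_matrix((np.ones(len(rows)), (rows, cols)), shape=(n_nodes, n_edges))

edge_w_ok = np.zeros(n_edges)       # vectorised edge weights: leading axis = n_edges
print((sum_op @ edge_w_ok).shape)   # (4,) -- works

edge_w_bad = np.zeros((n_nodes, n_nodes))  # Laplacian-shaped array instead of an edge vector
sum_op @ edge_w_bad                 # raises ValueError: dimension mismatch
```

If that reading is right, the fix would be to make sure `square_to_vec(-laplacian)` returns an array with the same shape as the `distances` argument built from `square_to_vec(sq_dist)`.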