mir-aidj / all-in-one

All-In-One Music Structure Analyzer
http://arxiv.org/abs/2307.16425
MIT License

TypeError: natten1dqkrpb() takes 4 positional arguments but 5 were given #16

Closed · MrSutsuj closed this 3 weeks ago

MrSutsuj commented 4 weeks ago

Dear Taejun, dear Vitaly,

We really love your work and think it's brilliant! We'd love to try it out, but we're currently stuck on the following error:

TypeError: natten1dqkrpb() takes 4 positional arguments but 5 were given

We're using Python 3.11.7 on an Apple M1 with a manually installed Natten 0.14.4. Might the problem be that Natten 0.14.4 is older than the latest version, 0.17.1?
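
In case it helps, this is how we confirmed which Natten build our environment actually resolves (a quick sanity check; importlib.metadata is in the standard library from Python 3.8 on):

from importlib.metadata import version

# Print the Natten version the active environment resolves to.
print(version("natten"))  # prints 0.14.4 for us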

We'd love to hear from you!

Thank you so much and best regards!


TypeError                                 Traceback (most recent call last)
Cell In[6], line 1
----> 1 result = allin1.analyze('/Users/justus/Downloads/Happy.wav')

File /opt/anaconda3/lib/python3.11/site-packages/allin1/analyze.py:134, in analyze(paths, out_dir, visualize, sonify, model, device, include_activations, include_embeddings, demix_dir, spec_dir, keep_byproducts, overwrite, multiprocess)
    131 for path, spec_path in pbar:
    132   pbar.set_description(f'Analyzing {path.name}')
--> 134   result = run_inference(
    135     path=path,
    136     spec_path=spec_path,
    137     model=model,
    138     device=device,
    139     include_activations=include_activations,
    140     include_embeddings=include_embeddings,
    141   )
    143   # Save the result right after the inference.
    144   # Checkpointing is always important for this kind of long-running tasks...
    145   # for my mental health...
    146   if out_dir is not None:

File /opt/anaconda3/lib/python3.11/site-packages/allin1/helpers.py:29, in run_inference(path, spec_path, model, device, include_activations, include_embeddings)
     26 spec = np.load(spec_path)
     27 spec = torch.from_numpy(spec).unsqueeze(0).to(device)
---> 29 logits = model(spec)
     31 metrical_structure = postprocess_metrical_structure(logits, model.cfg)
     32 functional_structure = postprocess_functional_structure(logits, model.cfg)

File /opt/anaconda3/lib/python3.11/site-packages/torch/nn/modules/module.py:1532, in Module._wrapped_call_impl(self, *args, **kwargs)
   1530   return self._compiled_call_impl(*args, **kwargs)  # type: ignore[misc]
   1531 else:
-> 1532   return self._call_impl(*args, **kwargs)

File /opt/anaconda3/lib/python3.11/site-packages/torch/nn/modules/module.py:1541, in Module._call_impl(self, *args, **kwargs)
   1536 # If we don't have any hooks, we want to skip the rest of the logic in
   1537 # this function, and just call forward.
   1538 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
   1539     or _global_backward_pre_hooks or _global_backward_hooks
   1540     or _global_forward_hooks or _global_forward_pre_hooks):
-> 1541   return forward_call(*args, **kwargs)
   1543 try:
   1544   result = None

File /opt/anaconda3/lib/python3.11/site-packages/allin1/models/ensemble.py:21, in Ensemble.forward(self, x)
     20 def forward(self, x):
---> 21   outputs: List[AllInOneOutput] = [model(x) for model in self.models]
     22   avg = AllInOneOutput(
     23     logits_beat=torch.stack([output.logits_beat for output in outputs], dim=0).mean(dim=0),
     24     logits_downbeat=torch.stack([output.logits_downbeat for output in outputs], dim=0).mean(dim=0),
    (...)
     27     embeddings=torch.stack([output.embeddings for output in outputs], dim=-1),
     28   )
     30   return avg

File /opt/anaconda3/lib/python3.11/site-packages/allin1/models/ensemble.py:21, in <listcomp>(.0)
---> 21   outputs: List[AllInOneOutput] = [model(x) for model in self.models]

[The torch module.py:1532 (_wrapped_call_impl) and module.py:1541 (_call_impl) frames above repeat identically before each of the remaining calls and are omitted here.]

File /opt/anaconda3/lib/python3.11/site-packages/allin1/models/allinone.py:51, in AllInOne.forward(self, inputs, output_attentions)
     48 inputs = inputs.reshape(-1, 1, T, F)  # N x K, C=1, T, F=81
     49 frame_embed = self.embeddings(inputs)  # NK, T, C=16
---> 51 encoder_outputs = self.encoder(
     52   frame_embed,
     53   output_attentions=output_attentions,
     54 )
     55 hidden_state_levels = encoder_outputs[0]
     57 hidden_states = hidden_state_levels[-1].reshape(N, K, T, -1)  # N, K, T, C=16

File /opt/anaconda3/lib/python3.11/site-packages/allin1/models/allinone.py:110, in AllInOneEncoder.forward(self, frame_embed, output_attentions)
    108 hidden_states = frame_embed
    109 for i, layer in enumerate(self.layers):
--> 110   layer_outputs = layer(hidden_states, output_attentions)
    111   hidden_states = layer_outputs[0]
    112   hidden_state_levels.append(hidden_states)

File /opt/anaconda3/lib/python3.11/site-packages/allin1/models/allinone.py:170, in AllInOneBlock.forward(self, hidden_states, output_attentions)
    167 NK, T, C = hidden_states.shape
    168 N, K = NK // self.cfg.data.num_instruments, self.cfg.data.num_instruments
--> 170 timelayer_outputs = self.timelayer(hidden_states, output_attentions)
    171 hidden_states = timelayer_outputs[0]
    172 if self.cfg.instrument_attention:

File /opt/anaconda3/lib/python3.11/site-packages/allin1/models/dinat.py:298, in _DinatLayerNd.forward(self, hidden_states, output_attentions)
    295 if attention is None:
    296   continue
--> 298 attention_output = attention(attention_inputs, output_attentions=output_attentions)
    299 attention_output = attention_output[0]
    301 if is_2d:

File /opt/anaconda3/lib/python3.11/site-packages/allin1/models/dinat.py:193, in _NeighborhoodAttentionModuleNd.forward(self, hidden_states, output_attentions)
    188 def forward(
    189   self,
    190   hidden_states: torch.Tensor,
    191   output_attentions: Optional[bool] = False,
    192 ) -> Tuple[torch.Tensor]:
--> 193   self_outputs = self.self(hidden_states, output_attentions)
    194   attention_output = self.output(self_outputs[0])
    195   outputs = (attention_output,) + self_outputs[1:]  # add attentions if we output them

File /opt/anaconda3/lib/python3.11/site-packages/allin1/models/dinat.py:99, in _NeighborhoodAttentionNd.forward(self, hidden_states, output_attentions)
     95 query_layer = query_layer / math.sqrt(self.attention_head_size)
     97 # Compute NA between "query" and "key" to get the raw attention scores, and add relative positional biases.
     98 # attention_scores = natten2dqkrpb(query_layer, key_layer, self.rpb, self.dilation)
---> 99 attention_scores = self.nattendqkrpb(query_layer, key_layer, self.rpb, self.kernel_size, self.dilation)
    101 # Normalize the attention scores to probabilities.
    102 attention_probs = nn.functional.softmax(attention_scores, dim=-1)

TypeError: natten1dqkrpb() takes 4 positional arguments but 5 were given

tae-jun commented 3 weeks ago

Hi, yes, you should use the latest version of natten.

Their API changed at some point.
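
For anyone landing here later: the last frame of the traceback shows allin1 passing five positional arguments (query, key, rpb, kernel_size, dilation), while the natten1dqkrpb() from natten 0.14.4 accepts only four (no kernel_size). Upgrading natten is the real fix, but if you ever have to stay on an old build, a rough compatibility shim along these lines should work (just a sketch; the exact import path and signatures may differ between natten releases):

from natten.functional import natten1dqkrpb

def natten1dqkrpb_compat(query, key, rpb, kernel_size, dilation):
  # Sketch: try the newer five-argument call first, then fall back
  # to the older four-argument signature (e.g. natten 0.14.x).
  try:
    return natten1dqkrpb(query, key, rpb, kernel_size, dilation)
  except TypeError:
    return natten1dqkrpb(query, key, rpb, dilation)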

Please reopen the issue if it still isn't fixed after the upgrade!

Thanks.