@VikParuchuri
The reading-order model is unable to perform multi-batch inference: it raises an error when running inference on multiple images.
code:
model = load_model(device="cpu", dtype=torch.float32)
Loaded reading order model vikp/surya_order on device cpu with dtype torch.float32
xxx vikp/surya_order
Traceback (most recent call last):
File "D:\code\pycharm\OCR_LAYOUT - \sb.py", line 147, in
order_predictions = batch_ordering(inputs, bb, model, processor, batch_size=4)
File "D:\code\pycharm\OCR_LAYOUT - \surya\ordering.py", line 70, in batch_ordering
return_dict = model(
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\code\pycharm\OCR_LAYOUT - \surya\model\ordering\encoderdecoder.py", line 64, in forward
decoder_outputs = self.decoder(
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\code\pycharm\OCR_LAYOUT - \surya\model\ordering\decoder.py", line 526, in forward
outputs = self.model.decoder(
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\code\pycharm\OCR_LAYOUT - \surya\model\ordering\decoder.py", line 342, in forward
inputs_embeds = self.embed_tokens(input_boxes, input_boxes_counts, past_key_values_length) * self.embed_scale
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\code\pycharm\OCR_LAYOUT - \surya\model\ordering\decoder.py", line 261, in forward
coord_embeds = self.x1_embed(x1) + self.y1_embed(y1) + self.x2_embed(x2) + self.y2_embed(y2)
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\sparse.py", line 162, in forward
return F.embedding(
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\functional.py", line 2233, in embedding
return torch.embedding(weight, input, padding_idx, scale_grad_by_freq, sparse)
IndexError: index out of range in self
@VikParuchuri The reading-order model is unable to perform multi-batch inference: it raises an error when running inference on multiple images.
code:
model = load_model(device="cpu", dtype=torch.float32)
processor = load_processor()
ii = Image.open(IMAGE_PATH)
jj = ii.copy()
kk = ii.copy()
inputs = [ii, jj, kk]
bb = [bboxes, list(bboxes), list(bboxes)]
order_predictions = batch_ordering(inputs, bb, model, processor, batch_size=4)
error:
Loaded reading order model vikp/surya_order on device cpu with dtype torch.float32 xxx vikp/surya_order Traceback (most recent call last): File "D:\code\pycharm\OCR_LAYOUT - \sb.py", line 147, in
order_predictions = batch_ordering(inputs, bb, model, processor, batch_size=4)
File "D:\code\pycharm\OCR_LAYOUT - \surya\ordering.py", line 70, in batch_ordering
return_dict = model(
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\code\pycharm\OCR_LAYOUT - \surya\model\ordering\encoderdecoder.py", line 64, in forward
decoder_outputs = self.decoder(
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\code\pycharm\OCR_LAYOUT - \surya\model\ordering\decoder.py", line 526, in forward
outputs = self.model.decoder(
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\code\pycharm\OCR_LAYOUT - \surya\model\ordering\decoder.py", line 342, in forward
inputs_embeds = self.embed_tokens(input_boxes, input_boxes_counts, past_key_values_length) * self.embed_scale
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "D:\code\pycharm\OCR_LAYOUT - \surya\model\ordering\decoder.py", line 261, in forward
coord_embeds = self.x1_embed(x1) + self.y1_embed(y1) + self.x2_embed(x2) + self.y2_embed(y2)
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\modules\sparse.py", line 162, in forward
return F.embedding(
File "C:\Users\AppData\Roaming\Python\Python39\site-packages\torch\nn\functional.py", line 2233, in embedding
return torch.embedding(weight, input, padding_idx, scale_grad_by_freq, sparse)
IndexError: index out of range in self
Process finished with exit code 1