Hello,

I appreciate the effort and time you are putting into this project; it is exactly the utility most of us need but didn't know existed. However, I have run into an issue.
I have just installed all requirements and then tried running
python gui.py
and got the following error:

Traceback (most recent call last):
  File "D:\Latex_OCR\LaTeX-OCR-main\LaTeX-OCR-main\gui.py", line 274, in <module>
    ex = App(arguments)
  File "D:\Latex_OCR\LaTeX-OCR-main\LaTeX-OCR-main\gui.py", line 26, in __init__
    self.initModel()
  File "D:\Latex_OCR\LaTeX-OCR-main\LaTeX-OCR-main\gui.py", line 33, in initModel
    args, *objs = pix2tex.initialize(self.args)
  File "D:\Latex_OCR\LaTeX-OCR-main\LaTeX-OCR-main\pix2tex.py", line 55, in initialize
    model.load_state_dict(torch.load(args.checkpoint, map_location=args.device))
  File "C:\Users\amuka\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1482, in load_state_dict
    raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
RuntimeError: Error(s) in loading state_dict for Model:
    Missing key(s) in state_dict: "decoder.net.attn_layers.layers.0.0.0.weight", "decoder.net.attn_layers.layers.0.0.0.bias", "decoder.net.attn_layers.layers.1.0.0.weight", "decoder.net.attn_layers.layers.1.0.0.bias", "decoder.net.attn_layers.layers.2.0.0.weight", "decoder.net.attn_layers.layers.2.0.0.bias", "decoder.net.attn_layers.layers.2.1.net.3.weight", "decoder.net.attn_layers.layers.2.1.net.3.bias", "decoder.net.attn_layers.layers.3.0.0.weight", "decoder.net.attn_layers.layers.3.0.0.bias", "decoder.net.attn_layers.layers.4.0.0.weight", "decoder.net.attn_layers.layers.4.0.0.bias", "decoder.net.attn_layers.layers.5.0.0.weight", "decoder.net.attn_layers.layers.5.0.0.bias", "decoder.net.attn_layers.layers.5.1.net.3.weight", "decoder.net.attn_layers.layers.5.1.net.3.bias", "decoder.net.attn_layers.layers.6.0.0.weight", "decoder.net.attn_layers.layers.6.0.0.bias", "decoder.net.attn_layers.layers.7.0.0.weight", "decoder.net.attn_layers.layers.7.0.0.bias", "decoder.net.attn_layers.layers.8.0.0.weight", "decoder.net.attn_layers.layers.8.0.0.bias", "decoder.net.attn_layers.layers.8.1.net.3.weight", "decoder.net.attn_layers.layers.8.1.net.3.bias", "decoder.net.attn_layers.layers.9.0.0.weight", "decoder.net.attn_layers.layers.9.0.0.bias", "decoder.net.attn_layers.layers.10.0.0.weight", "decoder.net.attn_layers.layers.10.0.0.bias", "decoder.net.attn_layers.layers.11.0.0.weight", "decoder.net.attn_layers.layers.11.0.0.bias", "decoder.net.attn_layers.layers.11.1.net.3.weight", "decoder.net.attn_layers.layers.11.1.net.3.bias".
    Unexpected key(s) in state_dict: "decoder.net.attn_layers.layers.0.0.weight", "decoder.net.attn_layers.layers.0.0.bias", "decoder.net.attn_layers.layers.1.0.weight", "decoder.net.attn_layers.layers.1.0.bias", "decoder.net.attn_layers.layers.2.0.weight", "decoder.net.attn_layers.layers.2.0.bias", "decoder.net.attn_layers.layers.2.1.net.2.weight", "decoder.net.attn_layers.layers.2.1.net.2.bias", "decoder.net.attn_layers.layers.3.0.weight", "decoder.net.attn_layers.layers.3.0.bias", "decoder.net.attn_layers.layers.4.0.weight", "decoder.net.attn_layers.layers.4.0.bias", "decoder.net.attn_layers.layers.5.0.weight", "decoder.net.attn_layers.layers.5.0.bias", "decoder.net.attn_layers.layers.5.1.net.2.weight", "decoder.net.attn_layers.layers.5.1.net.2.bias", "decoder.net.attn_layers.layers.6.0.weight", "decoder.net.attn_layers.layers.6.0.bias", "decoder.net.attn_layers.layers.7.0.weight", "decoder.net.attn_layers.layers.7.0.bias", "decoder.net.attn_layers.layers.8.0.weight", "decoder.net.attn_layers.layers.8.0.bias", "decoder.net.attn_layers.layers.8.1.net.2.weight", "decoder.net.attn_layers.layers.8.1.net.2.bias", "decoder.net.attn_layers.layers.9.0.weight", "decoder.net.attn_layers.layers.9.0.bias", "decoder.net.attn_layers.layers.10.0.weight", "decoder.net.attn_layers.layers.10.0.bias", "decoder.net.attn_layers.layers.11.0.weight", "decoder.net.attn_layers.layers.11.0.bias", "decoder.net.attn_layers.layers.11.1.net.2.weight", "decoder.net.attn_layers.layers.11.1.net.2.bias".
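In case it helps with diagnosis: the missing and unexpected key names seem to differ only by one nesting level in the decoder's attention layers (the freshly built model expects e.g. decoder.net.attn_layers.layers.0.0.0.weight, while the checkpoint contains decoder.net.attn_layers.layers.0.0.weight). Below is a minimal sketch of how I can list the checkpoint's decoder keys for comparison; the path checkpoints/weights.pth is just a placeholder for whatever args.checkpoint points to in my settings.

import torch

# Inspect which key names the downloaded checkpoint actually contains.
# "checkpoints/weights.pth" is a placeholder for the path in args.checkpoint.
state_dict = torch.load("checkpoints/weights.pth", map_location="cpu")

# Print the first decoder attention-layer keys from the checkpoint; here these
# look like "decoder.net.attn_layers.layers.0.0.weight", whereas the model
# built by pix2tex.initialize expects "decoder.net.attn_layers.layers.0.0.0.weight".
for key in state_dict:
    if key.startswith("decoder.net.attn_layers.layers.0."):
        print(key)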
Any help or comment would be highly appreciated.