OpenMOSS / MOSS

An open-source tool-augmented conversational language model from Fudan University
https://txsun1997.github.io/blogs/moss.html
Apache License 2.0

main.c:4:10 fatal error: Python.h: No such file or directory #254

Open · JackMeiLong opened this issue 1 year ago

JackMeiLong commented 1 year ago

Version: moss-moon-003-sft-int8. The following error occurs:

```
outputs = model.generate(**inputs, do_sample=True, temperature=0.7, top_p=0.8, repetition_penalty=1.02, max_new_tokens=256)
Setting pad_token_id to eos_token_id:106068 for open-end generation.
/tmp/tmpmb2st6kj/main.c:4:10: fatal error: Python.h: No such file or directory
    4 | #include <Python.h>
      |          ^~~~~~~~~~
compilation terminated.
Traceback (most recent call last):
  File "<string>", line 21, in matmul_248_kernel
KeyError: ('2-.-0-.-0-d82511111ad128294e9d31a6ac684238-d6252949da17ceb5f3a278a70250af13-3b85c7bef5f0a641282f3b73af50f599-3d2aedeb40d6d81c66a42791e268f98b-3498c340fd4b6ee7805fd54b882a04f5-e1f133f98d04093da2078dfc51c36b72-b26258bf01f839199e39d64851821f26-d7c06e3b46e708006c15224aac7a1378-f585402118c8a136948ce0a49cfe122c', (torch.float16, torch.int32, torch.float16, torch.float16, torch.int32, torch.int32, 'i32', 'i32', 'i32', 'i32', 'i32', 'i32', 'i32', 'i32', 'i32', 'i32', 'i32', 'i32', 'i32'), (256, 64, 32, 8), (True, True, True, True, True, True, (False, False), (True, False), (True, False), (False, False), (False, False), (True, False), (False, True), (True, False), (False, True), (True, False), (False, True), (True, False), (True, False)))
```

During handling of the above exception, another exception occurred:

```
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "/usr/local/lib/python3.9/dist-packages/torch/autograd/grad_mode.py", line 27, in decorate_context
    return func(*args, **kwargs)
  File "/usr/local/lib/python3.9/dist-packages/transformers/generation/utils.py", line 1571, in generate
    return self.sample(
  File "/usr/local/lib/python3.9/dist-packages/transformers/generation/utils.py", line 2534, in sample
    outputs = self(
  File "/usr/local/lib/python3.9/dist-packages/torch/nn/modules/module.py", line 1194, in _call_impl
    return forward_call(*input, **kwargs)
  File "/root/.cache/huggingface/modules/transformers_modules/local/modeling_moss.py", line 674, in forward
    transformer_outputs = self.transformer(
  File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1194, in _call_impl
    return forward_call(*input, **kwargs)
  File "/root/.cache/huggingface/modules/transformers_modules/local/modeling_moss.py", line 545, in forward
    outputs = block(
  File "/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py", line 1194, in _call_impl
    return forward_call(*input, **kwargs)
  File "/root/.cache/huggingface/modules/transformers_modules/local/modeling_moss.py", line 270, in forward
    attn_outputs = self.attn(
  File "/usr/local/lib/python3.9/dist-packages/torch/nn/modules/module.py", line 1194, in _call_impl
    return forward_call(*input, **kwargs)
  File "/root/.cache/huggingface/modules/transformers_modules/local/modeling_moss.py", line 164, in forward
    qkv = self.qkv_proj(hidden_states)
  File "/usr/local/lib/python3.9/dist-packages/torch/nn/modules/module.py", line 1194, in _call_impl
    return forward_call(*input, **kwargs)
  File "/root/.cache/huggingface/modules/transformers_modules/local/quantization.py", line 371, in forward
    out = QuantLinearFunction.apply(x.reshape(-1, x.shape[-1]), self.qweight, self.scales,
  File "/usr/local/lib/python3.9/dist-packages/torch/cuda/amp/autocast_mode.py", line 105, in decorate_fwd
    return fwd(*args, **kwargs)
  File "/root/.cache/huggingface/modules/transformers_modules/local/quantization.py", line 283, in forward
    output = matmul248(input, qweight, scales, qzeros, g_idx, bits, maxq)
  File "/root/.cache/huggingface/modules/transformers_modules/local/quantization.py", line 254, in matmul248
    matmul_248_kernel[grid](input, qweight, output,
  File "/root/.cache/huggingface/modules/transformers_modules/local/custom_autotune.py", line 89, in run
    timings = {config: self._bench(*args, config=config, **kwargs)
  File "/root/.cache/huggingface/modules/transformers_modules/local/custom_autotune.py", line 89, in <dictcomp>
    timings = {config: self._bench(*args, config=config, **kwargs)
  File "/root/.cache/huggingface/modules/transformers_modules/local/custom_autotune.py", line 71, in _bench
    return triton.testing.do_bench(kernel_call, rep=40)
  File "/usr/local/lib/python3.9/dist-packages/triton/testing.py", line 143, in do_bench
    fn()
  File "/root/.cache/huggingface/modules/transformers_modules/local/custom_autotune.py", line 67, in kernel_call
    self.fn.run(*args, num_warps=config.num_warps, num_stages=config.num_stages, **current)
  File "<string>", line 41, in matmul_248_kernel
  File "/usr/local/lib/python3.9/dist-packages/triton/compiler.py", line 1588, in compile
    so_path = make_stub(name, signature, constants)
  File "/usr/local/lib/python3.9/dist-packages/triton/compiler.py", line 1477, in make_stub
    so = _build(name, src_path, tmpdir)
  File "/usr/local/lib/python3.9/dist-packages/triton/compiler.py", line 1392, in _build
    ret = subprocess.check_call(cc_cmd)
  File "/usr/lib/python3.9/subprocess.py", line 373, in check_call
    raise CalledProcessError(retcode, cmd)
subprocess.CalledProcessError: Command '['/usr/bin/gcc', '/tmp/tmpmb2st6kj/main.c', '-O3', '-I/usr/local/cuda/include', '-I/usr/include/python3.10', '-I/tmp/tmpmb2st6kj', '-shared', '-fPIC', '-lcuda', '-o', '/tmp/tmpmb2st6kj/matmul_248_kernel.cpython-310-x86_64-linux-gnu.so', '-L/usr/lib/x86_64-linux-gnu']' returned non-zero exit status 1.
```
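
Note on the error above: the gcc command fails because Triton JIT-builds a small C stub (the generated /tmp/.../main.c) that does `#include <Python.h>`, and that header ships with the CPython development package, which does not appear to be installed in this environment. Below is a minimal diagnostic sketch to confirm that; the Debian/Ubuntu layout and the `python3.10-dev` package name are assumptions, not something confirmed in this thread.

```python
# Minimal sketch (assumption: Debian/Ubuntu-style layout, as suggested by the
# -I/usr/include/python3.10 flag in the gcc command above). It only checks
# whether the Python.h header that Triton's stub build includes is present.
import os
import sysconfig

include_dir = sysconfig.get_paths()["include"]       # e.g. /usr/include/python3.10
header_path = os.path.join(include_dir, "Python.h")
print(f"{header_path}: {'found' if os.path.exists(header_path) else 'MISSING'}")

# If this reports MISSING, installing the interpreter's dev headers (for
# example `apt-get install python3.10-dev` on Debian/Ubuntu -- an assumed
# package name, match it to your Python version) is the usual remedy.
```

One further observation from the log: the gcc invocation uses -I/usr/include/python3.10 while the traceback shows frames from both python3.9 and python3.10 dist-packages, so whichever interpreter Triton actually runs under needs its matching dev headers installed.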

eRoad-f2e commented 1 year ago

I'm hitting the same error.