Open dapengsmith opened 3 weeks ago
I have the same problem: Exo cannot run on Linux Kubuntu 22.04 without an Nvidia GPU. The machine is an HP ProLiant DL380 Gen9 with 2 x Xeon E5-2660 v4, 374 GB RAM, and a 256 GB NVMe SSD.
uname -r 6.8.0-48-generic python --version Python 3.12.7 gcc --version gcc (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0
sudo apt install build-essential pip install torch sudo apt install clang
(exo) gpetrov@ProL9:~/ai/exo$ exo run llama-3.1-70b --prompt "What is the meaning of exo?" Selected inference engine: None
/ \ \/ / \ | /> < () | \//\___/
Detected system: Linux Inference engine name after selection: tinygrad Using inference engine: TinygradDynamicShardInferenceEngine with shard downloader: HFShardDownloader [64782, 60234, 60309, 53504, 55645, 58698, 59989, 49565, 57391, 51424, 60610, 65333, 63266] Chat interface started:
What is the meaning of exo?<|eot_id|><|start_header_id|>assistant<|end_header_id|>
Removing download task for Shard(model_id='NousResearch/Meta-Llama-3.1-70B-Instruct', start_layer=0, end_layer=79, nlayers=80): True 0%| | 0/724 [00:00<?, ?it/s] ╭─────────────────────────────────────────────── Exo Cluster (1 node) ───────────────────────────────────────────────╮ │ │ │ ____ _ │ │ / \ \/ / \ │ │ | /> < () | │ │ \//\_/ │ │ │ │ │ │ Web Chat URL (tinychat): http://127.0.0.1:8000 │ │ ChatGPT API endpoint: http://127.0.0.1:8000/v1/chat/completions │ │ GPU poor ▼ GPU rich │ │ [🟥🟥🟥🟥🟥🟥🟥🟥🟧🟧🟧🟧🟧🟧🟧🟨🟨🟨🟨🟨🟨🟨🟨🟩🟩🟩🟩🟩🟩🟩] │ │ 0.00 TFLOPS │ │ ▲ │ │ │ │ │ │ │ │ │ │ │ │ │ │ │ │ │ ╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ │ 💬️ What is the meaning of exo? │ │ │ │ 🤖 │ │ │ │ │ loaded weights in 244.51 ms, 0.13 GB loaded at 0.55 GB/s Error processing prompt: Command '['clang', '-shared', '-march=native', '-O2', '-Wall', '-Werror', '-x', 'c', '-fPIC', '-ffreestanding', '-nostdlib', '-', '-o', '/tmp/tmpv8ke8y1y']' returned non-zero exit status 1. 
Traceback (most recent call last): File "/home/gpetrov/ai/exo/exo/main.py", line 192, in run_model_cli await node.process_prompt(shard, prompt, None, request_id=request_id) File "/home/gpetrov/ai/exo/exo/orchestration/standard_node.py", line 123, in process_prompt resp = await self._process_prompt(base_shard, prompt, image_str, request_id, inference_state) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gpetrov/ai/exo/exo/orchestration/standard_node.py", line 159, in _process_prompt result, inference_state, is_finished = await self.inference_engine.infer_prompt(request_id, shard, prompt, image_str, inference_state=inference_state) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gpetrov/ai/exo/exo/inference/tinygrad/inference.py", line 59, in infer_prompt await self.ensure_shard(shard) File "/home/gpetrov/ai/exo/exo/inference/tinygrad/inference.py", line 97, in ensure_shard self.model = await asyncio.get_event_loop().run_in_executor(self.executor, build_transformer, model_path, shard, "8B" if "8b" in shard.model_id.lower() else "70B") ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gpetrov/dev/miniconda3/envs/exo/lib/python3.12/concurrent/futures/thread.py", line 58, in run result = self.fn(*self.args, *self.kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gpetrov/ai/exo/exo/inference/tinygrad/inference.py", line 48, in build_transformer load_state_dict(model, weights, strict=False, consume=False) # consume=True ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gpetrov/dev/miniconda3/envs/exo/lib/python3.12/site-packages/tinygrad/nn/state.py", line 129, in load_state_dict else: v.replace(state_dict[k].to(v.device)).realize() ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File 
"/home/gpetrov/dev/miniconda3/envs/exo/lib/python3.12/site-packages/tinygrad/tensor.py", line 3500, in _wrapper ret = fn(args, *kwargs) ^^^^^^^^^^^^^^^^^^^ File "/home/gpetrov/dev/miniconda3/envs/exo/lib/python3.12/site-packages/tinygrad/tensor.py", line 213, in realize run_schedule(self.schedule_with_vars(*lst), do_update_stats=do_update_stats) File "/home/gpetrov/dev/miniconda3/envs/exo/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 222, in run_schedule for ei in lower_schedule(schedule): ^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gpetrov/dev/miniconda3/envs/exo/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 215, in lower_schedule raise e File "/home/gpetrov/dev/miniconda3/envs/exo/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 209, in lower_schedule try: yield lower_schedule_item(si) ^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gpetrov/dev/miniconda3/envs/exo/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 193, in lower_schedule_item runner = get_runner(si.outputs[0].device, si.ast) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gpetrov/dev/miniconda3/envs/exo/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 162, in get_runner method_cache[ckey] = method_cache[bkey] = ret = CompiledRunner(replace(prg, dname=dname)) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gpetrov/dev/miniconda3/envs/exo/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 84, in init__ self.lib:bytes = precompiled if precompiled is not None else Device[p.dname].compiler.compile_cached(p.src) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gpetrov/dev/miniconda3/envs/exo/lib/python3.12/site-packages/tinygrad/device.py", line 183, in compile_cached lib = self.compile(src) ^^^^^^^^^^^^^^^^^ File "/home/gpetrov/dev/miniconda3/envs/exo/lib/python3.12/site-packages/tinygrad/runtime/ops_clang.py", line 15, in compile subprocess.check_output(['clang', '-shared', self.args, '-O2', '-Wall', '-Werror', 
'-x', 'c', '-fPIC', '-ffreestanding', '-nostdlib', File "/home/gpetrov/dev/miniconda3/envs/exo/lib/python3.12/subprocess.py", line 466, in check_output return run(popenargs, stdout=PIPE, timeout=timeout, check=True, ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/gpetrov/dev/miniconda3/envs/exo/lib/python3.12/subprocess.py", line 571, in run raise CalledProcessError(retcode, process.args, subprocess.CalledProcessError: Command '['clang', '-shared', '-march=native', '-O2', '-Wall', '-Werror', '-x', 'c', '-fPIC', '-ffreestanding', '-nostdlib', '-', '-o', '/tmp/tmpv8ke8y1y']' returned non-zero exit status 1. Received exit signal SIGTERM... Thank you for using exo.
/ \ \/ / \ | /> < () | \//\___/
Cancelling 4 outstanding tasks ╭─────────────────────────────────────────────── Exo Cluster (1 node) ───────────────────────────────────────────────╮ │ │ │ │ │ / \ \/ / \ │ │ | /> < () | │ │ \//\___/ │ │ │ │ │ │ Web Chat URL (tinychat): http://127.0.0.1:8000 │ │ ChatGPT API endpoint: http://127.0.0.1:8000/v1/chat/completions │ │ GPU poor ▼ GPU rich │ │ [🟥🟥🟥🟥🟥🟥🟥🟥🟧🟧🟧🟧🟧🟧🟧🟨🟨🟨🟨🟨🟨🟨🟨🟩🟩🟩🟩🟩🟩🟩] │ │ 0.00 TFLOPS │ │ ▲ │ │ │ │ │ │ │ │ │ │ │ │ │ │ │ │ │ ╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ ╭────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮ │ 💬️ What is the meaning of exo? │ │ │ │ 🤖 │ │ │ │ │ │ │ │ │ │ │ │ │ │ │ │ │ │ │ │ │ ╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯(exo) gpetrov@ProL9:~/ai/exo$
Same specs as the person above, but on an E5-2650. Is this a clang error — an AVX thing?
"loaded weights in 119.39 ms, 0.03 GB loaded at 0.28 GB/s Traceback (most recent call last): File "/usr/local/lib/python3.12/site-packages/exo/api/chatgpt_api.py", line 275, in handle_post_chat_completions await asyncio.wait_for(asyncio.shield(asyncio.create_task(self.node.process_prompt(shard, prompt, image_str, request_id=request_id))), timeout=self.response_timeout) File "/usr/local/lib/python3.12/asyncio/tasks.py", line 520, in wait_for return await fut ^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/exo/orchestration/standard_node.py", line 123, in process_prompt resp = await self._process_prompt(base_shard, prompt, image_str, request_id, inference_state) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/exo/orchestration/standard_node.py", line 159, in _process_prompt result, inference_state, is_finished = await self.inference_engine.infer_prompt(request_id, shard, prompt, image_str, inference_state=inference_state) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/exo/inference/tinygrad/inference.py", line 59, in infer_prompt await self.ensure_shard(shard) File "/usr/local/lib/python3.12/site-packages/exo/inference/tinygrad/inference.py", line 97, in ensure_shard self.model = await asyncio.get_event_loop().run_in_executor(self.executor, build_transformer, model_path, shard, "8B" if "8b" in shard.model_id.lower() else "70B") ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/concurrent/futures/thread.py", line 58, in run result = self.fn(*self.args, *self.kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/exo/inference/tinygrad/inference.py", line 48, in build_transformer load_state_dict(model, 
weights, strict=False, consume=False) # consume=True ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/tinygrad/nn/state.py", line 129, in load_state_dict else: v.replace(state_dict[k].to(v.device)).realize() ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/tinygrad/tensor.py", line 3500, in _wrapper ret = fn(args, *kwargs) ^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/tinygrad/tensor.py", line 213, in realize run_schedule(self.schedule_with_vars(lst), do_update_stats=do_update_stats) File "/usr/local/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 222, in run_schedule for ei in lower_schedule(schedule): ^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 215, in lower_schedule raise e File "/usr/local/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 209, in lower_schedule try: yield lower_schedule_item(si) ^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 193, in lower_schedule_item runner = get_runner(si.outputs[0].device, si.ast) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 162, in get_runner method_cache[ckey] = method_cache[bkey] = ret = CompiledRunner(replace(prg, dname=dname)) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 84, in init self.lib:bytes = precompiled if precompiled is not None else Device[p.dname].compiler.compile_cached(p.src) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/tinygrad/device.py", line 183, in compile_cached lib = self.compile(src) ^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/tinygrad/runtime/ops_clang.py", line 15, in compile subprocess.check_output(['clang', 
'-shared', self.args, '-O2', '-Wall', '-Werror', '-x', 'c', '-fPIC', '-ffreestanding', '-nostdlib', File "/usr/local/lib/python3.12/subprocess.py", line 466, in check_output return run(*popenargs, stdout=PIPE, timeout=timeout, check=True, ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/subprocess.py", line 571, in run raise CalledProcessError(retcode, process.args, subprocess.CalledProcessError: Command '['clang', '-shared', '-march=native', '-O2', '-Wall', '-Werror', '-x', 'c', '-fPIC', '-ffreestanding', '-nostdlib', '-', '-o', '/tmp/tmpvcptigkc']' returned non-zero exit status 1. Deregister callback_id='chatgpt-api-wait-response-b4cd4819-ca83-4193-a5b0-ccb074b3d033' deregistered_callback=None Received request: GET /v1/download/progress Received request: GET /v1/download/progress update_peers: added=[] removed=[] updated=[] unchanged=[] to_disconnect=[] to_connect=[] did_peers_change=False Received request: GET /v1/download/progress Received request: GET /v1/download/progress update_peers: added=[] removed=[] updated=[] unchanged=[] to_disconnect=[] to_connect=[] did_peers_change=False Received request: GET /v1/download/progress Received request: GET /v1/download/progress update_peers: added=[] removed=[] updated=[] unchanged=[] to_disconnect=[] to_connect=[] did_peers_change=False Received request: GET /v1/download/progress Received request: GET /v1/download/progress update_peers: added=[] removed=[] updated=[] unchanged=[] to_disconnect=[] to_connect=[] did_peers_change=False Received request: GET /v1/download/progress ╭──────────────────────────────────────────────────────────────────────── Exo Cluster (1 node) ────────────────────────────────────────────────────────────────────────╮ Received exit signal SIGTERM... Thank you for using exo.
/ \ \/ / \ | /> < () | \//\___/
Cancelling 8 outstanding tasks Server stopped and all connections are closed Received exit signal SIGTERM... Thank you for using exo.
/ \ \/ / \ | /> < () | \//\___/
Cancelling 0 outstanding tasks Traceback (most recent call last): File "/usr/local/lib/python3.12/site-packages/exo/main.py", line 232, in run loop.run_until_complete(main()) File "/usr/local/lib/python3.12/asyncio/base_events.py", line 687, in run_until_complete return future.result() ^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/exo/main.py", line 225, in main await asyncio.Event().wait() File "/usr/local/lib/python3.12/asyncio/locks.py", line 212, in wait await fut asyncio.exceptions.CancelledError
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/bin/exo", line 8, in
There is no GPU (Nvidia card) in this x86 Linux/Ubuntu machine. I installed the latest exo, and it can't work with the LLVM backend. I ran the command below. DEBUG=9 LLVM=1 exo run llama-3.1-8b --prompt "hello, who are you?" --inference-engine tinygrad The error log is below. Could you help check it? Thanks.
What's more, I can run tinygrad successfully on the same host: LLVM=1 python ./examples/gpt2.py
..... xxxxxxxxxxxxxxxxxx
'model.layers.9.self_attn.k_proj.weight': 'model-00002-of-00004.safetensors', 'model.layers.9.self_attn.o_proj.weight': 'model-00002-of-00004.safetensors', 'model.layers.9.self_attn.q_proj.weight': 'model-00002-of-00004.safetensors', 'model.layers.9.self_attn.v_proj.weight': 'model-00002-of-00004.safetensors', 'model.norm.weight': 'model-00004-of-00004.safetensors'} shard=Shard(model_id='mlabonne/Meta-Llama-3.1-8B-Instruct-abliterated', start_layer=0, end_layer=31, n_layers=32) shard_specific_patterns={'model-00003-of-00004.safetensors', 'model-00002-of-00004.safetensors', 'model-00001-of-00004.safetensors', 'model-00004-of-00004.safetensors'} update_peers: added=[] removed=[] updated=[] unchanged=[] to_disconnect=[] to_connect=[] did_peers_change=False update_peers: added=[] removed=[] updated=[] unchanged=[] to_disconnect=[] to_connect=[] did_peers_change=False update_peers: added=[] removed=[] updated=[] unchanged=[] to_disconnect=[] to_connect=[] did_peers_change=False Excluded model param keys for shard=Shard(model_id='mlabonne/Meta-Llama-3.1-8B-Instruct-abliterated', start_layer=0, end_layer=31, n_layers=32): [] 0%| | 0/292 [00:00<?, ?it/s] loaded weights in 48.87 ms, 0.03 GB loaded at 0.69 GB/s Error processing prompt: Traceback (most recent call last): File "/mnt/sdb1/code/github/exo/exo/main.py", line 176, in run_model_cli await node.process_prompt(shard, prompt, None, request_id=request_id) File "/mnt/sdb1/code/github/exo/exo/orchestration/standard_node.py", line 126, in process_prompt resp = await self._process_prompt(base_shard, prompt, image_str, request_id, inference_state) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/mnt/sdb1/code/github/exo/exo/orchestration/standard_node.py", line 162, in _process_prompt result, inference_state, is_finished = await self.inference_engine.infer_prompt(request_id, shard, prompt, image_str, inference_state=inference_state) 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/mnt/sdb1/code/github/exo/exo/inference/tinygrad/inference.py", line 59, in infer_prompt await self.ensure_shard(shard) File "/mnt/sdb1/code/github/exo/exo/inference/tinygrad/inference.py", line 97, in ensure_shard self.model = await asyncio.get_event_loop().run_in_executor(self.executor, build_transformer, model_path, shard, "8B" if "8b" in shard.model_id.lower() else "70B") ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3.12/concurrent/futures/thread.py", line 58, in run result = self.fn(*self.args, *self.kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/mnt/sdb1/code/github/exo/exo/inference/tinygrad/inference.py", line 48, in build_transformer load_state_dict(model, weights, strict=False, consume=False) # consume=True ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/tinygrad/nn/state.py", line 129, in load_state_dict else: v.replace(state_dict[k].to(v.device)).realize() ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/tinygrad/tensor.py", line 3500, in _wrapper ret = fn(args, *kwargs) ^^^^^^^^^^^^^^^^^^^ File "/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/tinygrad/tensor.py", line 213, in realize run_schedule(self.schedule_with_vars(*lst), do_update_stats=do_update_stats) File "/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 222, in run_schedule for ei in lower_schedule(schedule): ^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 215, in lower_schedule raise e File 
"/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 209, in lower_schedule try: yield lower_schedule_item(si) ^^^^^^^^^^^^^^^^^^^^^^^ File "/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 193, in lower_schedule_item runner = get_runner(si.outputs[0].device, si.ast) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/tinygrad/engine/realize.py", line 158, in get_runner prg: Program = get_kernel(Device[dname].renderer, ast).to_program() ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/tinygrad/codegen/kernel.py", line 727, in to_program src = self.opts.render(name:=to_function_name(ansiname:=(name_override if name_override is not None else self.name)), self.uops) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/tinygrad/renderer/llvmir.py", line 141, in render elif uop in {UOps.CAST, UOps.BITCAST}: lvars[u] = cast(bb, lvars[src[0]], src[0].dtype, dtype, bitcast=uop is UOps.BITCAST) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/tinygrad/renderer/llvmir.py", line 21, in cast val = bb[-1].bitcast(bb[-1].shl(bb[-1].sext(val, ir.IntType(32)), ir.Constant(ir.IntType(32), 16)),val, ir.FloatType()) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/llvmlite/ir/builder.py", line 164, in wrapped instr = cls(self.block, opname, val, typ, name) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File 
"/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/llvmlite/ir/instructions.py", line 420, in init super(CastInstr, self).init(parent, typ, op, [val], name=name) File "/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/llvmlite/ir/instructions.py", line 14, in init super(Instruction, self).init(parent, typ, name=name) File "/home/xiaoshi/code/myenvs/exo_py3.12/lib/python3.12/site-packages/llvmlite/ir/values.py", line 539, in init assert isinstance(type, types.Type) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ AssertionError Received exit signal SIGTERM... Thank you for using exo.
/ \ \/ / \ | /> < () | \//\___/
Cancelling 4 outstanding tasks Server stopped and all connections are closed