Closed inklesspen1rus closed 2 months ago
I'm testing on nightly version of rust
rustup 1.27.1 (54dd3d00f 2024-04-24)
info: This is the version for the rustup toolchain manager, not the rustc compiler.
info: The currently active rustc
version is rustc 1.80.0-nightly (ada5e2c7b 2024-05-31)
Also, stable version works fine o_0
rustup 1.27.1 (54dd3d00f 2024-04-24)
info: This is the version for the rustup toolchain manager, not the rustc compiler.
info: The currently active rustc
version is rustc 1.78.0 (9b00956e5 2024-04-29)
Hmm. It seems that Inner::buffer has become null for some reason. But I cannot find any code that seems to make it null. And I cannot reproduce this on other 32-bit targets so I feel it may be a platform issue.
I'm getting a different issue also in crossbeam-deque
targeting wasm32-wasip1-threads
, I'm not sure if the problem is even in crossbeam because I'm always getting memory allocation of 133120 bytes failed
before the actual panic, but it's reproducible on wasmer
, wasmtime
and iwasm
. It's triggered by creating a nucleo Nucleo
instance:
use nucleo::{Config, Nucleo};
fn main() {
let mut nucleo: Nucleo<String> = Nucleo::new(Config::DEFAULT, Arc::new(|| {}), Some(1), 1);
}
cargo b --target=wasm32-wasip1-threads
(printing is a bit racy and changes a bit each time; memory allocation of 133120 bytes failed
prints as thread \
❯ wasmer run ./target/wasm32-wasip1-threads/debug/f.wasm
memory allocation of 133120thread ' bytes failed
<unnamed>' panicked at /home/bbb651/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossbeam-deque-0.8.5/src/deque.rs:64:33error: RuntimeError: unreachable
at abort (f-6c7c88893f5c911f.wasm[1887]:0x6d277)
at std::sys::pal::wasi::helpers::abort_internal::hd621692127b9daa9 (f-6c7c88893f5c911f.wasm[1782]:0x66057)
at std::process::abort::hd54ee46f43c5cd26 (f-6c7c88893f5c911f.wasm[1816]:0x68067)
at std::alloc::rust_oom::hd4bcd7215041ce28 (f-6c7c88893f5c911f.wasm[1855]:0x69c61)
at __rg_oom (f-6c7c88893f5c911f.wasm[1856]:0x69c6f)
at __rust_alloc_error_handler (f-6c7c88893f5c911f.wasm[265]:0xea62)
at alloc::alloc::handle_alloc_error::haced9e87ced732f5 (f-6c7c88893f5c911f.wasm[1939]:0x6eea5)
at nucleo_matcher::matrix::MatrixSlab::new::h615350e8830d8a59 (f-6c7c88893f5c911f.wasm[307]:0x115eb)
at nucleo_matcher::Matcher::new::he6b151bfd4749d34 (f-6c7c88893f5c911f.wasm[309]:0x11668)
at nucleo::worker::Worker<T>::new::{{closure}}::h715eac0a4a8550cf (f-6c7c88893f5c911f.wasm[54]:0x5280)
at core::iter::adapters::map::map_fold::{{closure}}::h4af07fdd8bd953d5 (f-6c7c88893f5c911f.wasm[197]:0xb193)
at core::iter::traits::iterator::Iterator::fold::he753264b94b8d07b (f-6c7c88893f5c911f.wasm[79]:0x6844)
at <core::iter::adapters::map::Map<I,F> as core::iter::traits::iterator::Iterator>::fold::he8d93fa8241deb72 (f-6c7c88893f5c911f.wasm[190]:0xaf0b)
at core::iter::traits::iterator::Iterator::for_each::h924e7efb51717c5f (f-6c7c88893f5c911f.wasm[196]:0xb117)
at alloc::vec::Vec<T,A>::extend_trusted::hea060507b3d2e463 (f-6c7c88893f5c911f.wasm[255]:0xe5e4)
at <alloc::vec::Vec<T,A> as alloc::vec::spec_extend::SpecExtend<T,I>>::spec_extend::h3d99dbe441301a5f (f-6c7c88893f5c911f.wasm[250]:0xe221)
at <alloc::vec::Vec<T> as alloc::vec::spec_from_iter_nested::SpecFromIterNested<T,I>>::from_iter::h4db9eb1b740aa1fe (f-6c7c88893f5c911f.wasm[249]:0xe105)
at <alloc::vec::Vec<T> as alloc::vec::spec_from_iter::SpecFromIter<T,I>>::from_iter::ha93efd3577cb99b8 (f-6c7c88893f5c911f.wasm[260]:0xea04)
at <alloc::vec::Vec<T> as core::iter::traits::collect::FromIterator<T>>::from_iter::hd8b254c338f6c905 (f-6c7c88893f5c911f.wasm[259]:0xe9e3)
at core::iter::traits::iterator::Iterator::collect::hd58476b4627c65f0 (f-6c7c88893f5c911f.wasm[195]:0xb0b6)
at <alloc::boxed::Box<[I]> as core::iter::traits::collect::FromIterator<I>>::from_iter::hd60a35feaf71d91b (f-6c7c88893f5c911f.wasm[92]:0x6fbd)
at core::iter::traits::iterator::Iterator::collect::h9e8c5bb987f62e14 (f-6c7c88893f5c911f.wasm[194]:0xb079)
at nucleo::worker::Worker<T>::new::h5efa456836f88faa (f-6c7c88893f5c911f.wasm[51]:0x4d7e)
at nucleo::Nucleo<T>::new::h63144caa3f818fc9 (f-6c7c88893f5c911f.wasm[217]:0xbf5e)
at f::main::h3a1622b536ea17d9 (f-6c7c88893f5c911f.wasm[95]:0x72a4)
at core::ops::function::FnOnce::call_once::hdd5b7db5cebb71b8 (f-6c7c88893f5c911f.wasm[106]:0x7634)
at std::sys_common::backtrace::__rust_begin_short_backtrace::hcaa5b5e9f10c5d27 (f-6c7c88893f5c911f.wasm[218]:0xc361)
at std::rt::lang_start::{{closure}}::h93b5b4222cdcb8ee (f-6c7c88893f5c911f.wasm[221]:0xc526)
at std::rt::lang_start_internal::h90b030aff06a1679 (f-6c7c88893f5c911f.wasm[1780]:0x65f1d)
at std::rt::lang_start::haf5e3f2b9cb52289 (f-6c7c88893f5c911f.wasm[220]:0xc4c2)
at __main_void (f-6c7c88893f5c911f.wasm[96]:0x72f3)
at _start (f-6c7c88893f5c911f.wasm[10]:0xc60)
╰─▶ 1: RuntimeError: unreachable
at abort (f-6c7c88893f5c911f.wasm[1887]:0x6d277)
at std::sys::pal::wasi::helpers::abort_internal::hd621692127b9daa9 (f-6c7c88893f5c911f.wasm[1782]:0x66057)
at std::process::abort::hd54ee46f43c5cd26 (f-6c7c88893f5c911f.wasm[1816]:0x68067)
at std::alloc::rust_oom::hd4bcd7215041ce28 (f-6c7c88893f5c911f.wasm[1855]:0x69c61)
at __rg_oom (f-6c7c88893f5c911f.wasm[1856]:0x69c6f)
at __rust_alloc_error_handler (f-6c7c88893f5c911f.wasm[265]:0xea62)
at alloc::alloc::handle_alloc_error::haced9e87ced732f5 (f-6c7c88893f5c911f.wasm[1939]:0x6eea5)
at nucleo_matcher::matrix::MatrixSlab::new::h615350e8830d8a59 (f-6c7c88893f5c911f.wasm[307]:0x115eb)
at nucleo_matcher::Matcher::new::he6b151bfd4749d34 (f-6c7c88893f5c911f.wasm[309]:0x11668)
at nucleo::worker::Worker<T>::new::{{closure}}::h715eac0a4a8550cf (f-6c7c88893f5c911f.wasm[54]:0x5280)
at core::iter::adapters::map::map_fold::{{closure}}::h4af07fdd8bd953d5 (f-6c7c88893f5c911f.wasm[197]:0xb193)
at core::iter::traits::iterator::Iterator::fold::he753264b94b8d07b (f-6c7c88893f5c911f.wasm[79]:0x6844)
at <core::iter::adapters::map::Map<I,F> as core::iter::traits::iterator::Iterator>::fold::he8d93fa8241deb72 (f-6c7c88893f5c911f.wasm[190]:0xaf0b)
at core::iter::traits::iterator::Iterator::for_each::h924e7efb51717c5f (f-6c7c88893f5c911f.wasm[196]:0xb117)
at alloc::vec::Vec<T,A>::extend_trusted::hea060507b3d2e463 (f-6c7c88893f5c911f.wasm[255]:0xe5e4)
at <alloc::vec::Vec<T,A> as alloc::vec::spec_extend::SpecExtend<T,I>>::spec_extend::h3d99dbe441301a5f (f-6c7c88893f5c911f.wasm[250]:0xe221)
at <alloc::vec::Vec<T> as alloc::vec::spec_from_iter_nested::SpecFromIterNested<T,I>>::from_iter::h4db9eb1b740aa1fe (f-6c7c88893f5c911f.wasm[249]:0xe105)
at <alloc::vec::Vec<T> as alloc::vec::spec_from_iter::SpecFromIter<T,I>>::from_iter::ha93efd3577cb99b8 (f-6c7c88893f5c911f.wasm[260]:0xea04)
at <alloc::vec::Vec<T> as core::iter::traits::collect::FromIterator<T>>::from_iter::hd8b254c338f6c905 (f-6c7c88893f5c911f.wasm[259]:0xe9e3)
at core::iter::traits::iterator::Iterator::collect::hd58476b4627c65f0 (f-6c7c88893f5c911f.wasm[195]:0xb0b6)
at <alloc::boxed::Box<[I]> as core::iter::traits::collect::FromIterator<I>>::from_iter::hd60a35feaf71d91b (f-6c7c88893f5c911f.wasm[92]:0x6fbd)
at core::iter::traits::iterator::Iterator::collect::h9e8c5bb987f62e14 (f-6c7c88893f5c911f.wasm[194]:0xb079)
at nucleo::worker::Worker<T>::new::h5efa456836f88faa (f-6c7c88893f5c911f.wasm[51]:0x4d7e)
at nucleo::Nucleo<T>::new::h63144caa3f818fc9 (f-6c7c88893f5c911f.wasm[217]:0xbf5e)
at f::main::h3a1622b536ea17d9 (f-6c7c88893f5c911f.wasm[95]:0x72a4)
at core::ops::function::FnOnce::call_once::hdd5b7db5cebb71b8 (f-6c7c88893f5c911f.wasm[106]:0x7634)
at std::sys_common::backtrace::__rust_begin_short_backtrace::hcaa5b5e9f10c5d27 (f-6c7c88893f5c911f.wasm[218]:0xc361)
at std::rt::lang_start::{{closure}}::h93b5b4222cdcb8ee (f-6c7c88893f5c911f.wasm[221]:0xc526)
at std::rt::lang_start_internal::h90b030aff06a1679 (f-6c7c88893f5c911f.wasm[1780]:0x65f1d)
at std::rt::lang_start::haf5e3f2b9cb52289 (f-6c7c88893f5c911f.wasm[220]:0xc4c2)
at __main_void (f-6c7c88893f5c911f.wasm[96]:0x72f3)
at _start (f-6c7c88893f5c911f.wasm[10]:0xc60)
As I'm writing this, I tried to simplify the example and slightly changed the code from Some(1)
to None
, causing nucleo to change the number of threads in the thread pool:
// ...
let worker_threads = worker_threads
.unwrap_or_else(|| std::thread::available_parallelism().map_or(4, |it| it.get()));
let pool = rayon::ThreadPoolBuilder::new()
.thread_name(|i| format!("nucleo worker {i}"))
.num_threads(worker_threads)
.build()
.expect("creating threadpool failed");
// ...
and managed to reproduce this exact panic:
❯ wasmer run ./target/wasm32-wasip1-threads/debug/f.wasm
thread '<unnamed>' panicked at library/core/src/panicking.rs:156:5:
unsafe precondition(s) violated: slice::from_raw_parts requires the pointer to be aligned and non-null, and the total size of the slice not to exceed `isize::MAX`
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
thread caused non-unwinding panic. aborting.
error: RuntimeError: out of bounds memory access
at dlmalloc (f-6c7c88893f5c911f.wasm[1869]:0x6a2f7)
at realloc (f-6c7c88893f5c911f.wasm[1873]:0x6c812)
at __rdl_realloc (f-6c7c88893f5c911f.wasm[1833]:0x68bdd)
at __rust_realloc (f-6c7c88893f5c911f.wasm[263]:0xea31)
at alloc::raw_vec::finish_grow::h5db88e7aa4d3da4d (f-6c7c88893f5c911f.wasm[1938]:0x6ee2a)
at alloc::ffi::c_str::CString::_from_vec_unchecked::h7c2114d19a138e92 (f-6c7c88893f5c911f.wasm[1943]:0x6f232)
at <T as alloc::ffi::c_str::CString::new::SpecNewImpl>::spec_new_impl::h8d6cd8bb46dd2544 (f-6c7c88893f5c911f.wasm[641]:0x2abf3)
at std::thread::Builder::spawn_unchecked_::hbd37aeff6ecfafeb (f-6c7c88893f5c911f.wasm[805]:0x32227)
at std::thread::Builder::spawn_unchecked::hed05dc885d9588c1 (f-6c7c88893f5c911f.wasm[804]:0x31d4b)
at std::thread::Builder::spawn::hd1fdc04ae3b662f0 (f-6c7c88893f5c911f.wasm[814]:0x342fc)
at <rayon_core::registry::DefaultSpawn as rayon_core::registry::ThreadSpawn>::spawn::ha25b26c66933259e (f-6c7c88893f5c911f.wasm[573]:0x259c7)
at rayon_core::registry::Registry::new::h36b778a7deba5b0c (f-6c7c88893f5c911f.wasm[575]:0x26b7f)
at rayon_core::thread_pool::ThreadPool::build::h4c26f42651824f8c (f-6c7c88893f5c911f.wasm[491]:0x1c5ad)
at rayon_core::ThreadPoolBuilder<S>::build::h748d8853ee8513f4 (f-6c7c88893f5c911f.wasm[754]:0x2fc72)
at nucleo::worker::Worker<T>::new::h5efa456836f88faa (f-6c7c88893f5c911f.wasm[51]:0x4cd0)
at nucleo::Nucleo<T>::new::h63144caa3f818fc9 (f-6c7c88893f5c911f.wasm[217]:0xbf53)
at f::main::h3a1622b536ea17d9 (f-6c7c88893f5c911f.wasm[95]:0x7299)
at core::ops::function::FnOnce::call_once::hdd5b7db5cebb71b8 (f-6c7c88893f5c911f.wasm[106]:0x7629)
at std::sys_common::backtrace::__rust_begin_short_backtrace::hcaa5b5e9f10c5d27 (f-6c7c88893f5c911f.wasm[218]:0xc356)
at std::rt::lang_start::{{closure}}::h93b5b4222cdcb8ee (f-6c7c88893f5c911f.wasm[221]:0xc51b)
at std::rt::lang_start_internal::h90b030aff06a1679 (f-6c7c88893f5c911f.wasm[1780]:0x65f12)
at std::rt::lang_start::haf5e3f2b9cb52289 (f-6c7c88893f5c911f.wasm[220]:0xc4b7)
at __main_void (f-6c7c88893f5c911f.wasm[96]:0x72e8)
at _start (f-6c7c88893f5c911f.wasm[10]:0xc60)
╰─▶ 1: RuntimeError: out of bounds memory access
at dlmalloc (f-6c7c88893f5c911f.wasm[1869]:0x6a2f7)
at realloc (f-6c7c88893f5c911f.wasm[1873]:0x6c812)
at __rdl_realloc (f-6c7c88893f5c911f.wasm[1833]:0x68bdd)
at __rust_realloc (f-6c7c88893f5c911f.wasm[263]:0xea31)
at alloc::raw_vec::finish_grow::h5db88e7aa4d3da4d (f-6c7c88893f5c911f.wasm[1938]:0x6ee2a)
at alloc::ffi::c_str::CString::_from_vec_unchecked::h7c2114d19a138e92 (f-6c7c88893f5c911f.wasm[1943]:0x6f232)
at <T as alloc::ffi::c_str::CString::new::SpecNewImpl>::spec_new_impl::h8d6cd8bb46dd2544 (f-6c7c88893f5c911f.wasm[641]:0x2abf3)
at std::thread::Builder::spawn_unchecked_::hbd37aeff6ecfafeb (f-6c7c88893f5c911f.wasm[805]:0x32227)
at std::thread::Builder::spawn_unchecked::hed05dc885d9588c1 (f-6c7c88893f5c911f.wasm[804]:0x31d4b)
at std::thread::Builder::spawn::hd1fdc04ae3b662f0 (f-6c7c88893f5c911f.wasm[814]:0x342fc)
at <rayon_core::registry::DefaultSpawn as rayon_core::registry::ThreadSpawn>::spawn::ha25b26c66933259e (f-6c7c88893f5c911f.wasm[573]:0x259c7)
at rayon_core::registry::Registry::new::h36b778a7deba5b0c (f-6c7c88893f5c911f.wasm[575]:0x26b7f)
at rayon_core::thread_pool::ThreadPool::build::h4c26f42651824f8c (f-6c7c88893f5c911f.wasm[491]:0x1c5ad)
at rayon_core::ThreadPoolBuilder<S>::build::h748d8853ee8513f4 (f-6c7c88893f5c911f.wasm[754]:0x2fc72)
at nucleo::worker::Worker<T>::new::h5efa456836f88faa (f-6c7c88893f5c911f.wasm[51]:0x4cd0)
at nucleo::Nucleo<T>::new::h63144caa3f818fc9 (f-6c7c88893f5c911f.wasm[217]:0xbf53)
at f::main::h3a1622b536ea17d9 (f-6c7c88893f5c911f.wasm[95]:0x7299)
at core::ops::function::FnOnce::call_once::hdd5b7db5cebb71b8 (f-6c7c88893f5c911f.wasm[106]:0x7629)
at std::sys_common::backtrace::__rust_begin_short_backtrace::hcaa5b5e9f10c5d27 (f-6c7c88893f5c911f.wasm[218]:0xc356)
at std::rt::lang_start::{{closure}}::h93b5b4222cdcb8ee (f-6c7c88893f5c911f.wasm[221]:0xc51b)
at std::rt::lang_start_internal::h90b030aff06a1679 (f-6c7c88893f5c911f.wasm[1780]:0x65f12)
at std::rt::lang_start::haf5e3f2b9cb52289 (f-6c7c88893f5c911f.wasm[220]:0xc4b7)
at __main_void (f-6c7c88893f5c911f.wasm[96]:0x72e8)
at _start (f-6c7c88893f5c911f.wasm[10]:0xc60)
Closing in favor of upstream bug report: https://github.com/rust-lang/rust/issues/126747
Hi! You are AWESOME :heart: ! But... Tried to run following code with wasmer and wasmtime on wasm32-wasip1-threads target and got panic
Interesting thing: the release profile doesn't panic
Code
Results
Environment
wasmer 4.3.1 wasmtime-cli 21.0.1 (cedf9aa0f 2024-05-22) inxi -vvva: CPU: 6-core AMD Ryzen 5 5625U with Radeon Graphics (-MT MCP-) speed/min/max: 2039/400/4388 MHz Kernel: 6.9.0-1-MANJARO x86_64 Up: 3d 2h 23m Mem: 9.93/15.01 GiB (66.2%) Storage: 476.94 GiB (24.8% used) Procs: 650 Shell: Zsh 5.9 inxi: 3.3.34