Closed keflavich closed 2 years ago
This went away when I stopped using the nthreads=32
setup. I only had 8 cores available so maybe that's related?
Can you try commenting out the try...except TypeError as I think this is hiding the real error?
I'm sure you're right. Tough to do right now; I'm running stuff remotely while traveling.
I was also going to try `save_to_tmp_dir`
in the last step before writing to see if that helped.
(also, `num_workers=8` caused the same problem)
`save_to_tmp_dir`
does seem to suppress the error.
New set of data, similar but different error:
WARNING: StokesWarning: Cube is a Stokes cube, returning spectral cube for I component [spectral_cube.io.core]
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
/scratch/local/20668739/ipykernel_6333/3761283239.py in <module>
7 with cube.use_dask_scheduler('threads', num_workers=8):
8 cb = cube.beams.common_beam(max_iter=20, max_epsilon=0.01)
----> 9 scube = cube.convolve_to(cb, save_to_tmp_dir=True)
10 scube.write(outfile)
/blue/adamginsburg/adamginsburg/repos/spectral-cube/spectral_cube/dask_spectral_cube.py in wrapper(self, *args, **kwargs)
83 filename = tempfile.mktemp()
84 with dask.config.set(**cube._scheduler_kwargs):
---> 85 cube._data.to_zarr(filename)
86 cube._data = da.from_zarr(filename)
87 return cube
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/array/core.py in to_zarr(self, *args, **kwargs)
2675 See function :func:`dask.array.to_zarr` for parameters.
2676 """
-> 2677 return to_zarr(self, *args, **kwargs)
2678
2679 def to_tiledb(self, uri, *args, **kwargs):
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/array/core.py in to_zarr(arr, url, component, storage_options, overwrite, compute, return_stored, **kwargs)
3418 **kwargs,
3419 )
-> 3420 return arr.store(z, lock=False, compute=compute, return_stored=return_stored)
3421
3422
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/array/core.py in store(self, target, **kwargs)
1595 @wraps(store)
1596 def store(self, target, **kwargs):
-> 1597 r = store([self], [target], **kwargs)
1598
1599 if kwargs.get("return_stored", False):
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/array/core.py in store(sources, targets, lock, regions, compute, return_stored, **kwargs)
1074 elif compute:
1075 store_dsk = HighLevelGraph(layers, dependencies)
-> 1076 compute_as_if_collection(Array, store_dsk, map_keys, **kwargs)
1077 return None
1078
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/base.py in compute_as_if_collection(cls, dsk, keys, scheduler, get, **kwargs)
313 schedule = get_scheduler(scheduler=scheduler, cls=cls, get=get)
314 dsk2 = optimization_function(cls)(dsk, keys, **kwargs)
--> 315 return schedule(dsk2, keys, **kwargs)
316
317
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, pool, **kwargs)
77 pool = MultiprocessingPoolExecutor(pool)
78
---> 79 results = get_async(
80 pool.submit,
81 pool._max_workers,
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/local.py in get_async(submit, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, chunksize, **kwargs)
505 _execute_task(task, data) # Re-execute locally
506 else:
--> 507 raise_exception(exc, tb)
508 res, worker_id = loads(res_info)
509 state["cache"][key] = res
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/local.py in reraise(exc, tb)
313 if exc.__traceback__ is not tb:
314 raise exc.with_traceback(tb)
--> 315 raise exc
316
317
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
218 try:
219 task, data = loads(task_info)
--> 220 result = _execute_task(task, data)
221 id = get_id()
222 result = dumps((result, id))
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/core.py in _execute_task(arg, cache, dsk)
117 # temporaries by their reference count and can execute certain
118 # operations in-place.
--> 119 return func(*(_execute_task(a, cache) for a in args))
120 elif not ishashable(arg):
121 return arg
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/core.py in <genexpr>(.0)
117 # temporaries by their reference count and can execute certain
118 # operations in-place.
--> 119 return func(*(_execute_task(a, cache) for a in args))
120 elif not ishashable(arg):
121 return arg
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/core.py in _execute_task(arg, cache, dsk)
117 # temporaries by their reference count and can execute certain
118 # operations in-place.
--> 119 return func(*(_execute_task(a, cache) for a in args))
120 elif not ishashable(arg):
121 return arg
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/optimization.py in __call__(self, *args)
967 if not len(args) == len(self.inkeys):
968 raise ValueError("Expected %d args, got %d" % (len(self.inkeys), len(args)))
--> 969 return core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args)))
970
971 def __reduce__(self):
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/core.py in get(dsk, out, cache)
147 for key in toposort(dsk):
148 task = dsk[key]
--> 149 result = _execute_task(task, cache)
150 cache[key] = result
151 result = _execute_task(out, cache)
/orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/core.py in _execute_task(arg, cache, dsk)
117 # temporaries by their reference count and can execute certain
118 # operations in-place.
--> 119 return func(*(_execute_task(a, cache) for a in args))
120 elif not ishashable(arg):
121 return arg
/blue/adamginsburg/adamginsburg/repos/spectral-cube/spectral_cube/dask_spectral_cube.py in convfunc(img, beam, **kwargs)
1546
1547 if needs_beam_ratio:
-> 1548 out[index] *= beam_ratio_factors[index]
1549
1550 return out
TypeError: ufunc 'multiply' output (typecode 'O') could not be coerced to provided output parameter (typecode 'f') according to the casting rule ''same_kind''
Is this maybe some sort of endianness issue?
@keflavich -- is this related to #803 and solved in #804?
probably yes!
code: