Status: Closed — this issue was closed by keflavich 2 years ago.
I'm reading, manipulating, then writing some FITS cubes, and I get the following:
--------------------------------------------------------------------------- TypeError Traceback (most recent call last) /scratch/local/25260084/ipykernel_19787/2012730627.py in <module> 20 assert scube._beam is not None 21 scube.to(u.K) ---> 22 rcube = scube.reproject(target_header) 23 rcube.write(outfile, overwrite=True) /blue/adamginsburg/adamginsburg/repos/spectral-cube/spectral_cube/utils.py in wrapper(self, *args, **kwargs) 47 PossiblySlowWarning 48 ) ---> 49 return function(self, *args, **kwargs) 50 return wrapper 51 /blue/adamginsburg/adamginsburg/repos/spectral-cube/spectral_cube/spectral_cube.py in reproject(self, header, order, use_memmap, filled) 2658 2659 if filled: -> 2660 data = self.unitless_filled_data[:] 2661 else: 2662 data = self._data /blue/adamginsburg/adamginsburg/repos/spectral-cube/spectral_cube/cube_utils.py in __getitem__(self, view) 231 result = self._func(self._other, view) 232 if isinstance(result, da.Array): --> 233 result = result.compute() 234 return result 235 /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/base.py in compute(self, **kwargs) 286 dask.base.compute 287 """ --> 288 (result,) = compute(self, traverse=False, **kwargs) 289 return result 290 /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/base.py in compute(*args, **kwargs) 568 postcomputes.append(x.__dask_postcompute__()) 569 --> 570 results = schedule(dsk, keys, **kwargs) 571 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)]) 572 /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, pool, **kwargs) 77 pool = MultiprocessingPoolExecutor(pool) 78 ---> 79 results = get_async( 80 pool.submit, 81 pool._max_workers, /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/local.py in get_async(submit, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, 
callbacks, dumps, loads, chunksize, **kwargs) 505 _execute_task(task, data) # Re-execute locally 506 else: --> 507 raise_exception(exc, tb) 508 res, worker_id = loads(res_info) 509 state["cache"][key] = res /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/local.py in reraise(exc, tb) 313 if exc.__traceback__ is not tb: 314 raise exc.with_traceback(tb) --> 315 raise exc 316 317 /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception) 218 try: 219 task, data = loads(task_info) --> 220 result = _execute_task(task, data) 221 id = get_id() 222 result = dumps((result, id)) /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/core.py in _execute_task(arg, cache, dsk) 117 # temporaries by their reference count and can execute certain 118 # operations in-place. --> 119 return func(*(_execute_task(a, cache) for a in args)) 120 elif not ishashable(arg): 121 return arg /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/array/core.py in getter(a, b, asarray, lock) 100 lock.acquire() 101 try: --> 102 c = a[b] 103 # Below we special-case `np.matrix` to force a conversion to 104 # `np.ndarray` and preserve original Dask behavior for `getter`, /blue/adamginsburg/adamginsburg/repos/spectral-cube/spectral_cube/dask_spectral_cube.py in __getitem__(self, view) 197 return 0. 
198 else: --> 199 return self._mask._filled(data=self._data, 200 view=view, 201 wcs=self._wcs, /blue/adamginsburg/adamginsburg/repos/spectral-cube/spectral_cube/masks.py in _filled(self, data, wcs, fill, view, use_memmap, **kwargs) 238 ex = self.exclude(data=data, wcs=wcs, view=view, **kwargs) 239 --> 240 return np.ma.masked_array(sliced_data, mask=ex).filled(fill) 241 242 def __and__(self, other): ~/.local/lib/python3.9/site-packages/numpy/ma/core.py in __new__(cls, data, mask, dtype, copy, subok, ndmin, fill_value, keep_mask, hard_mask, shrink, order) 2827 """ 2828 # Process data. -> 2829 _data = np.array(data, dtype=dtype, copy=copy, 2830 order=order, subok=True, ndmin=ndmin) 2831 _baseclass = getattr(data, '_baseclass', type(_data)) /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/array/core.py in __array__(self, dtype, **kwargs) 1532 1533 def __array__(self, dtype=None, **kwargs): -> 1534 x = self.compute() 1535 if dtype and x.dtype != dtype: 1536 x = x.astype(dtype) /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/base.py in compute(self, **kwargs) 286 dask.base.compute 287 """ --> 288 (result,) = compute(self, traverse=False, **kwargs) 289 return result 290 /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/base.py in compute(*args, **kwargs) 568 postcomputes.append(x.__dask_postcompute__()) 569 --> 570 results = schedule(dsk, keys, **kwargs) 571 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)]) 572 /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, pool, **kwargs) 77 pool = MultiprocessingPoolExecutor(pool) 78 ---> 79 results = get_async( 80 pool.submit, 81 pool._max_workers, /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/local.py in get_async(submit, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, 
raise_exception, callbacks, dumps, loads, chunksize, **kwargs) 505 _execute_task(task, data) # Re-execute locally 506 else: --> 507 raise_exception(exc, tb) 508 res, worker_id = loads(res_info) 509 state["cache"][key] = res /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/local.py in reraise(exc, tb) 313 if exc.__traceback__ is not tb: 314 raise exc.with_traceback(tb) --> 315 raise exc 316 317 /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception) 218 try: 219 task, data = loads(task_info) --> 220 result = _execute_task(task, data) 221 id = get_id() 222 result = dumps((result, id)) /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/core.py in _execute_task(arg, cache, dsk) 117 # temporaries by their reference count and can execute certain 118 # operations in-place. --> 119 return func(*(_execute_task(a, cache) for a in args)) 120 elif not ishashable(arg): 121 return arg /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/optimization.py in __call__(self, *args) 967 if not len(args) == len(self.inkeys): 968 raise ValueError("Expected %d args, got %d" % (len(self.inkeys), len(args))) --> 969 return core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args))) 970 971 def __reduce__(self): /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/core.py in get(dsk, out, cache) 147 for key in toposort(dsk): 148 task = dsk[key] --> 149 result = _execute_task(task, cache) 150 cache[key] = result 151 result = _execute_task(out, cache) /orange/adamginsburg/miniconda3/envs/python39/lib/python3.9/site-packages/dask/core.py in _execute_task(arg, cache, dsk) 117 # temporaries by their reference count and can execute certain 118 # operations in-place. 
--> 119 return func(*(_execute_task(a, cache) for a in args)) 120 elif not ishashable(arg): 121 return arg /blue/adamginsburg/adamginsburg/repos/spectral-cube/spectral_cube/dask_spectral_cube.py in convfunc(img, beam, **kwargs) 1546 1547 if needs_beam_ratio: -> 1548 out[index] *= beam_ratio_factors[index] 1549 1550 return out TypeError: ufunc 'multiply' output (typecode 'O') could not be coerced to provided output parameter (typecode 'f') according to the casting rule ''same_kind''
In summary: while reading, manipulating, and then writing FITS cubes, the `reproject` call fails with `TypeError: ufunc 'multiply' output (typecode 'O') could not be coerced to provided output parameter (typecode 'f')`, raised from the in-place beam-ratio multiplication in `dask_spectral_cube.py`.