gbencke / pyAstroTrader

Machine Learning + Financial Markets + Astrology = A Match made in Heaven
111 stars 28 forks source link

Getting error in Predict.price.change.ipynb #6

Open mumernasim opened 4 years ago

mumernasim commented 4 years ago

Hi, I am getting an error in Predict.price.change.ipynb at the following line (#9), shown below:

StockPrices[column_name] = StockPricesDask.map_partitions(lambda df : df.apply(lambda x : int(get_degree_for_planet(x, current_planet) / 3), axis =1), meta='int').compute(scheduler='processes')

Error:

KeyError Traceback (most recent call last)

in 5 column_name="ASTRO_{}_POSITION".format(PLANETS[current_planet]).upper() 6 StockPricesDask = dd.from_pandas(StockPrices, npartitions=NPARTITIONS) ----> 7 StockPrices[column_name] = StockPricesDask.map_partitions(lambda df : df.apply(lambda x : int(get_degree_for_planet(x, current_planet) / 3), axis =1), meta='int').compute(scheduler='processes') 8 StockPrices[column_name] = pd.to_numeric(StockPrices[column_name], downcast='float', errors='coerce') 9 astro_columns.append(column_name) ~/anaconda3/lib/python3.7/site-packages/dask/base.py in compute(self, **kwargs) 164 dask.base.compute 165 """ --> 166 (result,) = compute(self, traverse=False, **kwargs) 167 return result 168 ~/anaconda3/lib/python3.7/site-packages/dask/base.py in compute(*args, **kwargs) 442 postcomputes.append(x.__dask_postcompute__()) 443 --> 444 results = schedule(dsk, keys, **kwargs) 445 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)]) 446 ~/anaconda3/lib/python3.7/site-packages/dask/multiprocessing.py in get(dsk, keys, num_workers, func_loads, func_dumps, optimize_graph, pool, **kwargs) 216 pack_exception=pack_exception, 217 raise_exception=reraise, --> 218 **kwargs 219 ) 220 finally: ~/anaconda3/lib/python3.7/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs) 484 _execute_task(task, data) # Re-execute locally 485 else: --> 486 raise_exception(exc, tb) 487 res, worker_id = loads(res_info) 488 state["cache"][key] = res ~/anaconda3/lib/python3.7/site-packages/dask/local.py in reraise(exc, tb) 314 if exc.__traceback__ is not tb: 315 raise exc.with_traceback(tb) --> 316 raise exc 317 318 ~/anaconda3/lib/python3.7/site-packages/dask/local.py in execute_task() 220 try: 221 task, data = loads(task_info) --> 222 result = _execute_task(task, data) 223 id = get_id() 224 result = dumps((result, id)) 
~/anaconda3/lib/python3.7/site-packages/dask/core.py in _execute_task() 119 # temporaries by their reference count and can execute certain 120 # operations in-place. --> 121 return func(*(_execute_task(a, cache) for a in args)) 122 elif not ishashable(arg): 123 return arg ~/anaconda3/lib/python3.7/site-packages/dask/optimization.py in __call__() 986 if not len(args) == len(self.inkeys): 987 raise ValueError("Expected %d args, got %d" % (len(self.inkeys), len(args))) --> 988 return core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args))) 989 990 def __reduce__(self): ~/anaconda3/lib/python3.7/site-packages/dask/core.py in get() 149 for key in toposort(dsk): 150 task = dsk[key] --> 151 result = _execute_task(task, cache) 152 cache[key] = result 153 result = _execute_task(out, cache) ~/anaconda3/lib/python3.7/site-packages/dask/core.py in _execute_task() 119 # temporaries by their reference count and can execute certain 120 # operations in-place. --> 121 return func(*(_execute_task(a, cache) for a in args)) 122 elif not ishashable(arg): 123 return arg ~/anaconda3/lib/python3.7/site-packages/dask/utils.py in apply() 28 def apply(func, args, kwargs=None): 29 if kwargs: ---> 30 return func(*args, **kwargs) 31 else: 32 return func(*args) ~/anaconda3/lib/python3.7/site-packages/dask/dataframe/core.py in apply_and_enforce() 5129 func = kwargs.pop("_func") 5130 meta = kwargs.pop("_meta") -> 5131 df = func(*args, **kwargs) 5132 if is_dataframe_like(df) or is_series_like(df) or is_index_like(df): 5133 if not len(df): in () 5 column_name="ASTRO_{}_POSITION".format(PLANETS[current_planet]).upper() 6 StockPricesDask = dd.from_pandas(StockPrices, npartitions=NPARTITIONS) ----> 7 StockPrices[column_name] = StockPricesDask.map_partitions(lambda df : df.apply(lambda x : int(get_degree_for_planet(x, current_planet) / 3), axis =1), meta='int').compute(scheduler='processes') 8 StockPrices[column_name] = pd.to_numeric(StockPrices[column_name], downcast='float', errors='coerce') 9 
astro_columns.append(column_name) ~/anaconda3/lib/python3.7/site-packages/pandas/core/frame.py in apply() 6876 kwds=kwds, 6877 ) -> 6878 return op.get_result() 6879 6880 def applymap(self, func) -> "DataFrame": ~/anaconda3/lib/python3.7/site-packages/pandas/core/apply.py in get_result() 184 return self.apply_raw() 185 --> 186 return self.apply_standard() 187 188 def apply_empty_result(self): ~/anaconda3/lib/python3.7/site-packages/pandas/core/apply.py in apply_standard() 294 try: 295 result = libreduction.compute_reduction( --> 296 values, self.f, axis=self.axis, dummy=dummy, labels=labels 297 ) 298 except ValueError as err: pandas/_libs/reduction.pyx in pandas._libs.reduction.compute_reduction() pandas/_libs/reduction.pyx in pandas._libs.reduction.Reducer.get_result() in () 5 column_name="ASTRO_{}_POSITION".format(PLANETS[current_planet]).upper() 6 StockPricesDask = dd.from_pandas(StockPrices, npartitions=NPARTITIONS) ----> 7 StockPrices[column_name] = StockPricesDask.map_partitions(lambda df : df.apply(lambda x : int(get_degree_for_planet(x, current_planet) / 3), axis =1), meta='int').compute(scheduler='processes') 8 StockPrices[column_name] = pd.to_numeric(StockPrices[column_name], downcast='float', errors='coerce') 9 astro_columns.append(column_name) ~/Downloads/pyAstroTrader/notebooks/helpers.py in get_degree_for_planet() 195 196 def get_degree_for_planet(row, planet): --> 197 c_chart = charts[row['CorrectedDate']] 198 return get_degree(c_chart, planet) 199 KeyError: '2020-05-19'
atakhadivi commented 2 years ago

I have the same error.