openego / eTraGo

Optimization of flexibility options for transmission grids based on PyPSA

TypeError: can't multiply sequence by non-int of type 'float' if Clustering is False #636

Open IsGut opened 1 year ago

IsGut commented 1 year ago

Setting clustering to False leads to the error below. The relevant entry in the args dict:
"network_clustering": {... "active": False, # choose if clustering is activated ....}

TypeError                                 Traceback (most recent call last)
Input In [6], in <cell line: 1>()
      1 if __name__ == "__main__":
      2     # execute etrago function
      3     print(datetime.datetime.now())
----> 4     etrago = run_etrago(args, json_path=None)
      5     print(datetime.datetime.now())
      6     etrago.session.close()

File ...:......\git\eTraGo-features-plots-for-paper\etrago\appl.py:624, in run_etrago(args, json_path)
    620 etrago.network.generators_t.p_max_pu.where(etrago.network.generators_t.p_max_pu>1e-7, other=0., inplace=True)
    622 # start linear optimal powerflow calculations
    623 # needs to be adjusted for new sectors
--> 624 etrago.lopf()
    626 # conduct lopf with full complex timeseries for dispatch disaggregation
    627 etrago.dispatch_disaggregation()

File ~\git\jupyter_eda\lib\site-packages\etrago\tools\execute.py:313, in lopf(self)
    303 """ Functions that runs lopf accordning to arguments
    304
    305 Returns
   (...)
    308
    309 """
    311 x = time.time()
--> 313 iterate_lopf(
    314     self,
    315     Constraints(self.args).functionality,
    316     method=self.args['method'])
    318 y = time.time()
    319 z = (y - x) / 60

File ~\git\jupyter_eda\lib\site-packages\etrago\tools\execute.py:242, in iterate_lopf(etrago, extra_functionality, method, dispatch_disaggregation)
    238 n_iter = method['n_iter']
    240 for i in range(1, (1+n_iter)):
--> 242     run_lopf(etrago, extra_functionality, method)
    244     if args['csv_export'] != False:
    245         path = args['csv_export'] + '/lopf_iteration_' + str(i)

File ~\git\jupyter_eda\lib\site-packages\etrago\tools\execute.py:145, in run_lopf(etrago, extra_functionality, method, dispatch_disaggregation)
    142 else:
    144     if method['pyomo']:
--> 145         etrago.network.lopf(
    146             etrago.network.snapshots,
    147             solver_name=etrago.args['solver'],
    148             solver_options=etrago.args['solver_options'],
    149             pyomo=True,
    150             extra_functionality=extra_functionality,
    151             formulation=etrago.args['model_formulation'])
    153 if etrago.network.results["Solver"][0]["Status"] != 'ok':
    154     raise Exception('LOPF not solved.')

File ~\git\jupyter_eda\lib\site-packages\pypsa\components.py:769, in Network.lopf(self, snapshots, pyomo, solver_name, solver_options, solver_logfile, formulation, keep_files, extra_functionality, multi_investment_periods, **kwargs)
    763 if pyomo:
    764     logger.warning(
    765         "Solving optimisation problem with pyomo."
    766         "In PyPSA version 0.21 the default will change to n.lopf(pyomo=False)."
    767         "Explicitly set n.lopf(pyomo=True) to retain current behaviour."
    768     )
--> 769     return network_lopf(self, **args)
    770 else:
    771     return network_lopf_lowmem(self, **args)

File ~\git\jupyter_eda\lib\site-packages\pypsa\opf.py:2416, in network_lopf(network, snapshots, solver_name, solver_io, skip_pre, extra_functionality, multi_investment_periods, solver_logfile, solver_options, keep_files, formulation, ptdf_tolerance, free_memory, extra_postprocessing)
   2410 logger.warning(
   2411     "Encountered nonzero ramp limits for links. These are ignored when running the optimization with pyomo=True."
   2412 )
   2414 snapshots = _as_snapshots(network, snapshots)
-> 2416 network_lopf_build_model(
   2417     network,
   2418     snapshots,
   2419     skip_pre=skip_pre,
   2420     formulation=formulation,
   2421     ptdf_tolerance=ptdf_tolerance,
   2422 )
   2424 if extra_functionality is not None:
   2425     extra_functionality(network, snapshots)

File ~\git\jupyter_eda\lib\site-packages\pypsa\opf.py:2169, in network_lopf_build_model(network, snapshots, skip_pre, formulation, ptdf_tolerance)
   2165 define_store_variables_constraints(network, snapshots)
   2167 define_branch_extension_variables(network, snapshots)
-> 2169 define_link_flows(network, snapshots)
   2171 define_nodal_balances(network, snapshots)
   2173 define_passive_branch_flows(network, snapshots, formulation, ptdf_tolerance)

File ~\git\jupyter_eda\lib\site-packages\pypsa\opf.py:1068, in define_link_flows(network, snapshots)
   1065 p_max_pu = get_switchable_as_dense(network, "Link", "p_max_pu", snapshots)
   1066 p_min_pu = get_switchable_as_dense(network, "Link", "p_min_pu", snapshots)
-> 1068 fixed_lower = p_min_pu.loc[:, fixed_links_i].multiply(
   1069     network.links.loc[fixed_links_i, "p_nom"]
   1070 )
   1071 fixed_upper = p_max_pu.loc[:, fixed_links_i].multiply(
   1072     network.links.loc[fixed_links_i, "p_nom"]
   1073 )
   1075 network.model.link_p = Var(list(network.links.index), snapshots)

File ~\git\jupyter_eda\lib\site-packages\pandas\core\ops\__init__.py:440, in flex_arith_method_FRAME.<locals>.f(self, other, axis, level, fill_value)
    436 self, other = align_method_FRAME(self, other, axis, flex=True, level=level)
    438 if isinstance(other, ABCDataFrame):
    439     # Another DataFrame
--> 440     new_data = self._combine_frame(other, na_op, fill_value)
    442 elif isinstance(other, ABCSeries):
    443     new_data = self._dispatch_frame_op(other, op, axis=axis)

File ~\git\jupyter_eda\lib\site-packages\pandas\core\frame.py:7691, in DataFrame._combine_frame(self, other, func, fill_value)
   7688     left, right = ops.fill_binop(left, right, fill_value)
   7689     return func(left, right)
-> 7691 new_data = self._dispatch_frame_op(other, _arith_op)
   7692 return new_data

File ~\git\jupyter_eda\lib\site-packages\pandas\core\frame.py:7633, in DataFrame._dispatch_frame_op(self, right, func, axis)
   7627 # TODO: The previous assertion assert right._indexed_same(self)
   7628 #  fails in cases with empty columns reached via
   7629 #  _frame_arith_method_with_reindex
   7630
   7631 # TODO operate_blockwise expects a manager of the same type
   7632 with np.errstate(all="ignore"):
-> 7633     bm = self._mgr.operate_blockwise(
   7634         # error: Argument 1 to "operate_blockwise" of "ArrayManager" has
   7635         # incompatible type "Union[ArrayManager, BlockManager]"; expected
   7636         # "ArrayManager"
   7637         # error: Argument 1 to "operate_blockwise" of "BlockManager" has
   7638         # incompatible type "Union[ArrayManager, BlockManager]"; expected
   7639         # "BlockManager"
   7640         right._mgr,  # type: ignore[arg-type]
   7641         array_op,
   7642     )
   7643 return self._constructor(bm)
   7645 elif isinstance(right, Series) and axis == 1:
   7646     # axis=1 means we want to operate row-by-row

File ~\git\jupyter_eda\lib\site-packages\pandas\core\internals\managers.py:1589, in BlockManager.operate_blockwise(self, other, array_op)
   1585 def operate_blockwise(self, other: BlockManager, array_op) -> BlockManager:
   1586     """
   1587     Apply array_op blockwise with another (aligned) BlockManager.
   1588     """
-> 1589     return operate_blockwise(self, other, array_op)

File ~\git\jupyter_eda\lib\site-packages\pandas\core\internals\ops.py:63, in operate_blockwise(left, right, array_op)
     61 res_blks: list[Block] = []
     62 for lvals, rvals, locs, left_ea, right_ea, rblk in _iter_block_pairs(left, right):
---> 63     res_values = array_op(lvals, rvals)
     64     if left_ea and not right_ea and hasattr(res_values, "reshape"):
     65         res_values = res_values.reshape(1, -1)

File ~\git\jupyter_eda\lib\site-packages\pandas\core\ops\array_ops.py:226, in arithmetic_op(left, right, op)
    222 _bool_arith_check(op, left, right)
    224 # error: Argument 1 to "_na_arithmetic_op" has incompatible type
    225 # "Union[ExtensionArray, ndarray[Any, Any]]"; expected "ndarray[Any, Any]"
--> 226 res_values = _na_arithmetic_op(left, right, op)  # type: ignore[arg-type]
    228 return res_values

File ~\git\jupyter_eda\lib\site-packages\pandas\core\ops\array_ops.py:172, in _na_arithmetic_op(left, right, op, is_cmp)
    166 except TypeError:
    167     if not is_cmp and (is_object_dtype(left.dtype) or is_object_dtype(right)):
    168         # For object dtype, fallback to a masked operation (only operating
    169         # on the non-missing values)
    170         # Don't do this for comparisons, as that will handle complex numbers
    171         # incorrectly, see GH#32047
--> 172         result = _masked_arith_op(left, right, op)
    173     else:
    174         raise

File ~\git\jupyter_eda\lib\site-packages\pandas\core\ops\array_ops.py:110, in _masked_arith_op(x, y, op)
    108 # See GH#5284, GH#5035, GH#19448 for historical reference
    109 if mask.any():
--> 110     result[mask] = op(xrav[mask], yrav[mask])
    112 else:
    113     if not is_scalar(y):

TypeError: can't multiply sequence by non-int of type 'float'
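For what it's worth, the failing call in pypsa/opf.py multiplies p_min_pu by network.links["p_nom"], and this particular message is what Python raises when a sequence (a string or list) is multiplied by a float, i.e. one of the two operands apparently has object dtype with non-numeric entries. A minimal sketch that reproduces the same TypeError, plus a hypothetical dtype check/cast on the network (attribute names follow the traceback; the cast is only an illustration, not a verified fix):

```python
import pandas as pd

# Reproduces the same TypeError in isolation: an object-dtype column
# (numbers stored as text) multiplied by a float ends up as "0.0" * 100.0.
p_min_pu = pd.DataFrame({"link_1": ["0.0", "0.0"]})   # object dtype
p_nom = pd.Series({"link_1": 100.0})

try:
    p_min_pu.multiply(p_nom)
except TypeError as err:
    print(err)  # can't multiply sequence by non-int of type 'float'

# Hypothetical check before calling etrago.lopf():
# print(etrago.network.links["p_nom"].dtype)
# print(etrago.network.links_t.p_min_pu.dtypes.unique())
# etrago.network.links["p_nom"] = pd.to_numeric(etrago.network.links["p_nom"])
```

Since the error only shows up with "active": False (see the title), one unverified guess is that type casting normally done as part of the clustering step is skipped, leaving object-dtype columns in the Link data.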