fastai / fastprogress

Simple and flexible progress bar for Jupyter Notebook and console
Apache License 2.0

AttributeError: 'NBMasterBar' object has no attribute 'out' #60

Closed · 0tist closed 4 years ago

0tist commented 4 years ago

From 10_nlp.ipynb, trying to fine-tune the language model:

learn.fit_one_cycle(1, 2e-2)
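For context, the learner in that notebook is presumably built along these lines (a sketch based on the fastbook 10_nlp chapter; the exact arguments may differ) — note the trailing .to_fp16(), which is what enables mixed precision and trips the assertion in the traceback below:

learn = language_model_learner(dls_lm, AWD_LSTM, drop_mult=0.3, metrics=[accuracy, Perplexity()]).to_fp16()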


AssertionError                            Traceback (most recent call last)
/opt/conda/lib/python3.7/site-packages/fastai2/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
    187         try:
--> 188             self._do_begin_fit(n_epoch)
    189             for epoch in range(n_epoch):

/opt/conda/lib/python3.7/site-packages/fastai2/learner.py in _do_begin_fit(self, n_epoch)
    159     def _do_begin_fit(self, n_epoch):
--> 160         self.n_epoch,self.loss = n_epoch,tensor(0.); self('begin_fit')
    161

/opt/conda/lib/python3.7/site-packages/fastai2/learner.py in __call__(self, event_name)
    123
--> 124     def __call__(self, event_name): L(event_name).map(self._call_one)
    125     def _call_one(self, event_name):

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in map(self, f, *args, **kwargs)
    371                else f.__getitem__)
--> 372         return self._new(map(g, self))
    373

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in _new(self, items, *args, **kwargs)
    322     def _xtra(self): return None
--> 323     def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
    324     def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
    40
---> 41         res = super().__call__(*((x,) + args), **kwargs)
    42         res._newchk = 0

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
    313         if (use_list is not None) or not _is_array(items):
--> 314             items = list(items) if use_list else _listify(items)
    315         if match is not None:

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in _listify(o)
    249     if isinstance(o, str) or _is_array(o): return [o]
--> 250     if is_iter(o): return list(o)
    251     return [o]

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in __call__(self, *args, **kwargs)
    215         fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 216         return self.fn(*fargs, **kwargs)
    217

/opt/conda/lib/python3.7/site-packages/fastai2/learner.py in _call_one(self, event_name)
    126         assert hasattr(event, event_name)
--> 127         [cb(event_name) for cb in sort_by_run(self.cbs)]
    128

/opt/conda/lib/python3.7/site-packages/fastai2/learner.py in <listcomp>(.0)
    126         assert hasattr(event, event_name)
--> 127         [cb(event_name) for cb in sort_by_run(self.cbs)]
    128

/opt/conda/lib/python3.7/site-packages/fastai2/callback/core.py in __call__(self, event_name)
    23                (self.run_valid and not getattr(self, 'training', False)))
---> 24         if self.run and _run: getattr(self, event_name, noop)()
    25         if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit

/opt/conda/lib/python3.7/site-packages/fastai2/callback/fp16.py in begin_fit(self)
    83     def begin_fit(self):
---> 84         assert self.dls.device.type == 'cuda', "Mixed-precision training requires a GPU, remove the call to_fp16"
    85         if self.learn.opt is None: self.learn.create_opt()

AssertionError: Mixed-precision training requires a GPU, remove the call to_fp16

During handling of the above exception, another exception occurred:

AttributeError Traceback (most recent call last)

AttributeError                            Traceback (most recent call last)
<ipython-input-...> in <module>
----> 1 learn.fit_one_cycle(1, 2e-2)

/opt/conda/lib/python3.7/site-packages/fastai2/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
    110     scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
    111               'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 112     self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
    113
    114 # Cell

/opt/conda/lib/python3.7/site-packages/fastai2/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
    196
    197         except CancelFitException:             self('after_cancel_fit')
--> 198         finally:                               self('after_fit')
    199
    200     def validate(self, ds_idx=1, dl=None, cbs=None):

/opt/conda/lib/python3.7/site-packages/fastai2/learner.py in __call__(self, event_name)
    122     def ordered_cbs(self, event): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, event)]
    123
--> 124     def __call__(self, event_name): L(event_name).map(self._call_one)
    125     def _call_one(self, event_name):
    126         assert hasattr(event, event_name)

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in map(self, f, *args, **kwargs)
    370              else f.format if isinstance(f,str)
    371              else f.__getitem__)
--> 372         return self._new(map(g, self))
    373
    374     def filter(self, f, negate=False, **kwargs):

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in _new(self, items, *args, **kwargs)
    321     @property
    322     def _xtra(self): return None
--> 323     def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
    324     def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
    325     def copy(self): return self._new(self.items.copy())

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
    39             return x
    40
---> 41         res = super().__call__(*((x,) + args), **kwargs)
    42         res._newchk = 0
    43         return res

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
    312         if items is None: items = []
    313         if (use_list is not None) or not _is_array(items):
--> 314             items = list(items) if use_list else _listify(items)
    315         if match is not None:
    316             if is_coll(match): match = len(match)

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in _listify(o)
    248     if isinstance(o, list): return o
    249     if isinstance(o, str) or _is_array(o): return [o]
--> 250     if is_iter(o): return list(o)
    251     return [o]
    252

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in __call__(self, *args, **kwargs)
    214         if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
    215         fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 216         return self.fn(*fargs, **kwargs)
    217
    218 # Cell

/opt/conda/lib/python3.7/site-packages/fastai2/learner.py in _call_one(self, event_name)
    125     def _call_one(self, event_name):
    126         assert hasattr(event, event_name)
--> 127         [cb(event_name) for cb in sort_by_run(self.cbs)]
    128
    129     def _bn_bias_state(self, with_bias): return bn_bias_params(self.model, with_bias).map(self.opt.state)

/opt/conda/lib/python3.7/site-packages/fastai2/learner.py in <listcomp>(.0)
    125     def _call_one(self, event_name):
    126         assert hasattr(event, event_name)
--> 127         [cb(event_name) for cb in sort_by_run(self.cbs)]
    128
    129     def _bn_bias_state(self, with_bias): return bn_bias_params(self.model, with_bias).map(self.opt.state)

/opt/conda/lib/python3.7/site-packages/fastai2/callback/core.py in __call__(self, event_name)
    22         _run = (event_name not in _inner_loop or (self.run_train and getattr(self, 'training', True)) or
    23                (self.run_valid and not getattr(self, 'training', False)))
---> 24         if self.run and _run: getattr(self, event_name, noop)()
    25         if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
    26

/opt/conda/lib/python3.7/site-packages/fastai2/callback/progress.py in after_fit(self)
    37     def after_fit(self):
    38         if getattr(self, 'mbar', False):
---> 39             self.mbar.on_iter_end()
    40             delattr(self, 'mbar')
    41         self.learn.logger = self.old_logger

/opt/conda/lib/python3.7/site-packages/fastprogress/fastprogress.py in on_iter_end(self)
    155         total_time = format_time(time.time() - self.main_bar.start_t)
    156         self.text = f'Total time: {total_time} <p>' + self.text
--> 157         self.out.update(HTML(self.text))
    158
    159     def add_child(self, child):

AttributeError: 'NBMasterBar' object has no attribute 'out'

0tist commented 4 years ago

Once I removed the .to_fp16() call, the error went away.

sgugger commented 4 years ago

Yes, because as the stack trace indicated, the error comes from

Mixed-precision training requires a GPU, remove the call to_fp16

The rest is just a consequence of that. You can't use to_fp16 if you don't have a GPU; this is unrelated to fastprogress.
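If you want the same notebook to run on both CPU-only and GPU machines, one option is to guard the call yourself. This is a minimal sketch, not an official fastai recommendation; dls_lm and the learner arguments stand in for whatever you already have:

import torch
from fastai2.text.all import *

# dls_lm is assumed to be the language-model DataLoaders you already built
learn = language_model_learner(dls_lm, AWD_LSTM, metrics=[accuracy])
if torch.cuda.is_available():
    learn = learn.to_fp16()  # only request mixed precision when a CUDA device is present
learn.fit_one_cycle(1, 2e-2)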