bmurauer / pipelinehelper

scikit-helper to hot-swap pipeline elements
GNU General Public License v3.0

Your model does not support predict_proba #3

Closed: jessequinn closed this issue 5 years ago

jessequinn commented 5 years ago

It's me again.

I have another error, this time with the roc_auc scoring.

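For context, this is roughly the setup that triggers it: a minimal sketch reconstructed from the GridSearchCV call and the parameter dump that appear further down in the traceback. The step names ('scaler', 'classifier') and the 'std'/'svm' keys come from that dump; the synthetic data and the hyperparameter grids are placeholders, not my actual notebook code.

```python
# Minimal sketch of the failing setup, reconstructed from the traceback below.
# The synthetic data and the small parameter grids are placeholders.
from sklearn.datasets import make_classification
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC
from pipelinehelper import PipelineHelper

# Stand-in for the 753-row, 8-feature training frame in the traceback.
X_train, y_train = make_classification(n_samples=753, n_features=8, random_state=0)

pipe = Pipeline([
    ('scaler', PipelineHelper([
        ('std', StandardScaler()),
    ])),
    ('classifier', PipelineHelper([
        ('svm', SVC()),  # probability=False by default, so no predict_proba
    ])),
])

params = {
    'scaler__selected_model': pipe.named_steps['scaler'].generate({
        'std__with_mean': [True],
        'std__with_std': [True],
    }),
    'classifier__selected_model': pipe.named_steps['classifier'].generate({}),
}

grid = GridSearchCV(pipe, params, scoring='roc_auc', cv=5, verbose=1, n_jobs=-1)
grid.fit(X_train, y_train)  # fails with the predict_proba exception below
print(grid.best_params_)
print(grid.best_score_)
```
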
The following is the full traceback:

---------------------------------------------------------------------------
RemoteTraceback                           Traceback (most recent call last)
RemoteTraceback: 
"""
Traceback (most recent call last):
  File "/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/metrics/scorer.py", line 187, in __call__
    y_pred = clf.decision_function(X)
  File "/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/utils/metaestimators.py", line 109, in __get__
    getattr(delegate, self.attribute_name)
AttributeError: 'PipelineHelper' object has no attribute 'decision_function'

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/externals/joblib/_parallel_backends.py", line 350, in __call__
    return self.func(*args, **kwargs)
  File "/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/externals/joblib/parallel.py", line 131, in __call__
    return [func(*args, **kwargs) for func, args, kwargs in self.items]
  File "/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/externals/joblib/parallel.py", line 131, in <listcomp>
    return [func(*args, **kwargs) for func, args, kwargs in self.items]
  File "/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/model_selection/_validation.py", line 488, in _fit_and_score
    test_scores = _score(estimator, X_test, y_test, scorer, is_multimetric)
  File "/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/model_selection/_validation.py", line 523, in _score
    return _multimetric_score(estimator, X_test, y_test, scorer)
  File "/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/model_selection/_validation.py", line 553, in _multimetric_score
    score = scorer(estimator, X_test, y_test)
  File "/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/metrics/scorer.py", line 194, in __call__
    y_pred = clf.predict_proba(X)
  File "/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/utils/metaestimators.py", line 115, in <lambda>
    out = lambda *args, **kwargs: self.fn(obj, *args, **kwargs)
  File "/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/pipeline.py", line 357, in predict_proba
    return self.steps[-1][-1].predict_proba(Xt)
  File "/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/pipelinehelper/__init__.py", line 100, in predict_proba
    raise Exception("Your model does not support predict_proba")
Exception: Your model does not support predict_proba

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/multiprocessing/pool.py", line 121, in worker
    result = (True, func(*args, **kwds))
  File "/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/externals/joblib/_parallel_backends.py", line 359, in __call__
    raise TransportableException(text, e_type)
sklearn.externals.joblib.my_exceptions.TransportableException: TransportableException
___________________________________________________________________________
Exception                                          Sun Nov 11 14:12:24 2018
PID: 98210    Python 3.7.0: /Users/jessequinn/.pyenv/versions/3.7.0/bin/python3.7
...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/externals/joblib/parallel.py in __call__(self=<sklearn.externals.joblib.parallel.BatchedCalls object>)
    126     def __init__(self, iterator_slice):
    127         self.items = list(iterator_slice)
    128         self._size = len(self.items)
    129 
    130     def __call__(self):
--> 131         return [func(*args, **kwargs) for func, args, kwargs in self.items]
        self.items = [(<function _fit_and_score>, (Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]),       Department  Clustering  Degree  Degree Cen...000  0.000175  0.000017  

[753 rows x 8 columns], 0       0.0
3       1.0
4       1.0
6       1.0
...e: Management Salary, Length: 753, dtype: float64, {'score': make_scorer(roc_auc_score, needs_threshold=True)}, array([ 83,  84,  89,  90,  96, 102, 103, 105, 1..., 745, 746, 747, 748,
       749, 750, 751, 752]), array([  0,   1,   2,   3,   4,   5,   6,   7,  ...,
       175, 176, 177, 178, 179, 180, 181, 182]), 1, {'classifier__selected_model': ('svm', {}), 'scaler__selected_model': ('std', {'with_mean': True, 'with_std': True})}), {'error_score': 'raise', 'fit_params': {}, 'return_n_test_samples': True, 'return_parameters': False, 'return_times': True, 'return_train_score': 'warn'}), (<function _fit_and_score>, (Pipeline(memory=None,
     steps=[('scaler', Pip...    include_bypass=False, selected_model=None))]),       Department  Clustering  Degree  Degree Cen...000  0.000175  0.000017  

[753 rows x 8 columns], 0       0.0
3       1.0
4       1.0
6       1.0
...e: Management Salary, Length: 753, dtype: float64, {'score': make_scorer(roc_auc_score, needs_threshold=True)}, array([  0,   1,   2,   3,   4,   5,   6,   7,  ..., 745, 746, 747, 748,
       749, 750, 751, 752]), array([ 83,  84,  89,  90,  96, 102, 103, 105, 1...,
       333, 334, 335, 336, 337, 338, 340, 342]), 1, {'classifier__selected_model': ('svm', {}), 'scaler__selected_model': ('std', {'with_mean': True, 'with_std': True})}), {'error_score': 'raise', 'fit_params': {}, 'return_n_test_samples': True, 'return_parameters': False, 'return_times': True, 'return_train_score': 'warn'})]
    132 
    133     def __len__(self):
    134         return self._size
    135 

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/externals/joblib/parallel.py in <listcomp>(.0=<list_iterator object>)
    126     def __init__(self, iterator_slice):
    127         self.items = list(iterator_slice)
    128         self._size = len(self.items)
    129 
    130     def __call__(self):
--> 131         return [func(*args, **kwargs) for func, args, kwargs in self.items]
        func = <function _fit_and_score>
        args = (Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]),       Department  Clustering  Degree  Degree Cen...000  0.000175  0.000017  

[753 rows x 8 columns], 0       0.0
3       1.0
4       1.0
6       1.0
...e: Management Salary, Length: 753, dtype: float64, {'score': make_scorer(roc_auc_score, needs_threshold=True)}, array([ 83,  84,  89,  90,  96, 102, 103, 105, 1..., 745, 746, 747, 748,
       749, 750, 751, 752]), array([  0,   1,   2,   3,   4,   5,   6,   7,  ...,
       175, 176, 177, 178, 179, 180, 181, 182]), 1, {'classifier__selected_model': ('svm', {}), 'scaler__selected_model': ('std', {'with_mean': True, 'with_std': True})})
        kwargs = {'error_score': 'raise', 'fit_params': {}, 'return_n_test_samples': True, 'return_parameters': False, 'return_times': True, 'return_train_score': 'warn'}
    132 
    133     def __len__(self):
    134         return self._size
    135 

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/model_selection/_validation.py in _fit_and_score(estimator=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X=      Department  Clustering  Degree  Degree Cen...000  0.000175  0.000017  

[753 rows x 8 columns], y=0       0.0
3       1.0
4       1.0
6       1.0
...e: Management Salary, Length: 753, dtype: float64, scorer={'score': make_scorer(roc_auc_score, needs_threshold=True)}, train=array([ 83,  84,  89,  90,  96, 102, 103, 105, 1..., 745, 746, 747, 748,
       749, 750, 751, 752]), test=array([  0,   1,   2,   3,   4,   5,   6,   7,  ...,
       175, 176, 177, 178, 179, 180, 181, 182]), verbose=1, parameters={'classifier__selected_model': ('svm', {}), 'scaler__selected_model': ('std', {'with_mean': True, 'with_std': True})}, fit_params={}, return_train_score='warn', return_parameters=False, return_n_test_samples=True, return_times=True, error_score='raise')
    483                              " make sure that it has been spelled correctly.)")
    484 
    485     else:
    486         fit_time = time.time() - start_time
    487         # _score will return dict if is_multimetric is True
--> 488         test_scores = _score(estimator, X_test, y_test, scorer, is_multimetric)
        test_scores = {}
        estimator = Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))])
        X_test =      Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns]
        y_test = 0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64
        scorer = {'score': make_scorer(roc_auc_score, needs_threshold=True)}
        is_multimetric = True
    489         score_time = time.time() - start_time - fit_time
    490         if return_train_score:
    491             train_scores = _score(estimator, X_train, y_train, scorer,
    492                                   is_multimetric)

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/model_selection/_validation.py in _score(estimator=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X_test=     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns], y_test=0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64, scorer={'score': make_scorer(roc_auc_score, needs_threshold=True)}, is_multimetric=True)
    518 
    519     Will return a single float if is_multimetric is False and a dict of floats,
    520     if is_multimetric is True
    521     """
    522     if is_multimetric:
--> 523         return _multimetric_score(estimator, X_test, y_test, scorer)
        estimator = Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))])
        X_test =      Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns]
        y_test = 0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64
        scorer = {'score': make_scorer(roc_auc_score, needs_threshold=True)}
    524     else:
    525         if y_test is None:
    526             score = scorer(estimator, X_test)
    527         else:

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/model_selection/_validation.py in _multimetric_score(estimator=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X_test=     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns], y_test=0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64, scorers={'score': make_scorer(roc_auc_score, needs_threshold=True)})
    548 
    549     for name, scorer in scorers.items():
    550         if y_test is None:
    551             score = scorer(estimator, X_test)
    552         else:
--> 553             score = scorer(estimator, X_test, y_test)
        score = undefined
        scorer = make_scorer(roc_auc_score, needs_threshold=True)
        estimator = Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))])
        X_test =      Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns]
        y_test = 0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64
    554 
    555         if hasattr(score, 'item'):
    556             try:
    557                 # e.g. unwrap memmapped scalars

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/metrics/scorer.py in __call__(self=make_scorer(roc_auc_score, needs_threshold=True), clf=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X=     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns], y=0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64, sample_weight=None)
    189                 # For multi-output multi-class estimator
    190                 if isinstance(y_pred, list):
    191                     y_pred = np.vstack(p for p in y_pred).T
    192 
    193             except (NotImplementedError, AttributeError):
--> 194                 y_pred = clf.predict_proba(X)
        y_pred = undefined
        clf.predict_proba = <function Pipeline.predict_proba>
        X =      Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns]
    195 
    196                 if y_type == "binary":
    197                     y_pred = y_pred[:, 1]
    198                 elif isinstance(y_pred, list):

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/utils/metaestimators.py in <lambda>(*args=(     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns],), **kwargs={})
    110                     break
    111             else:
    112                 attrgetter(self.delegate_names[-1])(obj)
    113 
    114         # lambda, but not partial, allows help() to work with update_wrapper
--> 115         out = lambda *args, **kwargs: self.fn(obj, *args, **kwargs)
        args = (     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns],)
        kwargs = {}
    116         # update the docstring of the returned function
    117         update_wrapper(out, self.fn)
    118         return out
    119 

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/pipeline.py in predict_proba(self=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X=     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns])
    352         """
    353         Xt = X
    354         for name, transform in self.steps[:-1]:
    355             if transform is not None:
    356                 Xt = transform.transform(Xt)
--> 357         return self.steps[-1][-1].predict_proba(Xt)
        self.steps.predict_proba = undefined
        Xt = array([[-1.25425361, -0.46040364,  0.45852572, .... -0.2750333 ,
        -0.43767636, -0.29704686]])
    358 
    359     @if_delegate_has_method(delegate='_final_estimator')
    360     def decision_function(self, X):
    361         """Apply transforms, and decision_function of the final estimator

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/pipelinehelper/__init__.py in predict_proba(self=PipelineHelper(available_models={'svm': SVC(C=1....one, shrinking=True,
  tol=0.001, verbose=False)), x=array([[-1.25425361, -0.46040364,  0.45852572, .... -0.2750333 ,
        -0.43767636, -0.29704686]]))
     95         if hasattr(self.selected_model, "predict_proba"):
     96             method = getattr(self.selected_model, "predict_proba", None)
     97             if callable(method):
     98                 return method(x)
     99         else:
--> 100             raise Exception("Your model does not support predict_proba")
    101 
    102 

Exception: Your model does not support predict_proba
___________________________________________________________________________
"""

The above exception was the direct cause of the following exception:

TransportableException                    Traceback (most recent call last)
~/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/externals/joblib/parallel.py in retrieve(self)
    698                 if getattr(self._backend, 'supports_timeout', False):
--> 699                     self._output.extend(job.get(timeout=self.timeout))
    700                 else:

~/.pyenv/versions/3.7.0/lib/python3.7/multiprocessing/pool.py in get(self, timeout)
    656         else:
--> 657             raise self._value
    658 

TransportableException: TransportableException
___________________________________________________________________________
Exception                                          Sun Nov 11 14:12:24 2018
PID: 98210    Python 3.7.0: /Users/jessequinn/.pyenv/versions/3.7.0/bin/python3.7
...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/externals/joblib/parallel.py in __call__(self=<sklearn.externals.joblib.parallel.BatchedCalls object>)
    126     def __init__(self, iterator_slice):
    127         self.items = list(iterator_slice)
    128         self._size = len(self.items)
    129 
    130     def __call__(self):
--> 131         return [func(*args, **kwargs) for func, args, kwargs in self.items]
        self.items = [(<function _fit_and_score>, (Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]),       Department  Clustering  Degree  Degree Cen...000  0.000175  0.000017  

[753 rows x 8 columns], 0       0.0
3       1.0
4       1.0
6       1.0
...e: Management Salary, Length: 753, dtype: float64, {'score': make_scorer(roc_auc_score, needs_threshold=True)}, array([ 83,  84,  89,  90,  96, 102, 103, 105, 1..., 745, 746, 747, 748,
       749, 750, 751, 752]), array([  0,   1,   2,   3,   4,   5,   6,   7,  ...,
       175, 176, 177, 178, 179, 180, 181, 182]), 1, {'classifier__selected_model': ('svm', {}), 'scaler__selected_model': ('std', {'with_mean': True, 'with_std': True})}), {'error_score': 'raise', 'fit_params': {}, 'return_n_test_samples': True, 'return_parameters': False, 'return_times': True, 'return_train_score': 'warn'}), (<function _fit_and_score>, (Pipeline(memory=None,
     steps=[('scaler', Pip...    include_bypass=False, selected_model=None))]),       Department  Clustering  Degree  Degree Cen...000  0.000175  0.000017  

[753 rows x 8 columns], 0       0.0
3       1.0
4       1.0
6       1.0
...e: Management Salary, Length: 753, dtype: float64, {'score': make_scorer(roc_auc_score, needs_threshold=True)}, array([  0,   1,   2,   3,   4,   5,   6,   7,  ..., 745, 746, 747, 748,
       749, 750, 751, 752]), array([ 83,  84,  89,  90,  96, 102, 103, 105, 1...,
       333, 334, 335, 336, 337, 338, 340, 342]), 1, {'classifier__selected_model': ('svm', {}), 'scaler__selected_model': ('std', {'with_mean': True, 'with_std': True})}), {'error_score': 'raise', 'fit_params': {}, 'return_n_test_samples': True, 'return_parameters': False, 'return_times': True, 'return_train_score': 'warn'})]
    132 
    133     def __len__(self):
    134         return self._size
    135 

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/externals/joblib/parallel.py in <listcomp>(.0=<list_iterator object>)
    126     def __init__(self, iterator_slice):
    127         self.items = list(iterator_slice)
    128         self._size = len(self.items)
    129 
    130     def __call__(self):
--> 131         return [func(*args, **kwargs) for func, args, kwargs in self.items]
        func = <function _fit_and_score>
        args = (Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]),       Department  Clustering  Degree  Degree Cen...000  0.000175  0.000017  

[753 rows x 8 columns], 0       0.0
3       1.0
4       1.0
6       1.0
...e: Management Salary, Length: 753, dtype: float64, {'score': make_scorer(roc_auc_score, needs_threshold=True)}, array([ 83,  84,  89,  90,  96, 102, 103, 105, 1..., 745, 746, 747, 748,
       749, 750, 751, 752]), array([  0,   1,   2,   3,   4,   5,   6,   7,  ...,
       175, 176, 177, 178, 179, 180, 181, 182]), 1, {'classifier__selected_model': ('svm', {}), 'scaler__selected_model': ('std', {'with_mean': True, 'with_std': True})})
        kwargs = {'error_score': 'raise', 'fit_params': {}, 'return_n_test_samples': True, 'return_parameters': False, 'return_times': True, 'return_train_score': 'warn'}
    132 
    133     def __len__(self):
    134         return self._size
    135 

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/model_selection/_validation.py in _fit_and_score(estimator=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X=      Department  Clustering  Degree  Degree Cen...000  0.000175  0.000017  

[753 rows x 8 columns], y=0       0.0
3       1.0
4       1.0
6       1.0
...e: Management Salary, Length: 753, dtype: float64, scorer={'score': make_scorer(roc_auc_score, needs_threshold=True)}, train=array([ 83,  84,  89,  90,  96, 102, 103, 105, 1..., 745, 746, 747, 748,
       749, 750, 751, 752]), test=array([  0,   1,   2,   3,   4,   5,   6,   7,  ...,
       175, 176, 177, 178, 179, 180, 181, 182]), verbose=1, parameters={'classifier__selected_model': ('svm', {}), 'scaler__selected_model': ('std', {'with_mean': True, 'with_std': True})}, fit_params={}, return_train_score='warn', return_parameters=False, return_n_test_samples=True, return_times=True, error_score='raise')
    483                              " make sure that it has been spelled correctly.)")
    484 
    485     else:
    486         fit_time = time.time() - start_time
    487         # _score will return dict if is_multimetric is True
--> 488         test_scores = _score(estimator, X_test, y_test, scorer, is_multimetric)
        test_scores = {}
        estimator = Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))])
        X_test =      Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns]
        y_test = 0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64
        scorer = {'score': make_scorer(roc_auc_score, needs_threshold=True)}
        is_multimetric = True
    489         score_time = time.time() - start_time - fit_time
    490         if return_train_score:
    491             train_scores = _score(estimator, X_train, y_train, scorer,
    492                                   is_multimetric)

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/model_selection/_validation.py in _score(estimator=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X_test=     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns], y_test=0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64, scorer={'score': make_scorer(roc_auc_score, needs_threshold=True)}, is_multimetric=True)
    518 
    519     Will return a single float if is_multimetric is False and a dict of floats,
    520     if is_multimetric is True
    521     """
    522     if is_multimetric:
--> 523         return _multimetric_score(estimator, X_test, y_test, scorer)
        estimator = Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))])
        X_test =      Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns]
        y_test = 0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64
        scorer = {'score': make_scorer(roc_auc_score, needs_threshold=True)}
    524     else:
    525         if y_test is None:
    526             score = scorer(estimator, X_test)
    527         else:

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/model_selection/_validation.py in _multimetric_score(estimator=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X_test=     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns], y_test=0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64, scorers={'score': make_scorer(roc_auc_score, needs_threshold=True)})
    548 
    549     for name, scorer in scorers.items():
    550         if y_test is None:
    551             score = scorer(estimator, X_test)
    552         else:
--> 553             score = scorer(estimator, X_test, y_test)
        score = undefined
        scorer = make_scorer(roc_auc_score, needs_threshold=True)
        estimator = Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))])
        X_test =      Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns]
        y_test = 0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64
    554 
    555         if hasattr(score, 'item'):
    556             try:
    557                 # e.g. unwrap memmapped scalars

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/metrics/scorer.py in __call__(self=make_scorer(roc_auc_score, needs_threshold=True), clf=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X=     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns], y=0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64, sample_weight=None)
    189                 # For multi-output multi-class estimator
    190                 if isinstance(y_pred, list):
    191                     y_pred = np.vstack(p for p in y_pred).T
    192 
    193             except (NotImplementedError, AttributeError):
--> 194                 y_pred = clf.predict_proba(X)
        y_pred = undefined
        clf.predict_proba = <function Pipeline.predict_proba>
        X =      Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns]
    195 
    196                 if y_type == "binary":
    197                     y_pred = y_pred[:, 1]
    198                 elif isinstance(y_pred, list):

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/utils/metaestimators.py in <lambda>(*args=(     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns],), **kwargs={})
    110                     break
    111             else:
    112                 attrgetter(self.delegate_names[-1])(obj)
    113 
    114         # lambda, but not partial, allows help() to work with update_wrapper
--> 115         out = lambda *args, **kwargs: self.fn(obj, *args, **kwargs)
        args = (     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns],)
        kwargs = {}
    116         # update the docstring of the returned function
    117         update_wrapper(out, self.fn)
    118         return out
    119 

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/pipeline.py in predict_proba(self=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X=     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns])
    352         """
    353         Xt = X
    354         for name, transform in self.steps[:-1]:
    355             if transform is not None:
    356                 Xt = transform.transform(Xt)
--> 357         return self.steps[-1][-1].predict_proba(Xt)
        self.steps.predict_proba = undefined
        Xt = array([[-1.25425361, -0.46040364,  0.45852572, .... -0.2750333 ,
        -0.43767636, -0.29704686]])
    358 
    359     @if_delegate_has_method(delegate='_final_estimator')
    360     def decision_function(self, X):
    361         """Apply transforms, and decision_function of the final estimator

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/pipelinehelper/__init__.py in predict_proba(self=PipelineHelper(available_models={'svm': SVC(C=1....one, shrinking=True,
  tol=0.001, verbose=False)), x=array([[-1.25425361, -0.46040364,  0.45852572, .... -0.2750333 ,
        -0.43767636, -0.29704686]]))
     95         if hasattr(self.selected_model, "predict_proba"):
     96             method = getattr(self.selected_model, "predict_proba", None)
     97             if callable(method):
     98                 return method(x)
     99         else:
--> 100             raise Exception("Your model does not support predict_proba")
    101 
    102 

Exception: Your model does not support predict_proba
___________________________________________________________________________

During handling of the above exception, another exception occurred:

JoblibException                           Traceback (most recent call last)
<ipython-input-10-ea9ec120f647> in <module>()
      1 grid = GridSearchCV(pipe,params,scoring='roc_auc',cv=5,verbose=1,n_jobs=-1)
----> 2 grid.fit(X_train,y_train)
      3 print(grid.best_params_)
      4 print(grid.best_score_)

~/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/model_selection/_search.py in fit(self, X, y, groups, **fit_params)
    638                                   error_score=self.error_score)
    639           for parameters, (train, test) in product(candidate_params,
--> 640                                                    cv.split(X, y, groups)))
    641 
    642         # if one choose to see train score, "out" will contain train score info

~/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/externals/joblib/parallel.py in __call__(self, iterable)
    787                 # consumption.
    788                 self._iterating = False
--> 789             self.retrieve()
    790             # Make sure that we get a last message telling us we are done
    791             elapsed_time = time.time() - self._start_time

~/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/externals/joblib/parallel.py in retrieve(self)
    738                     exception = exception_type(report)
    739 
--> 740                     raise exception
    741 
    742     def __call__(self, iterable):

JoblibException: JoblibException
___________________________________________________________________________
Multiprocessing exception:
...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/runpy.py in _run_module_as_main(mod_name='ipykernel_launcher', alter_argv=1)
    188         sys.exit(msg)
    189     main_globals = sys.modules["__main__"].__dict__
    190     if alter_argv:
    191         sys.argv[0] = mod_spec.origin
    192     return _run_code(code, main_globals, None,
--> 193                      "__main__", mod_spec)
        mod_spec = ModuleSpec(name='ipykernel_launcher', loader=<_f...b/python3.7/site-packages/ipykernel_launcher.py')
    194 
    195 def run_module(mod_name, init_globals=None,
    196                run_name=None, alter_sys=False):
    197     """Execute a module's code without importing it

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/runpy.py in _run_code(code=<code object <module> at 0x10cc83390, file "/Use...3.7/site-packages/ipykernel_launcher.py", line 5>, run_globals={'__annotations__': {}, '__builtins__': <module 'builtins' (built-in)>, '__cached__': '/Users/jessequinn/.pyenv/versions/3.7.0/lib/pyth...ges/__pycache__/ipykernel_launcher.cpython-37.pyc', '__doc__': 'Entry point for launching an IPython kernel.\n\nTh...orts until\nafter removing the cwd from sys.path.\n', '__file__': '/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/ipykernel_launcher.py', '__loader__': <_frozen_importlib_external.SourceFileLoader object>, '__name__': '__main__', '__package__': '', '__spec__': ModuleSpec(name='ipykernel_launcher', loader=<_f...b/python3.7/site-packages/ipykernel_launcher.py'), 'app': <module 'ipykernel.kernelapp' from '/Users/jesse.../python3.7/site-packages/ipykernel/kernelapp.py'>, ...}, init_globals=None, mod_name='__main__', mod_spec=ModuleSpec(name='ipykernel_launcher', loader=<_f...b/python3.7/site-packages/ipykernel_launcher.py'), pkg_name='', script_name=None)
     80                        __cached__ = cached,
     81                        __doc__ = None,
     82                        __loader__ = loader,
     83                        __package__ = pkg_name,
     84                        __spec__ = mod_spec)
---> 85     exec(code, run_globals)
        code = <code object <module> at 0x10cc83390, file "/Use...3.7/site-packages/ipykernel_launcher.py", line 5>
        run_globals = {'__annotations__': {}, '__builtins__': <module 'builtins' (built-in)>, '__cached__': '/Users/jessequinn/.pyenv/versions/3.7.0/lib/pyth...ges/__pycache__/ipykernel_launcher.cpython-37.pyc', '__doc__': 'Entry point for launching an IPython kernel.\n\nTh...orts until\nafter removing the cwd from sys.path.\n', '__file__': '/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/ipykernel_launcher.py', '__loader__': <_frozen_importlib_external.SourceFileLoader object>, '__name__': '__main__', '__package__': '', '__spec__': ModuleSpec(name='ipykernel_launcher', loader=<_f...b/python3.7/site-packages/ipykernel_launcher.py'), 'app': <module 'ipykernel.kernelapp' from '/Users/jesse.../python3.7/site-packages/ipykernel/kernelapp.py'>, ...}
     86     return run_globals
     87 
     88 def _run_module_code(code, init_globals=None,
     89                     mod_name=None, mod_spec=None,

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/ipykernel_launcher.py in <module>()
     11     # This is added back by InteractiveShellApp.init_path()
     12     if sys.path[0] == '':
     13         del sys.path[0]
     14 
     15     from ipykernel import kernelapp as app
---> 16     app.launch_new_instance()

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/traitlets/config/application.py in launch_instance(cls=<class 'ipykernel.kernelapp.IPKernelApp'>, argv=None, **kwargs={})
    653 
    654         If a global instance already exists, this reinitializes and starts it
    655         """
    656         app = cls.instance(**kwargs)
    657         app.initialize(argv)
--> 658         app.start()
        app.start = <bound method IPKernelApp.start of <ipykernel.kernelapp.IPKernelApp object>>
    659 
    660 #-----------------------------------------------------------------------------
    661 # utility functions, for convenience
    662 #-----------------------------------------------------------------------------

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/ipykernel/kernelapp.py in start(self=<ipykernel.kernelapp.IPKernelApp object>)
    481         if self.poller is not None:
    482             self.poller.start()
    483         self.kernel.start()
    484         self.io_loop = ioloop.IOLoop.current()
    485         try:
--> 486             self.io_loop.start()
        self.io_loop.start = <bound method BaseAsyncIOLoop.start of <tornado.platform.asyncio.AsyncIOMainLoop object>>
    487         except KeyboardInterrupt:
    488             pass
    489 
    490 launch_new_instance = IPKernelApp.launch_instance

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/tornado/platform/asyncio.py in start(self=<tornado.platform.asyncio.AsyncIOMainLoop object>)
    127         except (RuntimeError, AssertionError):
    128             old_loop = None
    129         try:
    130             self._setup_logging()
    131             asyncio.set_event_loop(self.asyncio_loop)
--> 132             self.asyncio_loop.run_forever()
        self.asyncio_loop.run_forever = <bound method BaseEventLoop.run_forever of <_Uni...EventLoop running=True closed=False debug=False>>
    133         finally:
    134             asyncio.set_event_loop(old_loop)
    135 
    136     def stop(self):

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/asyncio/base_events.py in run_forever(self=<_UnixSelectorEventLoop running=True closed=False debug=False>)
    518         sys.set_asyncgen_hooks(firstiter=self._asyncgen_firstiter_hook,
    519                                finalizer=self._asyncgen_finalizer_hook)
    520         try:
    521             events._set_running_loop(self)
    522             while True:
--> 523                 self._run_once()
        self._run_once = <bound method BaseEventLoop._run_once of <_UnixS...EventLoop running=True closed=False debug=False>>
    524                 if self._stopping:
    525                     break
    526         finally:
    527             self._stopping = False

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/asyncio/base_events.py in _run_once(self=<_UnixSelectorEventLoop running=True closed=False debug=False>)
   1753                         logger.warning('Executing %s took %.3f seconds',
   1754                                        _format_handle(handle), dt)
   1755                 finally:
   1756                     self._current_handle = None
   1757             else:
-> 1758                 handle._run()
        handle._run = <bound method Handle._run of <Handle IOLoop._run_callback(functools.par... 0x11ccd9598>))>>
   1759         handle = None  # Needed to break cycles when an exception occurs.
   1760 
   1761     def _set_coroutine_origin_tracking(self, enabled):
   1762         if bool(enabled) == bool(self._coroutine_origin_tracking_enabled):

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/asyncio/events.py in _run(self=<Handle IOLoop._run_callback(functools.par... 0x11ccd9598>))>)
     83     def cancelled(self):
     84         return self._cancelled
     85 
     86     def _run(self):
     87         try:
---> 88             self._context.run(self._callback, *self._args)
        self._context.run = <built-in method run of Context object>
        self._callback = <bound method IOLoop._run_callback of <tornado.platform.asyncio.AsyncIOMainLoop object>>
        self._args = (functools.partial(<function wrap.<locals>.null_wrapper at 0x11ccd9598>),)
     89         except Exception as exc:
     90             cb = format_helpers._format_callback_source(
     91                 self._callback, self._args)
     92             msg = f'Exception in callback {cb}'

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/tornado/ioloop.py in _run_callback(self=<tornado.platform.asyncio.AsyncIOMainLoop object>, callback=functools.partial(<function wrap.<locals>.null_wrapper at 0x11ccd9598>))
    753         """Runs a callback with error handling.
    754 
    755         For use in subclasses.
    756         """
    757         try:
--> 758             ret = callback()
        ret = undefined
        callback = functools.partial(<function wrap.<locals>.null_wrapper at 0x11ccd9598>)
    759             if ret is not None:
    760                 from tornado import gen
    761                 # Functions that return Futures typically swallow all
    762                 # exceptions and store them in the Future.  If a Future

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/tornado/stack_context.py in null_wrapper(*args=(), **kwargs={})
    295         # Fast path when there are no active contexts.
    296         def null_wrapper(*args, **kwargs):
    297             try:
    298                 current_state = _state.contexts
    299                 _state.contexts = cap_contexts[0]
--> 300                 return fn(*args, **kwargs)
        args = ()
        kwargs = {}
    301             finally:
    302                 _state.contexts = current_state
    303         null_wrapper._wrapped = True
    304         return null_wrapper

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/zmq/eventloop/zmqstream.py in <lambda>()
    531             return
    532 
    533         if state & self.socket.events:
    534             # events still exist that haven't been processed
    535             # explicitly schedule handling to avoid missing events due to edge-triggered FDs
--> 536             self.io_loop.add_callback(lambda : self._handle_events(self.socket, 0))
    537 
    538     def _init_io_state(self):
    539         """initialize the ioloop event handler"""
    540         with stack_context.NullContext():

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/zmq/eventloop/zmqstream.py in _handle_events(self=<zmq.eventloop.zmqstream.ZMQStream object>, fd=<zmq.sugar.socket.Socket object>, events=0)
    445             return
    446         zmq_events = self.socket.EVENTS
    447         try:
    448             # dispatch events:
    449             if zmq_events & zmq.POLLIN and self.receiving():
--> 450                 self._handle_recv()
        self._handle_recv = <bound method ZMQStream._handle_recv of <zmq.eventloop.zmqstream.ZMQStream object>>
    451                 if not self.socket:
    452                     return
    453             if zmq_events & zmq.POLLOUT and self.sending():
    454                 self._handle_send()

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/zmq/eventloop/zmqstream.py in _handle_recv(self=<zmq.eventloop.zmqstream.ZMQStream object>)
    475             else:
    476                 raise
    477         else:
    478             if self._recv_callback:
    479                 callback = self._recv_callback
--> 480                 self._run_callback(callback, msg)
        self._run_callback = <bound method ZMQStream._run_callback of <zmq.eventloop.zmqstream.ZMQStream object>>
        callback = <function wrap.<locals>.null_wrapper>
        msg = [<zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>]
    481         
    482 
    483     def _handle_send(self):
    484         """Handle a send event."""

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/zmq/eventloop/zmqstream.py in _run_callback(self=<zmq.eventloop.zmqstream.ZMQStream object>, callback=<function wrap.<locals>.null_wrapper>, *args=([<zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>],), **kwargs={})
    427         close our socket."""
    428         try:
    429             # Use a NullContext to ensure that all StackContexts are run
    430             # inside our blanket exception handler rather than outside.
    431             with stack_context.NullContext():
--> 432                 callback(*args, **kwargs)
        callback = <function wrap.<locals>.null_wrapper>
        args = ([<zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>],)
        kwargs = {}
    433         except:
    434             gen_log.error("Uncaught exception in ZMQStream callback",
    435                           exc_info=True)
    436             # Re-raise the exception so that IOLoop.handle_callback_exception

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/tornado/stack_context.py in null_wrapper(*args=([<zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>],), **kwargs={})
    295         # Fast path when there are no active contexts.
    296         def null_wrapper(*args, **kwargs):
    297             try:
    298                 current_state = _state.contexts
    299                 _state.contexts = cap_contexts[0]
--> 300                 return fn(*args, **kwargs)
        args = ([<zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>],)
        kwargs = {}
    301             finally:
    302                 _state.contexts = current_state
    303         null_wrapper._wrapped = True
    304         return null_wrapper

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/ipykernel/kernelbase.py in dispatcher(msg=[<zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>])
    278         if self.control_stream:
    279             self.control_stream.on_recv(self.dispatch_control, copy=False)
    280 
    281         def make_dispatcher(stream):
    282             def dispatcher(msg):
--> 283                 return self.dispatch_shell(stream, msg)
        msg = [<zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>, <zmq.sugar.frame.Frame object>]
    284             return dispatcher
    285 
    286         for s in self.shell_streams:
    287             s.on_recv(make_dispatcher(s), copy=False)

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/ipykernel/kernelbase.py in dispatch_shell(self=<ipykernel.ipkernel.IPythonKernel object>, stream=<zmq.eventloop.zmqstream.ZMQStream object>, msg={'buffers': [], 'content': {'allow_stdin': True, 'code': "grid = GridSearchCV(pipe,params,scoring='roc_auc...\nprint(grid.best_params_)\nprint(grid.best_score_)", 'silent': False, 'stop_on_error': True, 'store_history': True, 'user_expressions': {}}, 'header': {'date': datetime.datetime(2018, 11, 11, 16, 9, 50, 737904, tzinfo=tzutc()), 'msg_id': 'f21dbc0842c042909d602f9043b581d6', 'msg_type': 'execute_request', 'session': '4c7069e2899e4e8bac1faca644d9b0be', 'username': 'username', 'version': '5.2'}, 'metadata': {}, 'msg_id': 'f21dbc0842c042909d602f9043b581d6', 'msg_type': 'execute_request', 'parent_header': {}})
    228             self.log.warn("Unknown message type: %r", msg_type)
    229         else:
    230             self.log.debug("%s: %s", msg_type, msg)
    231             self.pre_handler_hook()
    232             try:
--> 233                 handler(stream, idents, msg)
        handler = <bound method Kernel.execute_request of <ipykernel.ipkernel.IPythonKernel object>>
        stream = <zmq.eventloop.zmqstream.ZMQStream object>
        idents = [b'4c7069e2899e4e8bac1faca644d9b0be']
        msg = {'buffers': [], 'content': {'allow_stdin': True, 'code': "grid = GridSearchCV(pipe,params,scoring='roc_auc...\nprint(grid.best_params_)\nprint(grid.best_score_)", 'silent': False, 'stop_on_error': True, 'store_history': True, 'user_expressions': {}}, 'header': {'date': datetime.datetime(2018, 11, 11, 16, 9, 50, 737904, tzinfo=tzutc()), 'msg_id': 'f21dbc0842c042909d602f9043b581d6', 'msg_type': 'execute_request', 'session': '4c7069e2899e4e8bac1faca644d9b0be', 'username': 'username', 'version': '5.2'}, 'metadata': {}, 'msg_id': 'f21dbc0842c042909d602f9043b581d6', 'msg_type': 'execute_request', 'parent_header': {}}
    234             except Exception:
    235                 self.log.error("Exception in message handler:", exc_info=True)
    236             finally:
    237                 self.post_handler_hook()

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/ipykernel/kernelbase.py in execute_request(self=<ipykernel.ipkernel.IPythonKernel object>, stream=<zmq.eventloop.zmqstream.ZMQStream object>, ident=[b'4c7069e2899e4e8bac1faca644d9b0be'], parent={'buffers': [], 'content': {'allow_stdin': True, 'code': "grid = GridSearchCV(pipe,params,scoring='roc_auc...\nprint(grid.best_params_)\nprint(grid.best_score_)", 'silent': False, 'stop_on_error': True, 'store_history': True, 'user_expressions': {}}, 'header': {'date': datetime.datetime(2018, 11, 11, 16, 9, 50, 737904, tzinfo=tzutc()), 'msg_id': 'f21dbc0842c042909d602f9043b581d6', 'msg_type': 'execute_request', 'session': '4c7069e2899e4e8bac1faca644d9b0be', 'username': 'username', 'version': '5.2'}, 'metadata': {}, 'msg_id': 'f21dbc0842c042909d602f9043b581d6', 'msg_type': 'execute_request', 'parent_header': {}})
    394         if not silent:
    395             self.execution_count += 1
    396             self._publish_execute_input(code, parent, self.execution_count)
    397 
    398         reply_content = self.do_execute(code, silent, store_history,
--> 399                                         user_expressions, allow_stdin)
        user_expressions = {}
        allow_stdin = True
    400 
    401         # Flush output before sending the reply.
    402         sys.stdout.flush()
    403         sys.stderr.flush()

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/ipykernel/ipkernel.py in do_execute(self=<ipykernel.ipkernel.IPythonKernel object>, code="grid = GridSearchCV(pipe,params,scoring='roc_auc...\nprint(grid.best_params_)\nprint(grid.best_score_)", silent=False, store_history=True, user_expressions={}, allow_stdin=True)
    203 
    204         self._forward_input(allow_stdin)
    205 
    206         reply_content = {}
    207         try:
--> 208             res = shell.run_cell(code, store_history=store_history, silent=silent)
        res = undefined
        shell.run_cell = <bound method ZMQInteractiveShell.run_cell of <ipykernel.zmqshell.ZMQInteractiveShell object>>
        code = "grid = GridSearchCV(pipe,params,scoring='roc_auc...\nprint(grid.best_params_)\nprint(grid.best_score_)"
        store_history = True
        silent = False
    209         finally:
    210             self._restore_input()
    211 
    212         if res.error_before_exec is not None:

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/ipykernel/zmqshell.py in run_cell(self=<ipykernel.zmqshell.ZMQInteractiveShell object>, *args=("grid = GridSearchCV(pipe,params,scoring='roc_auc...\nprint(grid.best_params_)\nprint(grid.best_score_)",), **kwargs={'silent': False, 'store_history': True})
    532             )
    533         self.payload_manager.write_payload(payload)
    534 
    535     def run_cell(self, *args, **kwargs):
    536         self._last_traceback = None
--> 537         return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)
        self.run_cell = <bound method ZMQInteractiveShell.run_cell of <ipykernel.zmqshell.ZMQInteractiveShell object>>
        args = ("grid = GridSearchCV(pipe,params,scoring='roc_auc...\nprint(grid.best_params_)\nprint(grid.best_score_)",)
        kwargs = {'silent': False, 'store_history': True}
    538 
    539     def _showtraceback(self, etype, evalue, stb):
    540         # try to preserve ordering of tracebacks and print statements
    541         sys.stdout.flush()

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/IPython/core/interactiveshell.py in run_cell(self=<ipykernel.zmqshell.ZMQInteractiveShell object>, raw_cell="grid = GridSearchCV(pipe,params,scoring='roc_auc...\nprint(grid.best_params_)\nprint(grid.best_score_)", store_history=True, silent=False, shell_futures=True)
   2657         -------
   2658         result : :class:`ExecutionResult`
   2659         """
   2660         try:
   2661             result = self._run_cell(
-> 2662                 raw_cell, store_history, silent, shell_futures)
        raw_cell = "grid = GridSearchCV(pipe,params,scoring='roc_auc...\nprint(grid.best_params_)\nprint(grid.best_score_)"
        store_history = True
        silent = False
        shell_futures = True
   2663         finally:
   2664             self.events.trigger('post_execute')
   2665             if not silent:
   2666                 self.events.trigger('post_run_cell', result)

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/IPython/core/interactiveshell.py in _run_cell(self=<ipykernel.zmqshell.ZMQInteractiveShell object>, raw_cell="grid = GridSearchCV(pipe,params,scoring='roc_auc...\nprint(grid.best_params_)\nprint(grid.best_score_)", store_history=True, silent=False, shell_futures=True)
   2780                 self.displayhook.exec_result = result
   2781 
   2782                 # Execute the user code
   2783                 interactivity = 'none' if silent else self.ast_node_interactivity
   2784                 has_raised = self.run_ast_nodes(code_ast.body, cell_name,
-> 2785                    interactivity=interactivity, compiler=compiler, result=result)
        interactivity = 'last_expr'
        compiler = <IPython.core.compilerop.CachingCompiler object>
   2786                 
   2787                 self.last_execution_succeeded = not has_raised
   2788                 self.last_execution_result = result
   2789 

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/IPython/core/interactiveshell.py in run_ast_nodes(self=<ipykernel.zmqshell.ZMQInteractiveShell object>, nodelist=[<_ast.Assign object>, <_ast.Expr object>, <_ast.Expr object>, <_ast.Expr object>], cell_name='<ipython-input-10-ea9ec120f647>', interactivity='last', compiler=<IPython.core.compilerop.CachingCompiler object>, result=<ExecutionResult object at 11ed76780, execution_...rue silent=False shell_futures=True> result=None>)
   2896             raise ValueError("Interactivity was %r" % interactivity)
   2897         try:
   2898             for i, node in enumerate(to_run_exec):
   2899                 mod = ast.Module([node])
   2900                 code = compiler(mod, cell_name, "exec")
-> 2901                 if self.run_code(code, result):
        self.run_code = <bound method InteractiveShell.run_code of <ipykernel.zmqshell.ZMQInteractiveShell object>>
        code = <code object <module> at 0x11eec99c0, file "<ipython-input-10-ea9ec120f647>", line 2>
        result = <ExecutionResult object at 11ed76780, execution_...rue silent=False shell_futures=True> result=None>
   2902                     return True
   2903 
   2904             for i, node in enumerate(to_run_interactive):
   2905                 mod = ast.Interactive([node])

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/IPython/core/interactiveshell.py in run_code(self=<ipykernel.zmqshell.ZMQInteractiveShell object>, code_obj=<code object <module> at 0x11eec99c0, file "<ipython-input-10-ea9ec120f647>", line 2>, result=<ExecutionResult object at 11ed76780, execution_...rue silent=False shell_futures=True> result=None>)
   2956         outflag = True  # happens in more places, so it's easier as default
   2957         try:
   2958             try:
   2959                 self.hooks.pre_run_code_hook()
   2960                 #rprint('Running code', repr(code_obj)) # dbg
-> 2961                 exec(code_obj, self.user_global_ns, self.user_ns)
        code_obj = <code object <module> at 0x11eec99c0, file "<ipython-input-10-ea9ec120f647>", line 2>
        self.user_global_ns = {'AdaBoostClassifier': <class 'sklearn.ensemble.weight_boosting.AdaBoostClassifier'>, 'G': <networkx.classes.graph.Graph object>, 'GradientBoostingClassifier': <class 'sklearn.ensemble.gradient_boosting.GradientBoostingClassifier'>, 'GridSearchCV': <class 'sklearn.model_selection._search.GridSearchCV'>, 'In': ['', "import networkx as nx\nimport pandas as pd\nimport... git+https://github.com/bmurauer/pipelinehelper')", "P1_Graphs = pickle.load(open('A4_graphs','rb'))\nP1_Graphs", "def graph_identification():\n    return ['PA' if ...' for G in P1_Graphs]\n    \ngraph_identification()", "G = nx.read_gpickle('email_prediction.txt')\n\n# print(nx.info(G))", '# print(G.nodes(data=True))', '# print(G.edges(data=True))', "from sklearn.pipeline import Pipeline\nfrom sklea...       'nb_pipe__nb__alpha': [0.1, 0.2],\n    })\n}", "def salary_predictions():\n    # Initialize the d...nagement Salary'])]\n\n    return df_train, df_test", "df_train, df_test = salary_predictions()\n\nfeatur...features]\ny_train = df_train['Management Salary']", "grid = GridSearchCV(pipe,params,scoring='roc_auc...\nprint(grid.best_params_)\nprint(grid.best_score_)"], 'KNeighborsClassifier': <class 'sklearn.neighbors.classification.KNeighborsClassifier'>, 'MaxAbsScaler': <class 'sklearn.preprocessing.data.MaxAbsScaler'>, 'MinMaxScaler': <class 'sklearn.preprocessing.data.MinMaxScaler'>, 'MultinomialNB': <class 'sklearn.naive_bayes.MultinomialNB'>, 'Out': {2: [<networkx.classes.graph.Graph object>, <networkx.classes.graph.Graph object>, <networkx.classes.graph.Graph object>, <networkx.classes.graph.Graph object>, <networkx.classes.graph.Graph object>], 3: ['PA', 'SW_L', 'SW_L', 'PA', 'SW_H']}, ...}
        self.user_ns = {'AdaBoostClassifier': <class 'sklearn.ensemble.weight_boosting.AdaBoostClassifier'>, 'G': <networkx.classes.graph.Graph object>, 'GradientBoostingClassifier': <class 'sklearn.ensemble.gradient_boosting.GradientBoostingClassifier'>, 'GridSearchCV': <class 'sklearn.model_selection._search.GridSearchCV'>, 'In': ['', "import networkx as nx\nimport pandas as pd\nimport... git+https://github.com/bmurauer/pipelinehelper')", "P1_Graphs = pickle.load(open('A4_graphs','rb'))\nP1_Graphs", "def graph_identification():\n    return ['PA' if ...' for G in P1_Graphs]\n    \ngraph_identification()", "G = nx.read_gpickle('email_prediction.txt')\n\n# print(nx.info(G))", '# print(G.nodes(data=True))', '# print(G.edges(data=True))', "from sklearn.pipeline import Pipeline\nfrom sklea...       'nb_pipe__nb__alpha': [0.1, 0.2],\n    })\n}", "def salary_predictions():\n    # Initialize the d...nagement Salary'])]\n\n    return df_train, df_test", "df_train, df_test = salary_predictions()\n\nfeatur...features]\ny_train = df_train['Management Salary']", "grid = GridSearchCV(pipe,params,scoring='roc_auc...\nprint(grid.best_params_)\nprint(grid.best_score_)"], 'KNeighborsClassifier': <class 'sklearn.neighbors.classification.KNeighborsClassifier'>, 'MaxAbsScaler': <class 'sklearn.preprocessing.data.MaxAbsScaler'>, 'MinMaxScaler': <class 'sklearn.preprocessing.data.MinMaxScaler'>, 'MultinomialNB': <class 'sklearn.naive_bayes.MultinomialNB'>, 'Out': {2: [<networkx.classes.graph.Graph object>, <networkx.classes.graph.Graph object>, <networkx.classes.graph.Graph object>, <networkx.classes.graph.Graph object>, <networkx.classes.graph.Graph object>], 3: ['PA', 'SW_L', 'SW_L', 'PA', 'SW_H']}, ...}
   2962             finally:
   2963                 # Reset our crash handler in place
   2964                 sys.excepthook = old_excepthook
   2965         except SystemExit as e:

...........................................................................
/Users/jessequinn/github/Coursera/Applied Data Science with Python Specialization/Course 5 - Applied Social Network Analysis in Python/04_network-evolution/02_module-4-assignment/<ipython-input-10-ea9ec120f647> in <module>()
      1 grid = GridSearchCV(pipe,params,scoring='roc_auc',cv=5,verbose=1,n_jobs=-1)
----> 2 grid.fit(X_train,y_train)
      3 print(grid.best_params_)
      4 print(grid.best_score_)

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/model_selection/_search.py in fit(self=GridSearchCV(cv=5, error_score='raise',
       e...core='warn',
       scoring='roc_auc', verbose=1), X=      Department  Clustering  Degree  Degree Cen...000  0.000175  0.000017  

[753 rows x 8 columns], y=0       0.0
3       1.0
4       1.0
6       1.0
...e: Management Salary, Length: 753, dtype: float64, groups=None, **fit_params={})
    635                                   return_train_score=self.return_train_score,
    636                                   return_n_test_samples=True,
    637                                   return_times=True, return_parameters=False,
    638                                   error_score=self.error_score)
    639           for parameters, (train, test) in product(candidate_params,
--> 640                                                    cv.split(X, y, groups)))
        cv.split = <bound method StratifiedKFold.split of Stratifie...ld(n_splits=5, random_state=None, shuffle=False)>
        X =       Department  Clustering  Degree  Degree Cen...000  0.000175  0.000017  

[753 rows x 8 columns]
        y = 0       0.0
3       1.0
4       1.0
6       1.0
...e: Management Salary, Length: 753, dtype: float64
        groups = None
    641 
    642         # if one choose to see train score, "out" will contain train score info
    643         if self.return_train_score:
    644             (train_score_dicts, test_score_dicts, test_sample_counts, fit_time,

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/externals/joblib/parallel.py in __call__(self=Parallel(n_jobs=-1), iterable=<generator object BaseSearchCV.fit.<locals>.<genexpr>>)
    784             if pre_dispatch == "all" or n_jobs == 1:
    785                 # The iterable was consumed all at once by the above for loop.
    786                 # No need to wait for async callbacks to trigger to
    787                 # consumption.
    788                 self._iterating = False
--> 789             self.retrieve()
        self.retrieve = <bound method Parallel.retrieve of Parallel(n_jobs=-1)>
    790             # Make sure that we get a last message telling us we are done
    791             elapsed_time = time.time() - self._start_time
    792             self._print('Done %3i out of %3i | elapsed: %s finished',
    793                         (len(self._output), len(self._output),

---------------------------------------------------------------------------
Sub-process traceback:
---------------------------------------------------------------------------
Exception                                          Sun Nov 11 14:12:24 2018
PID: 98210                             Python 3.7.0: /Users/jessequinn/.pyenv/versions/3.7.0/bin/python3.7
...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/externals/joblib/parallel.py in __call__(self=<sklearn.externals.joblib.parallel.BatchedCalls object>)
    126     def __init__(self, iterator_slice):
    127         self.items = list(iterator_slice)
    128         self._size = len(self.items)
    129 
    130     def __call__(self):
--> 131         return [func(*args, **kwargs) for func, args, kwargs in self.items]
        self.items = [(<function _fit_and_score>, (Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]),       Department  Clustering  Degree  Degree Cen...000  0.000175  0.000017  

[753 rows x 8 columns], 0       0.0
3       1.0
4       1.0
6       1.0
...e: Management Salary, Length: 753, dtype: float64, {'score': make_scorer(roc_auc_score, needs_threshold=True)}, array([ 83,  84,  89,  90,  96, 102, 103, 105, 1..., 745, 746, 747, 748,
       749, 750, 751, 752]), array([  0,   1,   2,   3,   4,   5,   6,   7,  ...,
       175, 176, 177, 178, 179, 180, 181, 182]), 1, {'classifier__selected_model': ('svm', {}), 'scaler__selected_model': ('std', {'with_mean': True, 'with_std': True})}), {'error_score': 'raise', 'fit_params': {}, 'return_n_test_samples': True, 'return_parameters': False, 'return_times': True, 'return_train_score': 'warn'}), (<function _fit_and_score>, (Pipeline(memory=None,
     steps=[('scaler', Pip...    include_bypass=False, selected_model=None))]),       Department  Clustering  Degree  Degree Cen...000  0.000175  0.000017  

[753 rows x 8 columns], 0       0.0
3       1.0
4       1.0
6       1.0
...e: Management Salary, Length: 753, dtype: float64, {'score': make_scorer(roc_auc_score, needs_threshold=True)}, array([  0,   1,   2,   3,   4,   5,   6,   7,  ..., 745, 746, 747, 748,
       749, 750, 751, 752]), array([ 83,  84,  89,  90,  96, 102, 103, 105, 1...,
       333, 334, 335, 336, 337, 338, 340, 342]), 1, {'classifier__selected_model': ('svm', {}), 'scaler__selected_model': ('std', {'with_mean': True, 'with_std': True})}), {'error_score': 'raise', 'fit_params': {}, 'return_n_test_samples': True, 'return_parameters': False, 'return_times': True, 'return_train_score': 'warn'})]
    132 
    133     def __len__(self):
    134         return self._size
    135 

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/externals/joblib/parallel.py in <listcomp>(.0=<list_iterator object>)
    126     def __init__(self, iterator_slice):
    127         self.items = list(iterator_slice)
    128         self._size = len(self.items)
    129 
    130     def __call__(self):
--> 131         return [func(*args, **kwargs) for func, args, kwargs in self.items]
        func = <function _fit_and_score>
        args = (Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]),       Department  Clustering  Degree  Degree Cen...000  0.000175  0.000017  

[753 rows x 8 columns], 0       0.0
3       1.0
4       1.0
6       1.0
...e: Management Salary, Length: 753, dtype: float64, {'score': make_scorer(roc_auc_score, needs_threshold=True)}, array([ 83,  84,  89,  90,  96, 102, 103, 105, 1..., 745, 746, 747, 748,
       749, 750, 751, 752]), array([  0,   1,   2,   3,   4,   5,   6,   7,  ...,
       175, 176, 177, 178, 179, 180, 181, 182]), 1, {'classifier__selected_model': ('svm', {}), 'scaler__selected_model': ('std', {'with_mean': True, 'with_std': True})})
        kwargs = {'error_score': 'raise', 'fit_params': {}, 'return_n_test_samples': True, 'return_parameters': False, 'return_times': True, 'return_train_score': 'warn'}
    132 
    133     def __len__(self):
    134         return self._size
    135 

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/model_selection/_validation.py in _fit_and_score(estimator=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X=      Department  Clustering  Degree  Degree Cen...000  0.000175  0.000017  

[753 rows x 8 columns], y=0       0.0
3       1.0
4       1.0
6       1.0
...e: Management Salary, Length: 753, dtype: float64, scorer={'score': make_scorer(roc_auc_score, needs_threshold=True)}, train=array([ 83,  84,  89,  90,  96, 102, 103, 105, 1..., 745, 746, 747, 748,
       749, 750, 751, 752]), test=array([  0,   1,   2,   3,   4,   5,   6,   7,  ...,
       175, 176, 177, 178, 179, 180, 181, 182]), verbose=1, parameters={'classifier__selected_model': ('svm', {}), 'scaler__selected_model': ('std', {'with_mean': True, 'with_std': True})}, fit_params={}, return_train_score='warn', return_parameters=False, return_n_test_samples=True, return_times=True, error_score='raise')
    483                              " make sure that it has been spelled correctly.)")
    484 
    485     else:
    486         fit_time = time.time() - start_time
    487         # _score will return dict if is_multimetric is True
--> 488         test_scores = _score(estimator, X_test, y_test, scorer, is_multimetric)
        test_scores = {}
        estimator = Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))])
        X_test =      Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns]
        y_test = 0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64
        scorer = {'score': make_scorer(roc_auc_score, needs_threshold=True)}
        is_multimetric = True
    489         score_time = time.time() - start_time - fit_time
    490         if return_train_score:
    491             train_scores = _score(estimator, X_train, y_train, scorer,
    492                                   is_multimetric)

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/model_selection/_validation.py in _score(estimator=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X_test=     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns], y_test=0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64, scorer={'score': make_scorer(roc_auc_score, needs_threshold=True)}, is_multimetric=True)
    518 
    519     Will return a single float if is_multimetric is False and a dict of floats,
    520     if is_multimetric is True
    521     """
    522     if is_multimetric:
--> 523         return _multimetric_score(estimator, X_test, y_test, scorer)
        estimator = Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))])
        X_test =      Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns]
        y_test = 0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64
        scorer = {'score': make_scorer(roc_auc_score, needs_threshold=True)}
    524     else:
    525         if y_test is None:
    526             score = scorer(estimator, X_test)
    527         else:

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/model_selection/_validation.py in _multimetric_score(estimator=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X_test=     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns], y_test=0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64, scorers={'score': make_scorer(roc_auc_score, needs_threshold=True)})
    548 
    549     for name, scorer in scorers.items():
    550         if y_test is None:
    551             score = scorer(estimator, X_test)
    552         else:
--> 553             score = scorer(estimator, X_test, y_test)
        score = undefined
        scorer = make_scorer(roc_auc_score, needs_threshold=True)
        estimator = Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))])
        X_test =      Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns]
        y_test = 0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64
    554 
    555         if hasattr(score, 'item'):
    556             try:
    557                 # e.g. unwrap memmapped scalars

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/metrics/scorer.py in __call__(self=make_scorer(roc_auc_score, needs_threshold=True), clf=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X=     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns], y=0      0.0
3      1.0
4      1.0
6      1.0
7   ...e: Management Salary, Length: 151, dtype: float64, sample_weight=None)
    189                 # For multi-output multi-class estimator
    190                 if isinstance(y_pred, list):
    191                     y_pred = np.vstack(p for p in y_pred).T
    192 
    193             except (NotImplementedError, AttributeError):
--> 194                 y_pred = clf.predict_proba(X)
        y_pred = undefined
        clf.predict_proba = <function Pipeline.predict_proba>
        X =      Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns]
    195 
    196                 if y_type == "binary":
    197                     y_pred = y_pred[:, 1]
    198                 elif isinstance(y_pred, list):

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/utils/metaestimators.py in <lambda>(*args=(     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns],), **kwargs={})
    110                     break
    111             else:
    112                 attrgetter(self.delegate_names[-1])(obj)
    113 
    114         # lambda, but not partial, allows help() to work with update_wrapper
--> 115         out = lambda *args, **kwargs: self.fn(obj, *args, **kwargs)
        args = (     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns],)
        kwargs = {}
    116         # update the docstring of the returned function
    117         update_wrapper(out, self.fn)
    118         return out
    119 

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/sklearn/pipeline.py in predict_proba(self=Pipeline(memory=None,
     steps=[('scaler', Pip..., shrinking=True,
  tol=0.001, verbose=False)))]), X=     Department  Clustering  Degree  Degree Cent...224  0.000486  0.000484  

[151 rows x 8 columns])
    352         """
    353         Xt = X
    354         for name, transform in self.steps[:-1]:
    355             if transform is not None:
    356                 Xt = transform.transform(Xt)
--> 357         return self.steps[-1][-1].predict_proba(Xt)
        self.steps.predict_proba = undefined
        Xt = array([[-1.25425361, -0.46040364,  0.45852572, .... -0.2750333 ,
        -0.43767636, -0.29704686]])
    358 
    359     @if_delegate_has_method(delegate='_final_estimator')
    360     def decision_function(self, X):
    361         """Apply transforms, and decision_function of the final estimator

...........................................................................
/Users/jessequinn/.pyenv/versions/3.7.0/lib/python3.7/site-packages/pipelinehelper/__init__.py in predict_proba(self=PipelineHelper(available_models={'svm': SVC(C=1....one, shrinking=True,
  tol=0.001, verbose=False)), x=array([[-1.25425361, -0.46040364,  0.45852572, .... -0.2750333 ,
        -0.43767636, -0.29704686]]))
     95         if hasattr(self.selected_model, "predict_proba"):
     96             method = getattr(self.selected_model, "predict_proba", None)
     97             if callable(method):
     98                 return method(x)
     99         else:
--> 100             raise Exception("Your model does not support predict_proba")
    101 
    102 

Exception: Your model does not support predict_proba
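
For context: the roc_auc scorer is built with needs_threshold=True, so it first tries decision_function on the pipeline. PipelineHelper does not expose decision_function, so scikit-learn falls back to predict_proba, and the selected SVC (probability=False by default) cannot provide it, which is where the exception above is raised. One workaround that appears to avoid the error is to enable probability estimates on the SVC. The sketch below is illustrative only; the step names and generate() calls follow the pipelinehelper README rather than the exact code from this report.

    # Sketch of a workaround, assuming the current (pre-fix) pipelinehelper:
    # giving the selected SVC a predict_proba lets the scorer's fallback succeed.
    from sklearn.model_selection import GridSearchCV
    from sklearn.pipeline import Pipeline
    from sklearn.preprocessing import StandardScaler
    from sklearn.svm import SVC
    from pipelinehelper import PipelineHelper

    pipe = Pipeline([
        ('scaler', PipelineHelper([('std', StandardScaler())])),
        ('classifier', PipelineHelper([('svm', SVC(probability=True))])),
    ])
    params = {
        'scaler__selected_model': pipe.named_steps['scaler'].generate(),
        'classifier__selected_model': pipe.named_steps['classifier'].generate(),
    }
    grid = GridSearchCV(pipe, params, scoring='roc_auc', cv=5, verbose=1, n_jobs=-1)
    # grid.fit(X_train, y_train) then scores each fold via predict_proba.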
bmurauer commented 5 years ago

Thanks for the report! I have added forwarding of the call to decision_function.
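
For reference, forwarding along these lines lets the roc_auc scorer use the selected model's decision_function directly instead of falling back to predict_proba. This is only a sketch of the idea, not the actual commit; it assumes the helper keeps the chosen estimator in self.selected_model, as the traceback above suggests.

    # Hypothetical forwarding sketch (not the actual pipelinehelper change):
    # delegate decision_function to the currently selected model, mirroring
    # how predict_proba is forwarded in the traceback above.
    def decision_function(self, x):
        method = getattr(self.selected_model, "decision_function", None)
        if callable(method):
            return method(x)
        raise AttributeError("Selected model does not support decision_function")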