Call the uncertainty-aware model to generate outputs for the test data
prediction = ensemble_NN(x_test)
I got this:
TypeError Traceback (most recent call last)
Cell In[93], line 6
3 ensemble_NN = capsa.EnsembleWrapper(standard_dense_NN)
5 # Build the model for regression, defining the loss function and optimizer
----> 6 ensemble_NN.compile(loss='mean_squared_error', optimizer='adam')
8 # Train the wrapped model for 30 epochs.
9 loss_history_ensemble = ensemble_NN.fit(x_train, y_train, epochs=30)
File ~/miniconda3/envs/mitdeep2/lib/python3.10/site-packages/capsa/epistemic/ensemble.py:87, in EnsembleWrapper.compile(self, optimizer, loss, metrics)
85 if len(optimizer) < self.num_members:
86 optim_conf = optim.serialize(optimizer[0])
---> 87 optimizer = [optim.deserialize(optim_conf) for _ in range(self.num_members)]
88 # losses and most keras metrics are stateless, no need to serialize as above
89 if len(loss) < self.num_members:
File ~/miniconda3/envs/mitdeep2/lib/python3.10/site-packages/capsa/epistemic/ensemble.py:87, in &lt;listcomp&gt;(.0)
85 if len(optimizer) < self.num_members:
86 optim_conf = optim.serialize(optimizer[0])
---> 87 optimizer = [optim.deserialize(optim_conf) for _ in range(self.num_members)]
88 # losses and most keras metrics are stateless, no need to serialize as above
89 if len(loss) < self.num_members:
File ~/miniconda3/envs/mitdeep2/lib/python3.10/site-packages/keras/src/optimizers/__init__.py:120, in deserialize(config, custom_objects, use_legacy_format, **kwargs)
118 if kwargs:
119 raise TypeError(f"Invalid keyword arguments: {kwargs}")
--> 120 if len(config["config"]) > 0:
121 # If the optimizer config is not empty, then we use the value of
122 # is_legacy_optimizer to override use_legacy_optimizer. If
123 # is_legacy_optimizer does not exist in config, it means we are
124 # using the legacy optimzier.
125 use_legacy_optimizer = config["config"].get("is_legacy_optimizer", True)
126 if (
127 tf.__internal__.tf2.enabled()
128 and tf.executing_eagerly()
(...)
132 # We observed a slowdown of optimizer on M1 Mac, so we fall back to the
133 # legacy optimizer for M1 users now, see b/263339144 for more context.
When I am running the following code: standard_dense_NN = create_dense_NN()
Wrap the dense network for epistemic uncertainty estimation with an Ensemble
ensemble_NN = capsa.EnsembleWrapper(standard_dense_NN)
Build the model for regression, defining the loss function and optimizer
ensemble_NN.compile(loss='mean_squared_error', optimizer='adam')
Train the wrapped model for 30 epochs.
loss_history_ensemble = ensemble_NN.fit(x_train, y_train, epochs=30)
Call the uncertainty-aware model to generate outputs for the test data
prediction = ensemble_NN(x_test)
I got this: TypeError Traceback (most recent call last) Cell In[93], line 6 3 ensemble_NN = capsa.EnsembleWrapper(standard_dense_NN) 5 # Build the model for regression, defining the loss function and optimizer ----> 6 ensemble_NN.compile(loss='mean_squared_error', optimizer='adam') 8 # Train the wrapped model for 30 epochs. 9 loss_history_ensemble = ensemble_NN.fit(x_train, y_train, epochs=30)
File ~/miniconda3/envs/mitdeep2/lib/python3.10/site-packages/capsa/epistemic/ensemble.py:87, in EnsembleWrapper.compile(self, optimizer, loss, metrics) 85 if len(optimizer) < self.num_members: 86 optim_conf = optim.serialize(optimizer[0]) ---> 87 optimizer = [optim.deserialize(optim_conf) for _ in range(self.num_members)] 88 # losses and most keras metrics are stateless, no need to serialize as above 89 if len(loss) < self.num_members:
File ~/miniconda3/envs/mitdeep2/lib/python3.10/site-packages/capsa/epistemic/ensemble.py:87, in &lt;listcomp&gt;(.0)
85 if len(optimizer) < self.num_members:
86 optim_conf = optim.serialize(optimizer[0])
---> 87 optimizer = [optim.deserialize(optim_conf) for _ in range(self.num_members)]
88 # losses and most keras metrics are stateless, no need to serialize as above
89 if len(loss) < self.num_members:
File ~/miniconda3/envs/mitdeep2/lib/python3.10/site-packages/keras/src/optimizers/__init__.py:120, in deserialize(config, custom_objects, use_legacy_format, **kwargs) 118 if kwargs: 119 raise TypeError(f"Invalid keyword arguments: {kwargs}") --> 120 if len(config["config"]) > 0: 121 # If the optimizer config is not empty, then we use the value of 122 #
`is_legacy_optimizer` to override `use_legacy_optimizer`. If 123 # `is_legacy_optimizer` does not exist in config, it means we are 124 # using the legacy optimzier. 125 use_legacy_optimizer = config["config"].get("is_legacy_optimizer", True) 126 if ( 127 tf.__internal__.tf2.enabled() 128 and tf.executing_eagerly() (...) 132 # We observed a slowdown of optimizer on M1 Mac, so we fall back to the 133 # legacy optimizer for M1 users now, see b/263339144 for more context.

TypeError: string indices must be integers