Closed omolchanov closed 5 months ago
Can you provide an MRE? Thanks!
Sure, thanks!
from pandas import read_csv
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from scikeras.wrappers import KerasRegressor
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
# Load the Boston-housing-style dataset: 13 feature columns, target in column 13.
# NOTE: `delim_whitespace=True` is deprecated since pandas 2.2 and removed in
# pandas 3.0; `sep=r"\s+"` is the documented, behavior-identical replacement.
dataframe = read_csv("housing.csv", sep=r"\s+", header=None)
dataset = dataframe.values
X = dataset[:, 0:13]   # all 13 predictor columns
Y = dataset[:, 13]     # regression target (last column)
def baseline_model():
    """Build and compile the baseline regression network.

    Returns a compiled Keras ``Sequential`` model: one hidden ReLU layer of
    13 units (matching the 13 input features) feeding a single linear output
    unit, trained with Adam on mean squared error.
    """
    layers = [
        Dense(13, input_shape=(13,), kernel_initializer='normal',
              activation='relu'),
        Dense(1, kernel_initializer='normal'),
    ]
    net = Sequential(layers)
    net.compile(loss='mean_squared_error', optimizer='adam')
    return net
# Wrap the model builder so scikit-learn's cross-validation can drive it,
# then score with 10-fold CV using negative MSE (sklearn's convention:
# higher is better, so MSE is negated).
estimator = KerasRegressor(
    model=baseline_model,
    epochs=100,
    batch_size=5,
    verbose=0,
)
cv_splitter = KFold(n_splits=10)
results = cross_val_score(
    estimator, X, Y,
    cv=cv_splitter,
    scoring='neg_mean_squared_error',
)
print("Baseline: %.2f (%.2f) MSE" % (results.mean(), results.std()))
Adrian, any luck with this broken saving logic?
I haven't had time to look into this yet. It'll probably be next week.
Hello Adrian. Sorry to bother you with this, but have you made any progress on fixing this adapter?
Thanks
I'm looking into #315 and I feel like you may be using a newer version of Keras. What version of keras are you using?
Keras version is 3.0.5.
Okay yes we don't support that version, see #315. I'm going to close this since it will be fixed via that issue.
Ok, thank you
# Minimal model: a single ReLU unit compiled for MAE, evaluated with
# scikit-learn cross-validation via the SciKeras wrapper.
model = Sequential()
model.add(Dense(1, activation='relu', kernel_initializer='normal'))
model.compile(
    loss='mean_absolute_error',
    optimizer='adam',
    metrics=['mean_absolute_error'],
)

estimator = KerasRegressor(model=model, epochs=1, batch_size=15, verbose=0)
results = cross_val_score(
    estimator, X, y,
    cv=cv,
    scoring='neg_mean_squared_error',
)
The error is raised here, in `_saving_utils.py`:
"""Support for Pythons's Pickle protocol.""" with _get_temp_folder() as temp_dir: model.save(tempdir) b = BytesIO() with tarfile.open(fileobj=b, mode="w") as archive: for root, , filenames in tf_io.gfile.walk(temp_dir): for filename in filenames: dest = os.path.join(root, filename) with tf_io.gfile.GFile(dest, "rb") as f: info = tarfile.TarInfo(name=os.path.relpath(dest, temp_dir)) info.size = f.size() archive.addfile(tarinfo=info, fileobj=f) tf_io.gfile.remove(dest) b.seek(0) model_bytes = np.asarray(memoryview(b.read())) return (unpack_keras_model, (model_bytes,))