Closed howard-tran closed 2 years ago
Hello, very sorry, everyone.
I took a nap, then started reading more of the docs. It seems like this is what I needed — it might help someone else :)))
import torch
import tensorflow as tf
import numpy as np
# Register the training Parquet dataset as a temporary Spark SQL view.
# NOTE(review): `spark` and `train_hdfs_dir` are defined outside this snippet.
spark.read.parquet(train_hdfs_dir).createOrReplaceTempView("train_view")
# Build the training DataFrame: a one-element feature array (the cosine
# distance) plus an SVM-style label remapped from {0, 1} to {-1, 1}.
train_samples_bigdl = spark.sql("""
select array(tv.cos_dist) as feature,
cast((CASE WHEN tv.label = 0 THEN -1 ELSE 1 END) as int) as label
from train_view as tv
""")
# Total row count; used below to derive steps_per_epoch for est.fit.
train_size = train_samples_bigdl.count()
def model_creator(config):
    """Build and compile a linear-SVM-style Keras model.

    Invoked by the Orca estimator on each worker to construct a fresh model
    replica.

    Args:
        config: Estimator config dict passed in by Orca (unused here).

    Returns:
        A compiled ``tf.keras.Model`` that takes a single scalar feature and
        is trained with a hinge (max-margin) loss expecting labels in {-1, 1}.
    """
    x_inputs = tf.keras.Input(shape=(1,))
    initializer = tf.keras.initializers.HeNormal()
    regularizer = tf.keras.regularizers.L2(0.001)
    # NOTE(review): units=10 produces a 10-dimensional output while the label
    # is a single {-1, 1} scalar, so the hinge loss below broadcasts the label
    # across all 10 units. A plain linear SVM would use units=1 — confirm
    # intent before changing.
    SVM_layer = tf.keras.layers.Dense(
        units=10,
        kernel_initializer=initializer,
        bias_initializer=initializer,
        kernel_regularizer=regularizer,
        bias_regularizer=regularizer,
    )

    def SVM_linear_loss(y_true, y_pred):
        # Hinge loss: mean(max(0, 1 - y_true * y_pred)).
        # (The original pasted code had a stray line-continuation backslash
        # after this expression that glued it to the `return`, a SyntaxError.)
        loss_t = tf.math.maximum(
            0.0,
            tf.math.subtract(
                1.0, tf.math.multiply(tf.cast(y_true, tf.float32), y_pred)
            ),
        )
        return tf.math.reduce_mean(loss_t)

    model = tf.keras.Model(inputs=x_inputs, outputs=SVM_layer(x_inputs))
    model.compile(optimizer="rmsprop", loss=SVM_linear_loss)
    return model
# Distributed training via BigDL Orca's Ray-backed TensorFlow 2 estimator.
from bigdl.orca.learn.tf2.ray_estimator import TensorFlow2Estimator
# Each of the 10 workers per node calls model_creator to build its own
# replica of the compiled Keras model.
est = TensorFlow2Estimator(
model_creator=model_creator, \
workers_per_node=10, \
)
batch_size = 4096
# Train directly on the Spark DataFrame; Orca feeds the named feature and
# label columns to the Keras model on each worker.
est.fit(data=train_samples_bigdl, \
epochs=2, \
batch_size=batch_size, \
steps_per_epoch= train_size // batch_size, \
feature_cols=['feature'], \
label_cols=['label'])
# Persist only the WEIGHTS (HDF5), not the full model. As the issue text
# below shows, tf.keras.models.load_model fails on this file — restore by
# rebuilding the architecture with model_creator and loading weights into it.
est.get_model().save_weights("/home/hadoop/model/re1/model.h5")
new_model = model_creator(None)
new_model.load_weights("/home/hadoop/model/re1/model.h5")
I followed this tutorial:
https://bigdl.readthedocs.io/en/latest/doc/Orca/QuickStart/orca-tf2keras-quickstart.html
However, I cannot load the model with this command:
new_model = tf.keras.models.load_model("/home/hadoop/model/re1/model.ckpt")
It gives this error: