Closed jageshmaharjan closed 5 years ago
Never mind — I managed to freeze the model, but the frozen graph does not expose a signature definition, so TensorFlow Serving cannot serve it. I'll open another issue for that.
from config import parse_args
from iterator import DiscoveryIterator, SmartIterator
from keras.optimizers import RMSprop
from models import ReferringRelationshipsModel
from utils.eval_utils import format_results_eval
from utils.visualization_utils import objdict
from utils.eval_utils import get_metrics
from utils.train_utils import get_loss_func
import json
import os
# Evaluation-time setup: rebuild the model graph from the arguments saved at
# training time, compile it, and restore the trained weights.
args = parse_args(evaluation=True)
models_dir = '/mnt/data/savedir/'
# NOTE(review): this local constant is unused below — L19 reads
# args.heatmap_threshold instead. Kept in case later code relies on it;
# confirm and delete if dead.
heatmap_threshold = 0.5
# Training-time hyperparameters were serialized to args.json next to the
# checkpoints; reload them so the rebuilt graph matches the saved weights.
params = objdict(json.load(open(os.path.join(models_dir, "args.json"), "r")))
params.discovery = args.discovery
params.shuffle = False  # deterministic iteration for evaluation
metrics = get_metrics(params.output_dim, args.heatmap_threshold)
relationships_model = ReferringRelationshipsModel(params)
model = relationships_model.build_model()
loss_func = get_loss_func(params.w1)
# Two heads (subject heatmap, object heatmap) share the same loss function.
model.compile(loss=[loss_func, loss_func], optimizer=RMSprop(lr=0.01), metrics=metrics)
# Reuse models_dir instead of duplicating the hard-coded path literal.
model.load_weights(os.path.join(models_dir, 'model22-2.81.h5'))
Inspecting the rebuilt model's input and output tensors in the interpreter:

>>> model.inputs
[<tf.Tensor 'input_1:0' shape=(?, 224, 224, 3) dtype=float32>, <tf.Tensor 'input_2:0' shape=(?, 1) dtype=float32>, <tf.Tensor 'input_4:0' shape=(?, 70) dtype=float32>, <tf.Tensor 'input_3:0' shape=(?, 1) dtype=float32>]
>>> model.outputs
[<tf.Tensor 'subject/Reshape:0' shape=(?, 196) dtype=float32>, <tf.Tensor 'object/Reshape:0' shape=(?, 196) dtype=float32>]
import tensorflow as tf
from keras import backend as K  # was imported twice; deduplicated

# Switch Keras to inference mode *before* freezing so layers with
# train/test behavior (dropout, batch norm) are baked into the graph
# with their test-time paths.
K.set_learning_phase(0)
def freeze_session(session, keep_var_names=None, output_names=None, clear_devices=True):
    """Freeze the state of a TF1 session into a pruned, constant-only graph.

    Every variable reachable from *output_names* is converted to a constant,
    and all nodes not needed to compute those outputs are pruned away.

    Args:
        session: The TensorFlow session holding the live variables.
        keep_var_names: Optional iterable of variable op names to leave as
            variables (not frozen). Defaults to freezing everything.
        output_names: Names of the graph's real output ops. Only these (and
            their ancestors) survive pruning.
        clear_devices: If True, strip device placements so the frozen graph
            is portable across machines.

    Returns:
        A GraphDef with variables folded into constants.
    """
    from tensorflow.python.framework.graph_util import convert_variables_to_constants
    graph = session.graph
    with graph.as_default():
        freeze_var_names = list(
            set(v.op.name for v in tf.global_variables()).difference(keep_var_names or []))
        # BUG FIX: the original appended every global variable's op name to
        # output_names, which kept all variable ops alive in the frozen graph
        # and buried the model's real outputs — making it impossible to build
        # a clean serving signature. Only the caller-supplied outputs are kept.
        output_names = list(output_names or [])
        input_graph_def = graph.as_graph_def()
        if clear_devices:
            for node in input_graph_def.node:
                node.device = ""
        frozen_graph = convert_variables_to_constants(
            session, input_graph_def, output_names, freeze_var_names)
    return frozen_graph
# Freeze the live Keras session, keeping only the model's actual output ops,
# then serialize the resulting GraphDef to model/tf_model.pb in binary form.
output_node_names = [tensor.op.name for tensor in model.outputs]
frozen_graph = freeze_session(K.get_session(), output_names=output_node_names)
tf.train.write_graph(frozen_graph, "model", "tf_model.pb", as_text=False)
I then copied the frozen graph and created a numbered version directory, as TensorFlow Serving expects:
.
├── 1
│ └── saved_model.pb
└── tf_model.pb
I trained on the Visual Genome dataset, and the following is the saved Keras model.
I am trying to convert the trained model (.h5) into protocol buffer (.pb) format. Since you mentioned that your Keras checkpointer sets save_weights_only to True, I am rebuilding the model graph from model.py and then loading the weights — and when loading the model, I get this error.
The way I am loading the graph is as follows: