Prediction from model saved with `tf.estimator.Estimator`

Posted 2020-05-19 07:54

I am using tf.estimator.Estimator to train a model:

import os
import shutil

import tensorflow as tf

# `mnist` and `runs_path` are assumed to be defined elsewhere in the script
# (e.g. MNIST loaded via tensorflow.examples.tutorials.mnist).

def model_fn(features, labels, mode, params, config):

    input_image = features["input_image"]

    eval_metric_ops = {}
    predictions = {}

    # Create model
    with tf.name_scope('Model'):

        W = tf.Variable(tf.zeros([784, 10]), name="W")
        b = tf.Variable(tf.zeros([10]), name="b")
        # Keep raw logits here; softmax_cross_entropy_with_logits below applies
        # the softmax itself (applying it twice is a common bug)
        logits = tf.add(tf.matmul(input_image, W, name="MATMUL"), b, name="logits")

    loss = None
    train_op = None

    if mode != tf.estimator.ModeKeys.PREDICT:
        loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=logits))
        train_op = tf.contrib.layers.optimize_loss(loss=loss,
                                                       global_step=tf.contrib.framework.get_global_step(),
                                                       learning_rate=params["learning_rate"],
                                                       optimizer=params["optimizer"])
    # Add predictions (create the ops inside the name scope so they end up
    # under "Predictions/..." in the graph)
    with tf.name_scope('Predictions'):
        classes = tf.as_string(tf.argmax(input=logits, axis=1, name="class"))
        predictions["logits"] = logits
        predictions["classes"] = classes

    export_outputs = {"classes": tf.estimator.export.ClassificationOutput(classes=classes)}
    export_outputs = {"classes": tf.estimator.export.PredictOutput({"labels": classes})}

    spec = tf.estimator.EstimatorSpec(mode=mode,
                                      predictions=predictions,
                                      loss=loss,
                                      train_op=train_op,
                                      eval_metric_ops=eval_metric_ops,
                                      export_outputs=export_outputs,
                                      training_chief_hooks=None,
                                      training_hooks=None,
                                      scaffold=None)
    return spec

def input_fn(dataset, n=10):
    return dataset.images[:n], dataset.labels[:n]
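
Note that tf.estimator generally expects input_fn to return TensorFlow tensors rather than raw NumPy arrays. A minimal sketch of the usual wrapper, assuming the MNIST arrays above (tf.estimator.inputs.numpy_input_fn is part of TF 1.x):

train_input_fn = tf.estimator.inputs.numpy_input_fn(
    x={"input_image": mnist.train.images[:15]},
    y=mnist.train.labels[:15],
    batch_size=15,
    num_epochs=None,  # cycle indefinitely; `steps` below bounds training
    shuffle=True)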


model_params = {"learning_rate": 1e-3,
                "optimizer": "Adam"}

#run_path = os.path.join(runs_path, datetime.now().strftime("%Y-%m-%d-%H-%M-%S"))
run_path = os.path.join(runs_path, "run1")
if os.path.exists(run_path):
    shutil.rmtree(run_path)

estimator = tf.estimator.Estimator(model_fn=model_fn, model_dir=run_path, params=model_params)


# Train
inputs = lambda: input_fn(mnist.train, n=15)
estimator.train(input_fn=inputs, steps=1000)

Model and weights are correctly saved during training.

Now I want to reload the model + weights in another script in order to make predictions.

But I don't know how to specify the input because I have no reference to it in the model_fn function.

# Get some data to predict
input_data = mnist.test.images[:5]

tf.reset_default_graph()
run_path = os.path.join(runs_path, "run1")

# Load the model (graph)
input_checkpoint = os.path.join(run_path, "model.ckpt-1000")
saver = tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=True)

# Restore the weights
sess = tf.InteractiveSession()
saver.restore(sess, input_checkpoint)
graph = sess.graph

# Get the op to compute for prediction
predict_op = graph.get_operation_by_name("Predictions/class")

# predictions = sess.run(predict_op, feed_dict=????)
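
(Side note: graph.get_operation_by_name returns an Operation, which cannot be fetched as a value directly; a fetchable value needs the output Tensor, named "<op_name>:<output_index>". A sketch, assuming the op name above is correct:)

predict_tensor = graph.get_tensor_by_name("Predictions/class:0")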

Here is what graph.get_collection("variables") returns:

[<tf.Variable 'global_step:0' shape=() dtype=int64_ref>,
 <tf.Variable 'Model/W:0' shape=(784, 10) dtype=float32_ref>,
 <tf.Variable 'Model/b:0' shape=(10,) dtype=float32_ref>,
 <tf.Variable 'OptimizeLoss/learning_rate:0' shape=() dtype=float32_ref>,
 <tf.Variable 'OptimizeLoss/beta1_power:0' shape=() dtype=float32_ref>,
 <tf.Variable 'OptimizeLoss/beta2_power:0' shape=() dtype=float32_ref>,
 <tf.Variable 'OptimizeLoss/Model/W/Adam:0' shape=(784, 10) dtype=float32_ref>,
 <tf.Variable 'OptimizeLoss/Model/W/Adam_1:0' shape=(784, 10) dtype=float32_ref>,
 <tf.Variable 'OptimizeLoss/Model/b/Adam:0' shape=(10,) dtype=float32_ref>,
 <tf.Variable 'OptimizeLoss/Model/b/Adam_1:0' shape=(10,) dtype=float32_ref>]

Do I need to specify a tf.placeholder for the input? But then how does TensorFlow know that the input data should feed this specific placeholder?

Also, if I add something like features = tf.constant(features, name="input") at the beginning of the model, I can't use it later, because retrieving "input" from the graph gives me an Operation, not a Tensor.


EDIT

After more investigation, I found that I need to save my model using the Estimator.export_savedmodel() method (and not by reusing the checkpoints that are automatically saved during training).

feature_spec = {"input_image": tf.placeholder(dtype=tf.float32, shape=[None, 784])}

input_receiver_fn = tf.estimator.export.build_raw_serving_input_receiver_fn(feature_spec)
# export_savedmodel writes a timestamped subdirectory under model_path and
# returns its path -- capture it, since that is the directory you load later
export_dir = estimator.export_savedmodel(model_path, input_receiver_fn, as_text=True)

Then I tried to load the model and run predictions, but I don't know how to feed my NumPy images to the model:

preds = sess.run("class", feed_dict={"input_image": input_data})

And the resulting error:

/home/hadim/local/conda/envs/ws/lib/python3.6/site-packages/tensorflow/python/client/session.py in run(self, fetches, feed_dict, options, run_metadata)
    776     try:
    777       result = self._run(None, fetches, feed_dict, options_ptr,
--> 778                          run_metadata_ptr)
    779       if run_metadata:
    780         proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

/home/hadim/local/conda/envs/ws/lib/python3.6/site-packages/tensorflow/python/client/session.py in _run(self, handle, fetches, feed_dict, options, run_metadata)
    931           except Exception as e:
    932             raise TypeError('Cannot interpret feed_dict key as Tensor: '
--> 933                             + e.args[0])
    934 
    935           if isinstance(subfeed_val, ops.Tensor):

TypeError: Cannot interpret feed_dict key as Tensor: The name 'input_image' looks like an (invalid) Operation name, not a Tensor. Tensor names must be of the form "<op_name>:<output_index>".

4 Answers
放我归山
#2 · 2020-05-19 08:28

As for the TypeError, I solved it this way.

First, name the placeholder:

feature_spec = {"input_image": tf.placeholder(dtype=tf.float32, shape=[None, 784], name='input_image')}

Then you can use it like this:

feed_dict={"input_image:0": input_data}

Hope it helps someone.


EDIT

In this question, after estimator.export_savedmodel(...) you can run predictions like this:

with tf.Session(graph=tf.Graph()) as sess:
    # Load the SavedModel produced by estimator.export_savedmodel()
    meta_graph_def = tf.saved_model.loader.load(sess, [tf.saved_model.tag_constants.SERVING], model_path)
    signature = meta_graph_def.signature_def
    # 'classes' is the export_outputs key; 'input_image' and 'labels' are the
    # input/output names declared in the receiver fn and PredictOutput
    x_tensor_name = signature['classes'].inputs['input_image'].name
    y_tensor_name = signature['classes'].outputs['labels'].name
    x = sess.graph.get_tensor_by_name(x_tensor_name)
    y = sess.graph.get_tensor_by_name(y_tensor_name)
    predictions = sess.run(y, {x: mnist.test.images[:5]})
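
If you are not sure which signature keys exist in the export, you can print them after loading:

print(list(meta_graph_def.signature_def.keys()))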
We Are One
#3 · 2020-05-19 08:34

I predicted successfully using tensorflow.contrib.predictor:

from tensorflow.contrib import predictor

predict_fn = predictor.from_saved_model(
    export_dir='model/1535012949',  # your model path
    signature_def_key='predict', 
)

predictions = predict_fn({'examples': examples})  # FYI, rename to `input_image`
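
The dict key passed to predict_fn must match the input name declared in the serving input receiver; with the build_raw_serving_input_receiver_fn from the question it would presumably be:

predictions = predict_fn({'input_image': mnist.test.images[:5]})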

But I also want to predict with a session and tensors directly, so that I can use the trained model from other languages. Hoping for a complete answer!

再贱就再见
#4 · 2020-05-19 08:48

The name of your input Tensor is probably input_image:0.

You can list the signatures of your saved model by loading it and printing the default one:

with tf.Session(graph=tf.Graph()) as sess:
    meta_graph_def = tf.saved_model.loader.load(sess, [tf.saved_model.tag_constants.SERVING], model_path)
    print(meta_graph_def.signature_def[tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY])

That should list the expected input/output Tensors.

够拽才男人
#5 · 2020-05-19 08:48

I am working with tf.contrib.learn.Estimator. As far as I can see, the syntax and method signatures are almost the same, so I believe the differences come down to the TensorFlow version. You can create the Estimator as usual with something like:

from tensorflow.contrib import learn

estimator = learn.Estimator(
    model_fn=your_model,
    model_dir="tmp",
    config=tf.contrib.learn.RunConfig(
        save_checkpoints_steps=10,
        save_summary_steps=10,
        save_checkpoints_secs=None
    )
)

Then you train with estimator.fit(input_fn=input_function, steps=100).

And then you can run predictions by calling:

estimator.predict(x=prediction)

Note, there is a trick related to a known TensorFlow issue: calling predict does not properly initialize the Estimator, so you need to call

estimator.evaluate(x=prediction, y=label_array, steps=1)

before calling predict.
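
Putting it together, a minimal sketch for tf.contrib.learn (here `prediction` is a NumPy array of inputs and `label_array` the matching labels; both stand in for your own data):

# Work around the initialization issue, then predict
estimator.evaluate(x=prediction, y=label_array, steps=1)
results = list(estimator.predict(x=prediction, as_iterable=True))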

Hope this helps.
