Export an Inference GraphDef File - 1.1 English

AI Optimizer User Guide (UG1333)

Document ID
UG1333
Release Date
2020-07-07
Version
1.1 English

Create a file named export_inf_graph.py and add the following code:

from google.protobuf import text_format
from est_cnn import cnn_model_fn
from tensorflow.keras import backend as K
from tensorflow.python.platform import gfile
import tensorflow as tf

# Command-line flag: destination path for the exported text-format GraphDef
# (.pbtxt). Required — main() raises if it is left empty.
tf.app.flags.DEFINE_string(
    'output_file', '', 'Where to save the resulting file to.')

# Global flag container; values are parsed when tf.app.run() invokes main().
FLAGS = tf.app.flags.FLAGS

def main(_):
  """Build the CNN estimator model's evaluation graph and export it.

  Constructs the graph with fixed single-example MNIST-shaped placeholder
  inputs, then writes the resulting GraphDef in human-readable text format
  to FLAGS.output_file.

  Raises:
    ValueError: if --output_file was not supplied on the command line.
  """
  # Fail fast when no destination path was given.
  if not FLAGS.output_file:
    raise ValueError('You must supply the path to save to with --output_file')
  tf.logging.set_verbosity(tf.logging.INFO)

  export_graph = tf.Graph()
  with export_graph.as_default():
    # Placeholder inputs: one 28x28 single-channel image and one label.
    image_input = tf.placeholder(name='image', dtype=tf.float32,
                                 shape=[1, 28, 28, 1])
    label_input = tf.placeholder(name='label', dtype=tf.int32, shape=[1])

    # Instantiate the model in EVAL mode so the inference ops are added
    # to export_graph. The returned spec is not needed here.
    cnn_model_fn({"x": image_input}, label_input, tf.estimator.ModeKeys.EVAL)

    # Serialize the graph to text-format protobuf and write it out.
    serialized = text_format.MessageToString(export_graph.as_graph_def())
    with gfile.GFile(FLAGS.output_file, 'w') as out_file:
      out_file.write(serialized)
    print("Finish export inference graph")

if __name__ == '__main__':
  # tf.app.run() parses command-line flags into FLAGS and then calls main().
  tf.app.run()