diff --git a/examples/regression_savedmodel.rs b/examples/regression_savedmodel.rs
index 2e39ce789e..aed22b8b8c 100644
--- a/examples/regression_savedmodel.rs
+++ b/examples/regression_savedmodel.rs
@@ -46,18 +46,31 @@ fn main() -> Result<(), Box<dyn Error>> {
 
     // Load the saved model exported by regression_savedmodel.py.
    let mut graph = Graph::new();
-    let session = SavedModelBundle::load(
-        &SessionOptions::new(),
-        &["train", "serve"],
-        &mut graph,
-        export_dir,
-    )?
-    .session;
-    let op_x = graph.operation_by_name_required("x")?;
-    let op_y = graph.operation_by_name_required("y")?;
-    let op_train = graph.operation_by_name_required("train")?;
-    let op_w = graph.operation_by_name_required("w")?;
-    let op_b = graph.operation_by_name_required("b")?;
+    let bundle =
+        SavedModelBundle::load(&SessionOptions::new(), &["serve"], &mut graph, export_dir)?;
+    let session = &bundle.session;
+
+    // train
+    let train_signature = bundle.meta_graph_def().get_signature("train")?;
+    let x_info = train_signature.get_input("x")?;
+    let y_info = train_signature.get_input("y")?;
+    let loss_info = train_signature.get_output("loss")?;
+    let op_x = graph.operation_by_name_required(&x_info.name().name)?;
+    let op_y = graph.operation_by_name_required(&y_info.name().name)?;
+    let op_train = graph.operation_by_name_required(&loss_info.name().name)?;
+
+    // internal parameters
+    let op_b = {
+        let b_signature = bundle.meta_graph_def().get_signature("b")?;
+        let b_info = b_signature.get_output("output")?;
+        graph.operation_by_name_required(&b_info.name().name)?
+    };
+
+    let op_w = {
+        let w_signature = bundle.meta_graph_def().get_signature("w")?;
+        let w_info = w_signature.get_output("output")?;
+        graph.operation_by_name_required(&w_info.name().name)?
+    };
 
     // Train the model (e.g. for fine tuning).
     let mut train_step = SessionRunArgs::new();
diff --git a/examples/regression_savedmodel/assets/.gitkeep b/examples/regression_savedmodel/assets/.gitkeep
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/examples/regression_savedmodel/regression_savedmodel.py b/examples/regression_savedmodel/regression_savedmodel.py
index 60a1966a21..d7e24147d8 100644
--- a/examples/regression_savedmodel/regression_savedmodel.py
+++ b/examples/regression_savedmodel/regression_savedmodel.py
@@ -1,43 +1,47 @@
 import tensorflow as tf
-from tensorflow.python.saved_model.builder import SavedModelBuilder
-from tensorflow.python.saved_model.signature_def_utils import build_signature_def
-from tensorflow.python.saved_model.signature_constants import REGRESS_METHOD_NAME
-from tensorflow.python.saved_model.tag_constants import TRAINING, SERVING
-from tensorflow.python.saved_model.utils import build_tensor_info
 
-x = tf.placeholder(tf.float32, name='x')
-y = tf.placeholder(tf.float32, name='y')
-w = tf.Variable(tf.random_uniform([1], -1.0, 1.0), name='w')
-b = tf.Variable(tf.zeros([1]), name='b')
-y_hat = tf.add(w * x, b, name="y_hat")
+class LinearRegression(tf.Module):
+    def __init__(self, name=None):
+        super(LinearRegression, self).__init__(name=name)
+        self.w = tf.Variable(tf.random.uniform([1], -1.0, 1.0), name='w')
+        self.b = tf.Variable(tf.zeros([1]), name='b')
+        self.optimizer = tf.keras.optimizers.SGD(0.5)
 
-loss = tf.reduce_mean(tf.square(y_hat - y))
-optimizer = tf.train.GradientDescentOptimizer(0.5)
-train = optimizer.minimize(loss, name='train')
+    @tf.function
+    def __call__(self, x):
+        y_hat = self.w * x + self.b
+        return y_hat
 
-init = tf.variables_initializer(tf.global_variables(), name='init')
+    @tf.function
+    def get_w(self):
+        return {'output': self.w}
+
+    @tf.function
+    def get_b(self):
+        return {'output': self.b}
+
+    @tf.function
+    def train(self, x, y):
+        with tf.GradientTape() as tape:
+            y_hat = self(x)
+            loss = tf.reduce_mean(tf.square(y_hat - y))
+        grads = tape.gradient(loss, self.trainable_variables)
+        _ = self.optimizer.apply_gradients(zip(grads, self.trainable_variables))
+        return {'loss': loss}
+
+
+model = LinearRegression()
+
+# Get concrete functions to generate signatures
+x = tf.TensorSpec([None], tf.float32, name='x')
+y = tf.TensorSpec([None], tf.float32, name='y')
+
+train = model.train.get_concrete_function(x, y)
+w = model.get_w.get_concrete_function()
+b = model.get_b.get_concrete_function()
+
+signatures = {'train': train, 'w': w, 'b': b}
 
 directory = 'examples/regression_savedmodel'
-builder = SavedModelBuilder(directory)
-
-with tf.Session(graph=tf.get_default_graph()) as sess:
-    sess.run(init)
-
-    signature_inputs = {
-        "x": build_tensor_info(x),
-        "y": build_tensor_info(y)
-    }
-    signature_outputs = {
-        "out": build_tensor_info(y_hat)
-    }
-    signature_def = build_signature_def(
-        signature_inputs, signature_outputs,
-        REGRESS_METHOD_NAME)
-    builder.add_meta_graph_and_variables(
-        sess, [TRAINING, SERVING],
-        signature_def_map={
-            REGRESS_METHOD_NAME: signature_def
-        },
-        assets_collection=tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS))
-    builder.save(as_text=False)
+tf.saved_model.save(model, directory, signatures=signatures)
\ No newline at end of file
diff --git a/examples/regression_savedmodel/saved_model.pb b/examples/regression_savedmodel/saved_model.pb
index 980fdc5198..4ad70d4bfa 100644
Binary files a/examples/regression_savedmodel/saved_model.pb and b/examples/regression_savedmodel/saved_model.pb differ
diff --git a/examples/regression_savedmodel/variables/variables.data-00000-of-00001 b/examples/regression_savedmodel/variables/variables.data-00000-of-00001
index 294ceed15c..69088c8ec5 100644
Binary files a/examples/regression_savedmodel/variables/variables.data-00000-of-00001 and b/examples/regression_savedmodel/variables/variables.data-00000-of-00001 differ
diff --git a/examples/regression_savedmodel/variables/variables.index b/examples/regression_savedmodel/variables/variables.index
index 9f37a8aa4c..b75ff9f90e 100644
Binary files a/examples/regression_savedmodel/variables/variables.index and b/examples/regression_savedmodel/variables/variables.index differ
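
Note on the Rust side: the rest of regression_savedmodel.rs (unchanged by this diff) drives training through SessionRunArgs using the ops resolved above. A minimal sketch of that usage, assuming only the tensorflow crate calls already visible in the diff; the x_values/y_values names and the data they hold are illustrative, not taken from the example:

    // Sketch (not part of the patch): feed the "x"/"y" inputs of the "train"
    // signature and run the op behind its "loss" output. Running that op
    // executes the whole tf.function, including the SGD update.
    let mut x_values = Tensor::<f32>::new(&[100]);
    let mut y_values = Tensor::<f32>::new(&[100]);
    for i in 0..100 {
        x_values[i] = i as f32 / 100.0;
        y_values[i] = 0.3 * x_values[i] + 0.1; // assumed target line
    }
    let mut train_step = SessionRunArgs::new();
    train_step.add_feed(&op_x, 0, &x_values);
    train_step.add_feed(&op_y, 0, &y_values);
    train_step.add_target(&op_train);
    for _ in 0..10 {
        session.run(&mut train_step)?;
    }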
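The dedicated "w" and "b" signatures in the Python rewrite are what make op_w and op_b resolvable at all: unlike the old TF1 export, the variables are not addressed by a plain operation name, so each @tf.function getter exposes a named "output" tensor that reads the variable. Reading the trained parameters back then looks roughly like this (again a sketch under the same assumptions, not part of the patch):

    // Sketch: fetch the values produced by the "w" and "b" signatures'
    // "output" tensors after training.
    let mut output_step = SessionRunArgs::new();
    let w_token = output_step.request_fetch(&op_w, 0);
    let b_token = output_step.request_fetch(&op_b, 0);
    session.run(&mut output_step)?;
    let w: f32 = output_step.fetch(w_token)?[0];
    let b: f32 = output_step.fetch(b_token)?[0];
    println!("w = {}, b = {}", w, b);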