seq_serving.py

import os

import tensorflow as tf


def build_serving_inputs(num_steps, verbose=False):
    """Build the serialized-example placeholder and the per-step encoder input tensors."""
    if verbose:
        print("About to create inputs using num_steps:", num_steps)
    serialized_input = tf.placeholder(tf.string, name='serialized_input')
    feature_configs = dict()
    names = []
    if verbose:
        print("About to create feature dict")
    # Create the feature config dict, which maps one scalar int64 FixedLenFeature
    # to each encoder time step.
    for i in range(num_steps):
        name = "encoder{0}".format(i)
        names.append(name)
        shape = []
        dtype = tf.int64
        # tf.FixedLenSequenceFeature would be the alternative for variable-length inputs.
        feature_configs[name] = tf.FixedLenFeature(shape=shape, dtype=dtype)
    if verbose:
        print("Feature dict created")
    parsed_example = tf.parse_example(serialized_input, feature_configs)
    if verbose:
        print("About to assign name to inputs")
    # Use tf.identity() to assign a stable name to each parsed input tensor.
    encoder_inputs = []
    for name in names:
        encoder_inputs.append(tf.identity(parsed_example[name], name=name))
    if verbose:
        print("Returning inputs")
    return serialized_input, encoder_inputs
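

# Illustrative sketch (not part of the original file): how a client might build
# the serialized request that matches the feature spec above. The helper name
# `make_serialized_request` and its `token_ids` argument are hypothetical.
def make_serialized_request(token_ids):
    """Pack one token id per "encoder{i}" feature into a serialized tf.train.Example."""
    feature = {
        "encoder{0}".format(i): tf.train.Feature(
            int64_list=tf.train.Int64List(value=[int(token)]))
        for i, token in enumerate(token_ids)
    }
    example = tf.train.Example(features=tf.train.Features(feature=feature))
    return example.SerializeToString()
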

def export_model_to_serving(sess, export_path_base, version, serialized_tf_example, inputs, outputs):
    """Export the session's graph as a SavedModel with two prediction signatures."""
    export_path = os.path.join(tf.compat.as_bytes(export_path_base), tf.compat.as_bytes(str(version)))
    print('Exporting trained model to', export_path)
    builder = tf.saved_model.builder.SavedModelBuilder(export_path)

    # Build the signature_def_map. `inputs` and `outputs` are assumed to be
    # lists of tensors, so every tensor is exposed under its own name.
    tensor_infos_input = dict()
    for input_ in inputs:
        tensor_infos_input[input_.name] = tf.saved_model.utils.build_tensor_info(input_)
    tensor_infos_output = dict()
    for output in outputs:
        tensor_infos_output[output.name] = tf.saved_model.utils.build_tensor_info(output)

    # Default signature: serialized tf.Example in, named output tensors out.
    prediction_inputs = tf.saved_model.utils.build_tensor_info(serialized_tf_example)
    prediction_signature = tf.saved_model.signature_def_utils.build_signature_def(
        inputs={tf.saved_model.signature_constants.PREDICT_INPUTS: prediction_inputs},
        outputs=tensor_infos_output,
        method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME)

    # Additional signature: raw input tensors in, named output tensors out.
    add_signature = tf.saved_model.signature_def_utils.build_signature_def(
        inputs=tensor_infos_input,
        outputs=tensor_infos_output,
        method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME)

    legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')
    builder.add_meta_graph_and_variables(
        sess,
        [tf.saved_model.tag_constants.SERVING],
        signature_def_map={
            'joke': add_signature,
            tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: prediction_signature,
        },
        legacy_init_op=legacy_init_op)
    builder.save()
    print('Done exporting!')
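

# Illustrative sketch (not part of the original file): one way the two helpers
# above might be wired together. The stand-in "model" (a single bias variable),
# `export_base`, and `num_steps` values are hypothetical placeholders; a real
# export would restore trained seq2seq weights before calling
# export_model_to_serving.
def example_export(export_base="/tmp/seq_export", num_steps=10, version=1):
    with tf.Graph().as_default():
        # Parse serialized tf.Example protos into per-step encoder inputs.
        serialized_input, encoder_inputs = build_serving_inputs(num_steps, verbose=True)

        # Stand-in for the real model: add a learnable bias to each step.
        bias = tf.Variable(1, dtype=tf.int64, name="bias")
        outputs = [tf.identity(step + bias, name="decoder{0}".format(i))
                   for i, step in enumerate(encoder_inputs)]

        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            # A real export would use tf.train.Saver().restore(sess, checkpoint)
            # here instead of initializing fresh variables.
            export_model_to_serving(sess, export_base, version,
                                    serialized_input, encoder_inputs, outputs)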