
tensorflow: serving model always returns the same prediction


I need your help, I'm a bit stuck here.

I retrained a classification TensorFlow model and it gives very good results.
Now I want to serve it with TensorFlow Serving.
I managed to serve it, but whatever the input is, it always gives me the same result.

I think something is wrong with the way I export the model, but I can't figure out what. My code is below.

Could somebody help me? Thanks a lot, folks.

Here is the function that converts my input image into an object TensorFlow can read:

import tensorflow as tf


def read_tensor_from_image_file(file_name, input_height=299, input_width=299,
                                input_mean=0, input_std=255):
    input_name = "file_reader"
    output_name = "normalized"
    file_reader = tf.read_file(file_name, input_name)
    if file_name.endswith(".png"):
        image_reader = tf.image.decode_png(file_reader, channels=3,
                                           name='png_reader')
    elif file_name.endswith(".gif"):
        image_reader = tf.squeeze(tf.image.decode_gif(file_reader,
                                                      name='gif_reader'))
    elif file_name.endswith(".bmp"):
        image_reader = tf.image.decode_bmp(file_reader, name='bmp_reader')
    else:
        image_reader = tf.image.decode_jpeg(file_reader, channels=3,
                                            name='jpeg_reader')
    float_caster = tf.cast(image_reader, tf.float32)
    dims_expander = tf.expand_dims(float_caster, 0)
    resized = tf.image.resize_bilinear(dims_expander, [input_height, input_width])
    normalized = tf.divide(tf.subtract(resized, [input_mean]), [input_std])
    sess = tf.Session()
    result = sess.run(normalized)

    return result, normalized
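
For reference, a minimal call of this function might look like the sketch below; the image path is a placeholder, not a file from the original post:

# Hypothetical usage; "some_image.jpg" is a placeholder path.
t, predict_inputs_tensor = read_tensor_from_image_file(
    "some_image.jpg", input_height=299, input_width=299,
    input_mean=0, input_std=255)
print(t.shape, t.dtype)  # expected: (1, 299, 299, 3) float32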

And this is how I export the model:
import os

import numpy as np
import tensorflow as tf

# model_file, file_name, label_file, input_height, input_width, input_mean,
# input_std, input_layer, output_layer and load_labels come from the
# retraining script.

# Getting the graph from the saved .pb file
graph = tf.Graph()
graph_def = tf.GraphDef()
with open(model_file, "rb") as f:
    graph_def.ParseFromString(f.read())
with graph.as_default():
    tf.import_graph_def(graph_def)

# Below, "t" is the result of the transformation and "predict_inputs_tensor"
# the tensor before computation.
t, predict_inputs_tensor = read_tensor_from_image_file(file_name,
                                                       input_height=input_height,
                                                       input_width=input_width,
                                                       input_mean=input_mean,
                                                       input_std=input_std)

input_name = "import/" + input_layer
output_name = "import/" + output_layer

input_operation = graph.get_operation_by_name(input_name)
output_operation = graph.get_operation_by_name(output_name)

# Let's run a prediction to get an example output
with tf.Session(graph=graph) as sess:
    results = sess.run(output_operation.outputs[0],
                       {input_operation.outputs[0]: t})
results = np.squeeze(results)

# Creating labels
class_descriptions = []
labels = load_labels(label_file)
for s in labels:
    class_descriptions.append(s)
classes_output_tensor = tf.constant(class_descriptions)
table = tf.contrib.lookup.index_to_string_table_from_tensor(classes_output_tensor)
classes = table.lookup(tf.to_int64(labels))

top_k = results.argsort()[-len(labels):][::-1]
scores_output_tensor, indices = tf.nn.top_k(results, len(labels))

# Display
for i in top_k:
    print(labels[i], results[i])

version = 1
path = "/Users/dboudeau/depot/tensorflow-for-poets-2/tf_files"

tf.app.flags.DEFINE_integer('version', version, 'version number of the model.')
tf.app.flags.DEFINE_string('work_dir', path, 'your older model directory.')
tf.app.flags.DEFINE_string('model_dir', '/tmp/magic_model', 'saved model directory')
FLAGS = tf.app.flags.FLAGS

with tf.Session() as sess:
    classify_inputs_tensor_info = tf.saved_model.utils.build_tensor_info(predict_inputs_tensor)

    export_path = os.path.join(
        tf.compat.as_bytes(FLAGS.model_dir),
        tf.compat.as_bytes(str(FLAGS.version)))

    print(export_path)
    builder = tf.saved_model.builder.SavedModelBuilder(export_path)

    # Define the signature_def map here
    predict_inputs_tensor_info = tf.saved_model.utils.build_tensor_info(predict_inputs_tensor)
    classes_output_tensor_info = tf.saved_model.utils.build_tensor_info(classes_output_tensor)
    scores_output_tensor_info = tf.saved_model.utils.build_tensor_info(scores_output_tensor)

    classification_signature = (
        tf.saved_model.signature_def_utils.build_signature_def(
            inputs={
                tf.saved_model.signature_constants.CLASSIFY_INPUTS:
                    classify_inputs_tensor_info
            },
            outputs={
                tf.saved_model.signature_constants.CLASSIFY_OUTPUT_CLASSES:
                    classes_output_tensor_info,
                tf.saved_model.signature_constants.CLASSIFY_OUTPUT_SCORES:
                    scores_output_tensor_info
            },
            method_name=tf.saved_model.signature_constants.CLASSIFY_METHOD_NAME))

    prediction_signature = (
        tf.saved_model.signature_def_utils.build_signature_def(
            inputs={'images': predict_inputs_tensor_info},
            outputs={
                'classes': classes_output_tensor_info,
                'scores': scores_output_tensor_info
            },
            method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME))

    legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')

    # This signature is the one actually exported as the serving default
    final_sdn = {
        tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
            classification_signature,
    }

    builder.add_meta_graph_and_variables(
        sess, [tf.saved_model.tag_constants.SERVING],
        signature_def_map=final_sdn,
        legacy_init_op=legacy_init_op)

    builder.save()
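
For completeness, a client call against the served model would look roughly like the sketch below. This is not from the original post: the host, port, model name, and the assumption that the prediction_signature above (with its 'images' input) is the one being served are all mine.

# Minimal TF Serving 1.x gRPC client sketch; host, port, model name and
# signature are assumptions, not taken from the original post.
from grpc.beta import implementations
import tensorflow as tf
from tensorflow_serving.apis import predict_pb2, prediction_service_pb2

channel = implementations.insecure_channel('localhost', 9000)
stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)

request = predict_pb2.PredictRequest()
request.model_spec.name = 'magic_model'
# "t" is the float32 numpy array returned by read_tensor_from_image_file
request.inputs['images'].CopyFrom(tf.contrib.util.make_tensor_proto(t))
result = stub.Predict(request, 10.0)  # 10-second timeout
print(result.outputs['scores'])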

Best Answer

I ran into the same problem and fought with it for a while. It turned out I was sending values of type double to a model that expected float32, and somehow TensorFlow converted those doubles to 0. In other words, whatever you send over RPC arrives in the model as 0. Hope this helps.
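
So the fix is to make the tensor proto explicitly float32 before it goes into the request. A minimal sketch of that cast, with illustrative variable names:

import numpy as np
import tensorflow as tf

# "image" stands for whatever array is sent over RPC; casting to float32
# up front avoids the silent double -> 0 conversion described above.
image_f32 = np.asarray(image, dtype=np.float32)
tensor_proto = tf.contrib.util.make_tensor_proto(image_f32,
                                                 dtype=tf.float32,
                                                 shape=list(image_f32.shape))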

Regarding "tensorflow: serving model always returns the same prediction", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/47912161/
