python - Saving a TensorFlow Keras model (encoder-decoder) in the SavedModel format

I have written (with the help of a TF tutorial) an image captioning model that uses an encoder-decoder architecture with attention.

Now I want to convert it to TFLite and eventually deploy it in Flutter.

I am trying to save the encoder and decoder models in the SavedModel format, which I can then convert to TFLite.
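
For context, once a sub-model has been exported, the usual route to TFLite is tf.lite.TFLiteConverter.from_saved_model on the export directory. Below is a minimal sketch, assuming the 'decoder_model' directory produced by the save call further down; the SELECT_TF_OPS fallback is an assumption that is often needed for RNN-style ops.

    import tensorflow as tf

    # Convert an exported SavedModel directory to a .tflite flatbuffer.
    converter = tf.lite.TFLiteConverter.from_saved_model('decoder_model')
    # Fall back to full TF ops for anything without a builtin TFLite kernel
    # (often needed for GRU/attention graphs).
    converter.target_spec.supported_ops = [
        tf.lite.OpsSet.TFLITE_BUILTINS,
        tf.lite.OpsSet.SELECT_TF_OPS,
    ]
    tflite_model = converter.convert()
    with open('decoder.tflite', 'wb') as f:
        f.write(tflite_model)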

The attention model:

    class BahdanauAttention(tf.keras.Model):
        def __init__(self, units):
            super(BahdanauAttention, self).__init__()
            self.W1 = tf.keras.layers.Dense(units)
            self.W2 = tf.keras.layers.Dense(units)
            self.V = tf.keras.layers.Dense(1)

        def call(self, features, hidden):
            # features (CNN_encoder output) shape == (batch_size, 64, embedding_dim)

            # hidden shape == (batch_size, hidden_size)
            # hidden_with_time_axis shape == (batch_size, 1, hidden_size)
            hidden_with_time_axis = tf.expand_dims(hidden, 1)

            # score shape == (batch_size, 64, hidden_size)
            score = tf.nn.tanh(self.W1(features) + self.W2(hidden_with_time_axis))

            # attention_weights shape == (batch_size, 64, 1)
            # you get 1 at the last axis because you are applying score to self.V
            attention_weights = tf.nn.softmax(self.V(score), axis=1)

            # context_vector shape after sum == (batch_size, hidden_size)
            context_vector = attention_weights * features
            context_vector = tf.reduce_sum(context_vector, axis=1)

            return context_vector, attention_weights
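
As a quick sanity check of the shape comments above, the layer can be exercised on dummy tensors; embedding_dim = 256 and units = 512 below simply mirror the TensorSpec shapes used later in this question, so treat the snippet as illustrative only:

    import tensorflow as tf

    attention = BahdanauAttention(units=512)
    features = tf.random.uniform((1, 64, 256))   # (batch_size, 64, embedding_dim)
    hidden = tf.zeros((1, 512))                  # (batch_size, hidden_size)

    context_vector, attention_weights = attention(features, hidden)
    print(context_vector.shape)     # (1, 256) -- summed over the 64 spatial locations
    print(attention_weights.shape)  # (1, 64, 1)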

The encoder model:

    class CNN_Encoder(tf.keras.Model):
        # This encoder passes the extracted features through a fully connected layer
        def __init__(self, embedding_dim):
            super(CNN_Encoder, self).__init__()
            # shape after fc == (batch_size, 64, embedding_dim)
            self.fc = tf.keras.layers.Dense(embedding_dim)

        @tf.function
        def call(self, x):
            inp = x
            y = self.fc(inp)
            z = tf.nn.relu(y)
            return z

The decoder model:

    class RNN_Decoder(tf.keras.Model):
        def __init__(self, embedding_dim, units, vocab_size):
            super(RNN_Decoder, self).__init__()
            self.units = units

            self.embedding = tf.keras.layers.Embedding(vocab_size, embedding_dim)
            self.gru = tf.keras.layers.GRU(self.units,
                                           return_sequences=True,
                                           return_state=True,
                                           recurrent_initializer='glorot_uniform')
            self.fc1 = tf.keras.layers.Dense(self.units)
            self.fc2 = tf.keras.layers.Dense(vocab_size)

            self.attention = BahdanauAttention(self.units)

        @tf.function
        def call(self, x, features1, hidden):
            # defining attention as a separate model
            features1 = features1
            hidden1 = hidden
            context_vector, attention_weights = self.attention(features1, hidden1)

            # x shape after passing through embedding == (batch_size, 1, embedding_dim)
            x = self.embedding(x)

            # x shape after concatenation == (batch_size, 1, embedding_dim + hidden_size)
            x = tf.concat([tf.expand_dims(context_vector, 1), x], axis=-1)

            # passing the concatenated vector to the GRU
            output, state = self.gru(x)

            # shape == (batch_size, max_length, hidden_size)
            x = self.fc1(output)

            # x shape == (batch_size * max_length, hidden_size)
            x = tf.reshape(x, (-1, x.shape[2]))

            # output shape == (batch_size * max_length, vocab)
            x = self.fc2(x)

            return x, state, attention_weights

        def reset_state(self, batch_size):
            return tf.zeros((batch_size, self.units))
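
For reference, a single greedy decoding step with these classes would look roughly like the sketch below; the feature shape, vocabulary size, and start-token id are assumptions in the spirit of the TF image-captioning tutorial, not values taken from the question:

    import tensorflow as tf

    encoder = CNN_Encoder(embedding_dim=256)
    decoder = RNN_Decoder(embedding_dim=256, units=512, vocab_size=5000)

    img_features = tf.random.uniform((1, 64, 2048))   # stand-in for CNN features
    features = encoder(img_features)                  # (1, 64, 256)

    hidden = decoder.reset_state(batch_size=1)        # (1, 512)
    dec_input = tf.constant([[1]], dtype=tf.int32)    # assumed '<start>' token id

    predictions, hidden, attention_weights = decoder(dec_input, features, hidden)
    predicted_id = int(tf.argmax(predictions[0]))     # id of the next word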

Now, when saving the models, the encoder is saved as a SavedModel without errors, but the decoder is not.

    tf.saved_model.save(decoder, 'decoder_model', signatures=decoder.call.get_concrete_function(
        [
            tf.TensorSpec(shape=[1, 1], dtype=tf.int32, name='x'),
            tf.TensorSpec(shape=[1, 64, 256], dtype=tf.float32, name="features1"),
            tf.TensorSpec(shape=[1, 512], dtype=tf.float32, name="hidden"),
        ]
    ))

The error:

    ---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-66-da4712d61d18> in <module>
3 tf.TensorSpec(shape=[1, 1], dtype=tf.int32, name='x'),
4 tf.TensorSpec(shape=[1, 64, 256], dtype=tf.float32, name="features1"),
----> 5 tf.TensorSpec(shape=[1, 512], dtype=tf.float32, name="hidden"),
6 ]
7 ))

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\eager\def_function.py in
get_concrete_function(self, *args, **kwargs)
913 # In this case we have created variables on the first call, so we run the
914 # defunned version which is guaranteed to never create variables.
--> 915 return self._stateless_fn.get_concrete_function(*args, **kwargs)
916 elif self._stateful_fn is not None:
917 # In this case we have not created variables on the first call. So we can

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\eager\function.py in
get_concrete_function(self, *args, **kwargs)
2432 args, kwargs = None, None
2433 with self._lock:
-> 2434 graph_function, args, kwargs = self._maybe_define_function(args, kwargs)
2435 if self.input_signature:
2436 args = self.input_signature

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\eager\function.py in
_maybe_define_function(self, args, kwargs)
2701
2702 self._function_cache.missed.add(call_context_key)
-> 2703 graph_function = self._create_graph_function(args, kwargs)
2704 self._function_cache.primary[cache_key] = graph_function
2705 return graph_function, args, kwargs

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\eager\function.py in
_create_graph_function(self, args, kwargs, override_flat_arg_shapes)
2591 arg_names=arg_names,
2592 override_flat_arg_shapes=override_flat_arg_shapes,
-> 2593 capture_by_value=self._capture_by_value),
2594 self._function_attributes,
2595 # Tell the ConcreteFunction to clean up its graph once it goes out of

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\framework\func_graph.py in
func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph,
autograph_options, add_control_dependencies, arg_names, op_return_value, collections,
capture_by_value, override_flat_arg_shapes)
976 converted_func)
977
--> 978 func_outputs = python_func(*func_args, **func_kwargs)
979
980 # invariant: `func_outputs` contains only Tensors, CompositeTensors,

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\eager\def_function.py in
wrapped_fn(*args, **kwds)
437 # __wrapped__ allows AutoGraph to swap in a converted function. We give
438 # the function a weak reference to itself to avoid a reference cycle.
--> 439 return weak_wrapped_fn().__wrapped__(*args, **kwds)
440 weak_wrapped_fn = weakref.ref(wrapped_fn)
441

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\eager\function.py in
bound_method_wrapper(*args, **kwargs)
3209 # However, the replacer is still responsible for attaching self properly.
3210 # TODO(mdan): Is it possible to do it here instead?
-> 3211 return wrapped_fn(*args, **kwargs)
3212 weak_bound_method_wrapper = weakref.ref(bound_method_wrapper)
3213

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\framework\func_graph.py in
wrapper(*args, **kwargs)
966 except Exception as e: # pylint:disable=broad-except
967 if hasattr(e, "ag_error_metadata"):
--> 968 raise e.ag_error_metadata.to_exception(e)
969 else:
970 raise

TypeError: in converted code:


TypeError: tf__call() missing 2 required positional arguments: 'features' and 'hidden'

I have spent the past four days trying to resolve this error, to no avail :(

Any help with this would be greatly appreciated!

Edit:

I fixed the code as suggested in the answer below, which resolved that error, but now another one appears (the problem is in the attention part):
WARNING:tensorflow:Skipping full serialization of Keras model <__main__.RNN_Decoder object at 0x0000023F61D37278>, because its inputs are not defined.
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-45-4e1cfeda04ea> in <module>
2 tf.TensorSpec(shape=[1, 1], dtype=tf.int32, name='x'),
3 tf.TensorSpec(shape=[1, 64, 256], dtype=tf.float32, name="features1"),
----> 4 tf.TensorSpec(shape=[1, 512], dtype=tf.float32, name="hidden"),
5 ))

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\saved_model\save.py in save(obj, export_dir, signatures, options)
897 # Note we run this twice since, while constructing the view the first time
898 # there can be side effects of creating variables.
--> 899 _ = _SaveableView(checkpoint_graph_view)
900 saveable_view = _SaveableView(checkpoint_graph_view)
901

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\saved_model\save.py in __init__(self, checkpoint_view)
163 self.checkpoint_view = checkpoint_view
164 trackable_objects, node_ids, slot_variables = (
--> 165 self.checkpoint_view.objects_ids_and_slot_variables())
166 self.nodes = trackable_objects
167 self.node_ids = node_ids

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\training\tracking\graph_view.py in objects_ids_and_slot_variables(self)
413 A tuple of (trackable objects, object -> node id, slot variables)
414 """
--> 415 trackable_objects, path_to_root = self._breadth_first_traversal()
416 object_names = object_identity.ObjectIdentityDictionary()
417 for obj, path in path_to_root.items():

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\training\tracking\graph_view.py in _breadth_first_traversal(self)
197 % (current_trackable,))
198 bfs_sorted.append(current_trackable)
--> 199 for name, dependency in self.list_dependencies(current_trackable):
200 if dependency not in path_to_root:
201 path_to_root[dependency] = (

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\saved_model\save.py in list_dependencies(self, obj)
107 def list_dependencies(self, obj):
108 """Overrides a parent method to include `add_object` objects."""
--> 109 extra_dependencies = self.list_extra_dependencies(obj)
110 extra_dependencies.update(self._extra_dependencies.get(obj, {}))
111

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\saved_model\save.py in list_extra_dependencies(self, obj)
134 def list_extra_dependencies(self, obj):
135 return obj._list_extra_dependencies_for_serialization( # pylint: disable=protected-access
--> 136 self._serialization_cache)
137
138 def list_functions(self, obj):

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\keras\engine\base_layer.py in _list_extra_dependencies_for_serialization(self, serialization_cache)
2414 def _list_extra_dependencies_for_serialization(self, serialization_cache):
2415 return (self._trackable_saved_model_saver
-> 2416 .list_extra_dependencies_for_serialization(serialization_cache))
2417
2418 def _list_functions_for_serialization(self, serialization_cache):

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\keras\saving\saved_model\base_serialization.py in list_extra_dependencies_for_serialization(self, serialization_cache)
76 of attributes are listed in the `saved_model._LayerAttributes` class.
77 """
---> 78 return self.objects_to_serialize(serialization_cache)
79
80 def list_functions_for_serialization(self, serialization_cache):

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\keras\saving\saved_model\layer_serialization.py in objects_to_serialize(self, serialization_cache)
74 def objects_to_serialize(self, serialization_cache):
75 return (self._get_serialized_attributes(
---> 76 serialization_cache).objects_to_serialize)
77
78 def functions_to_serialize(self, serialization_cache):

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\keras\saving\saved_model\layer_serialization.py in _get_serialized_attributes(self, serialization_cache)
93
94 object_dict, function_dict = self._get_serialized_attributes_internal(
---> 95 serialization_cache)
96
97 serialized_attr.set_and_validate_objects(object_dict)

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\keras\saving\saved_model\model_serialization.py in _get_serialized_attributes_internal(self, serialization_cache)
51 objects, functions = (
52 super(ModelSavedModelSaver, self)._get_serialized_attributes_internal(
---> 53 serialization_cache))
54 functions['_default_save_signature'] = default_signature
55 return objects, functions

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\keras\saving\saved_model\layer_serialization.py in _get_serialized_attributes_internal(self, serialization_cache)
102 """Returns dictionary of serialized attributes."""
103 objects = save_impl.wrap_layer_objects(self.obj, serialization_cache)
--> 104 functions = save_impl.wrap_layer_functions(self.obj, serialization_cache)
105 # Attribute validator requires that the default save signature is added to
106 # function dict, even if the value is None.

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\keras\saving\saved_model\save_impl.py in wrap_layer_functions(layer, serialization_cache)
198 for fn in fns.values():
199 if fn is not None and fn.input_signature is not None:
--> 200 fn.get_concrete_function()
201
202 # Restore overwritten functions and losses

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\keras\saving\saved_model\save_impl.py in get_concrete_function(self, *args, **kwargs)
554 def get_concrete_function(self, *args, **kwargs):
555 if not self.call_collection.tracing:
--> 556 self.call_collection.add_trace(*args, **kwargs)
557 return super(LayerCall, self).get_concrete_function(*args, **kwargs)
558

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\keras\saving\saved_model\save_impl.py in add_trace(self, *args, **kwargs)
429 trace_with_training(False)
430 else:
--> 431 fn.get_concrete_function(*args, **kwargs)
432 self.tracing = False
433

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\keras\saving\saved_model\save_impl.py in get_concrete_function(self, *args, **kwargs)
555 if not self.call_collection.tracing:
556 self.call_collection.add_trace(*args, **kwargs)
--> 557 return super(LayerCall, self).get_concrete_function(*args, **kwargs)
558
559

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\eager\def_function.py in get_concrete_function(self, *args, **kwargs)
907 if self._stateful_fn is None:
908 initializers = []
--> 909 self._initialize(args, kwargs, add_initializers_to=initializers)
910 self._initialize_uninitialized_variables(initializers)
911

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\eager\def_function.py in _initialize(self, args, kwds, add_initializers_to)
495 self._concrete_stateful_fn = (
496 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
--> 497 *args, **kwds))
498
499 def invalid_creator_scope(*unused_args, **unused_kwds):

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\eager\function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2387 args, kwargs = None, None
2388 with self._lock:
-> 2389 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2390 return graph_function
2391

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\eager\function.py in _maybe_define_function(self, args, kwargs)
2701
2702 self._function_cache.missed.add(call_context_key)
-> 2703 graph_function = self._create_graph_function(args, kwargs)
2704 self._function_cache.primary[cache_key] = graph_function
2705 return graph_function, args, kwargs

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
2591 arg_names=arg_names,
2592 override_flat_arg_shapes=override_flat_arg_shapes,
-> 2593 capture_by_value=self._capture_by_value),
2594 self._function_attributes,
2595 # Tell the ConcreteFunction to clean up its graph once it goes out of

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
976 converted_func)
977
--> 978 func_outputs = python_func(*func_args, **func_kwargs)
979
980 # invariant: `func_outputs` contains only Tensors, CompositeTensors,

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\eager\def_function.py in wrapped_fn(*args, **kwds)
437 # __wrapped__ allows AutoGraph to swap in a converted function. We give
438 # the function a weak reference to itself to avoid a reference cycle.
--> 439 return weak_wrapped_fn().__wrapped__(*args, **kwds)
440 weak_wrapped_fn = weakref.ref(wrapped_fn)
441

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\keras\saving\saved_model\save_impl.py in wrapper(*args, **kwargs)
532 saving=True):
533 with base_layer_utils.autocast_context_manager(layer._compute_dtype): # pylint: disable=protected-access
--> 534 ret = method(*args, **kwargs)
535 _restore_layer_losses(original_losses)
536 return ret

~\anaconda3\envs\tf\lib\site-packages\tensorflow_core\python\keras\saving\saved_model\save_impl.py in call_and_return_conditional_losses(inputs, *args, **kwargs)
574 layer_call = _get_layer_call_method(layer)
575 def call_and_return_conditional_losses(inputs, *args, **kwargs):
--> 576 return layer_call(inputs, *args, **kwargs), layer.get_losses_for(inputs)
577 return _create_call_fn_decorator(layer, call_and_return_conditional_losses)
578

TypeError: call() missing 1 required positional argument: 'hidden1'

Best answer

The arguments to decoder.call.get_concrete_function should not be passed as a list. If you have three spec objects, call get_concrete_function(s1, s2, s3) rather than get_concrete_function([s1, s2, s3]).
Try removing the []:

    tf.saved_model.save(decoder, 'decoder_model', signatures=decoder.call.get_concrete_function(
        tf.TensorSpec(shape=[1, 1], dtype=tf.int32, name='x'),
        tf.TensorSpec(shape=[1, 64, 256], dtype=tf.float32, name="features1"),
        tf.TensorSpec(shape=[1, 512], dtype=tf.float32, name="hidden")
    ))
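
Each TensorSpec then corresponds to one positional argument of call (x, features1, hidden). As a follow-up sketch (not part of the original answer), the encoder can be exported with an explicit signature in the same way, and the saved decoder can be reloaded to inspect the signature that TFLite conversion will see; the [1, 64, 2048] feature shape is an assumption for InceptionV3-style features:

    # Export the encoder with an explicit signature as well (input shape assumed).
    tf.saved_model.save(encoder, 'encoder_model', signatures=encoder.call.get_concrete_function(
        tf.TensorSpec(shape=[1, 64, 2048], dtype=tf.float32, name='x')
    ))

    # Reload the decoder and inspect its serving signature.
    loaded = tf.saved_model.load('decoder_model')
    infer = loaded.signatures['serving_default']
    print(infer.structured_input_signature)
    print(infer.structured_outputs)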

Regarding "python - Saving a TensorFlow Keras model (encoder-decoder) in the SavedModel format", the original question can be found on Stack Overflow: https://stackoverflow.com/questions/62250441/
