
python - How to view the layers of a sub-model inside a parent model with tf.keras.Model.summary?


I have a model that subclasses tf.keras.Model; the code is as follows:

import tensorflow as tf


class Mymodel(tf.keras.Model):

    def __init__(self, classes, backbone_model, *args, **kwargs):
        super(Mymodel, self).__init__(self, args, kwargs)
        self.backbone = backbone_model
        self.classify_layer = tf.keras.layers.Dense(classes, activation='sigmoid')

    def call(self, inputs):
        x = self.backbone(inputs)
        x = self.classify_layer(x)
        return x

inputs = tf.keras.Input(shape=(224, 224, 3))
model = Mymodel(inputs=inputs, classes=61,
                backbone_model=tf.keras.applications.MobileNet())
model.build(input_shape=(20, 224, 224, 3))
model.summary()

The result is:

_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
mobilenet_1.00_224 (Model)   (None, 1000)              4253864
_________________________________________________________________
dense (Dense)                multiple                  61061
=================================================================
Total params: 4,314,925
Trainable params: 4,293,037
Non-trainable params: 21,888
_________________________________________________________________
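For reference, a minimal sketch that is not part of the original question: the nested MobileNet is itself a Keras Model, so its own summary can be printed directly, and newer tf.keras releases also expose an expand_nested flag on Model.summary (check your version's signature before relying on it):

# Two version-dependent ways to inspect the nested backbone without
# restructuring the model (a sketch, not from the original post):
model.backbone.summary()               # MobileNet is a Model, so this works directly
# model.summary(expand_nested=True)    # only if your tf.keras version supports expand_nested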

But I want to see all the layers of MobileNet, so I tried extracting all of MobileNet's layers and running the input through them inside the model:

import tensorflow as tf


class Mymodel(tf.keras.Model):

    def __init__(self, classes, backbone_model, *args, **kwargs):
        super(Mymodel, self).__init__(self, args, kwargs)
        self.backbone = backbone_model
        self.classify_layer = tf.keras.layers.Dense(classes, activation='sigmoid')

    def my_process_layers(self, inputs):
        layers = self.backbone.layers
        tmp_x = inputs
        for i in range(1, len(layers)):
            tmp_x = layers[i](tmp_x)
        return tmp_x

    def call(self, inputs):
        x = self.my_process_layers(inputs)
        x = self.classify_layer(x)
        return x

inputs = tf.keras.Input(shape=(224, 224, 3))
model = Mymodel(inputs=inputs, classes=61,
                backbone_model=tf.keras.applications.MobileNet())
model.build(input_shape=(20, 224, 224, 3))
model.summary()

But the result did not change:

_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
mobilenet_1.00_224 (Model)   (None, 1000)              4253864
_________________________________________________________________
dense (Dense)                multiple                  61061
=================================================================
Total params: 4,314,925
Trainable params: 4,293,037
Non-trainable params: 21,888
_________________________________________________________________

Then I tried extracting a single layer and inserting it into the model:

import tensorflow as tf


class Mymodel(tf.keras.Model):

    def __init__(self, classes, backbone_model, *args, **kwargs):
        super(Mymodel, self).__init__(self, args, kwargs)
        self.backbone = backbone_model
        self.classify_layer = tf.keras.layers.Dense(classes, activation='sigmoid')

    def call(self, inputs):
        x = self.backbone.layers[1](inputs)
        x = self.classify_layer(x)
        return x

inputs = tf.keras.Input(shape=(224, 224, 3))
model = Mymodel(inputs=inputs, classes=61,
                backbone_model=tf.keras.applications.MobileNet())
model.build(input_shape=(20, 224, 224, 3))
model.summary()

It did not change either. I am confused.

_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
mobilenet_1.00_224 (Model)   (None, 1000)              4253864
_________________________________________________________________
dense (Dense)                multiple                  244
=================================================================
Total params: 4,254,108
Trainable params: 4,232,220
Non-trainable params: 21,888
_________________________________________________________________

However, I noticed that the parameter count of the dense layer changed, and I don't know what happened.
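A plausible reading of the new count, sketched below and not part of the original question: backbone.layers[1] is MobileNet's initial ZeroPadding2D layer, whose output keeps 3 channels, and a Dense layer only connects along the last axis, so its parameter count drops from 1000 * 61 + 61 = 61061 to 3 * 61 + 61 = 244.

# Rough parameter check for the Dense layer (assumes its input is the
# 3-channel output of backbone.layers[1], a ZeroPadding2D layer):
classes = 61
channels_after_layer1 = 3
print(channels_after_layer1 * classes + classes)   # -> 244, matching the summary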

Best Answer

The answer by @Ioannis is very good, but unfortunately it drops the Keras 'Model Subclassing' structure present in the question. If, like me, you want to keep the model subclassing and still show all the layers in the summary, you can use a for loop to step through all the individual layers of the more complex model:

class MyMobileNet(tf.keras.Sequential):
    def __init__(self, input_shape=(224, 224, 3), classes=61):
        super(MyMobileNet, self).__init__()
        self.backbone_model = [layer for layer in
            tf.keras.applications.MobileNet(input_shape, include_top=False, pooling='avg').layers]
        self.classificator = tf.keras.layers.Dense(classes, activation='sigmoid', name='classificator')

    def call(self, inputs):
        x = inputs
        for layer in self.backbone_model:
            x = layer(x)
        x = self.classificator(x)
        return x

model = MyMobileNet()

After that we can build the model directly and call summary():

model.build(input_shape=(None, 224, 224, 3))
model.summary()

Model: "my_mobile_net"
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
conv1_pad (ZeroPadding2D)    (None, 225, 225, 3)       0
_________________________________________________________________
conv1 (Conv2D)               (None, 112, 112, 32)      864
_________________________________________________________________
conv1_bn (BatchNormalization (None, 112, 112, 32)      128
_________________________________________________________________
....
....
conv_pw_13 (Conv2D)          (None, 7, 7, 1024)        1048576
_________________________________________________________________
conv_pw_13_bn (BatchNormaliz (None, 7, 7, 1024)        4096
_________________________________________________________________
conv_pw_13_relu (ReLU)       (None, 7, 7, 1024)        0
_________________________________________________________________
global_average_pooling2d_13  (None, 1024)              0
_________________________________________________________________
classificator (Dense)        multiple                  62525
=================================================================
Total params: 3,291,389
Trainable params: 3,269,501
Non-trainable params: 21,888
_________________________________________________________________
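As a quick sanity check on the rebuilt model, a dummy batch can be pushed through it; the sketch below is an addition to the original answer and assumes the model object built above:

import numpy as np

# Smoke test (an added sketch): run a random batch through the rebuilt model
# and confirm the classifier output shape.
dummy_batch = np.random.rand(2, 224, 224, 3).astype('float32')
predictions = model(dummy_batch)
print(predictions.shape)   # expected: (2, 61)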

Regarding "python - How to view the layers of a sub-model inside a parent model with tf.keras.Model.summary?", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/58657003/
