The MobileNet model I actually get from tf.loadLayersModel is different from the one shown in the course. How should I handle this? It leads to an error later on.
Here is the call:
const mobilenet = await tf.loadLayersModel(MOBILENET_URL)
mobilenet.summary()
The printed summary is as follows:
_________________________________________________________________
Layer (type) Output shape Param #
=================================================================
input_1 (InputLayer) [null,null,null,3] 0
_________________________________________________________________
conv1 (Conv2D) [null,null,null,32] 864
_________________________________________________________________
conv1_bn (BatchNormalization [null,null,null,32] 128
_________________________________________________________________
conv1_relu (ReLU) [null,null,null,32] 0
_________________________________________________________________
conv_dw_1 (DepthwiseConv2D) [null,null,null,32] 288
_________________________________________________________________
conv_dw_1_bn (BatchNormaliza [null,null,null,32] 128
_________________________________________________________________
conv_dw_1_relu (ReLU) [null,null,null,32] 0
_________________________________________________________________
conv_pw_1 (Conv2D) [null,null,null,64] 2048
_________________________________________________________________
conv_pw_1_bn (BatchNormaliza [null,null,null,64] 256
_________________________________________________________________
conv_pw_1_relu (ReLU) [null,null,null,64] 0
_________________________________________________________________
conv_pad_2 (ZeroPadding2D) [null,null,null,64] 0
_________________________________________________________________
conv_dw_2 (DepthwiseConv2D) [null,null,null,64] 576
_________________________________________________________________
conv_dw_2_bn (BatchNormaliza [null,null,null,64] 256
_________________________________________________________________
conv_dw_2_relu (ReLU) [null,null,null,64] 0
_________________________________________________________________
conv_pw_2 (Conv2D) [null,null,null,128] 8192
_________________________________________________________________
conv_pw_2_bn (BatchNormaliza [null,null,null,128] 512
_________________________________________________________________
conv_pw_2_relu (ReLU) [null,null,null,128] 0
_________________________________________________________________
conv_dw_3 (DepthwiseConv2D) [null,null,null,128] 1152
_________________________________________________________________
conv_dw_3_bn (BatchNormaliza [null,null,null,128] 512
_________________________________________________________________
conv_dw_3_relu (ReLU) [null,null,null,128] 0
_________________________________________________________________
conv_pw_3 (Conv2D) [null,null,null,128] 16384
_________________________________________________________________
conv_pw_3_bn (BatchNormaliza [null,null,null,128] 512
_________________________________________________________________
conv_pw_3_relu (ReLU) [null,null,null,128] 0
_________________________________________________________________
conv_pad_4 (ZeroPadding2D) [null,null,null,128] 0
_________________________________________________________________
conv_dw_4 (DepthwiseConv2D) [null,null,null,128] 1152
_________________________________________________________________
conv_dw_4_bn (BatchNormaliza [null,null,null,128] 512
_________________________________________________________________
conv_dw_4_relu (ReLU) [null,null,null,128] 0
_________________________________________________________________
conv_pw_4 (Conv2D) [null,null,null,256] 32768
_________________________________________________________________
conv_pw_4_bn (BatchNormaliza [null,null,null,256] 1024
_________________________________________________________________
conv_pw_4_relu (ReLU) [null,null,null,256] 0
_________________________________________________________________
conv_dw_5 (DepthwiseConv2D) [null,null,null,256] 2304
_________________________________________________________________
conv_dw_5_bn (BatchNormaliza [null,null,null,256] 1024
_________________________________________________________________
conv_dw_5_relu (ReLU) [null,null,null,256] 0
_________________________________________________________________
conv_pw_5 (Conv2D) [null,null,null,256] 65536
_________________________________________________________________
conv_pw_5_bn (BatchNormaliza [null,null,null,256] 1024
_________________________________________________________________
conv_pw_5_relu (ReLU) [null,null,null,256] 0
_________________________________________________________________
conv_pad_6 (ZeroPadding2D) [null,null,null,256] 0
_________________________________________________________________
conv_dw_6 (DepthwiseConv2D) [null,null,null,256] 2304
_________________________________________________________________
conv_dw_6_bn (BatchNormaliza [null,null,null,256] 1024
_________________________________________________________________
conv_dw_6_relu (ReLU) [null,null,null,256] 0
_________________________________________________________________
conv_pw_6 (Conv2D) [null,null,null,512] 131072
_________________________________________________________________
conv_pw_6_bn (BatchNormaliza [null,null,null,512] 2048
_________________________________________________________________
conv_pw_6_relu (ReLU) [null,null,null,512] 0
_________________________________________________________________
conv_dw_7 (DepthwiseConv2D) [null,null,null,512] 4608
_________________________________________________________________
conv_dw_7_bn (BatchNormaliza [null,null,null,512] 2048
_________________________________________________________________
conv_dw_7_relu (ReLU) [null,null,null,512] 0
_________________________________________________________________
conv_pw_7 (Conv2D) [null,null,null,512] 262144
_________________________________________________________________
conv_pw_7_bn (BatchNormaliza [null,null,null,512] 2048
_________________________________________________________________
conv_pw_7_relu (ReLU) [null,null,null,512] 0
_________________________________________________________________
conv_dw_8 (DepthwiseConv2D) [null,null,null,512] 4608
_________________________________________________________________
conv_dw_8_bn (BatchNormaliza [null,null,null,512] 2048
_________________________________________________________________
conv_dw_8_relu (ReLU) [null,null,null,512] 0
_________________________________________________________________
conv_pw_8 (Conv2D) [null,null,null,512] 262144
_________________________________________________________________
conv_pw_8_bn (BatchNormaliza [null,null,null,512] 2048
_________________________________________________________________
conv_pw_8_relu (ReLU) [null,null,null,512] 0
_________________________________________________________________
conv_dw_9 (DepthwiseConv2D) [null,null,null,512] 4608
_________________________________________________________________
conv_dw_9_bn (BatchNormaliza [null,null,null,512] 2048
_________________________________________________________________
conv_dw_9_relu (ReLU) [null,null,null,512] 0
_________________________________________________________________
conv_pw_9 (Conv2D) [null,null,null,512] 262144
_________________________________________________________________
conv_pw_9_bn (BatchNormaliza [null,null,null,512] 2048
_________________________________________________________________
conv_pw_9_relu (ReLU) [null,null,null,512] 0
_________________________________________________________________
conv_dw_10 (DepthwiseConv2D) [null,null,null,512] 4608
_________________________________________________________________
conv_dw_10_bn (BatchNormaliz [null,null,null,512] 2048
_________________________________________________________________
conv_dw_10_relu (ReLU) [null,null,null,512] 0
_________________________________________________________________
conv_pw_10 (Conv2D) [null,null,null,512] 262144
_________________________________________________________________
conv_pw_10_bn (BatchNormaliz [null,null,null,512] 2048
_________________________________________________________________
conv_pw_10_relu (ReLU) [null,null,null,512] 0
_________________________________________________________________
conv_dw_11 (DepthwiseConv2D) [null,null,null,512] 4608
_________________________________________________________________
conv_dw_11_bn (BatchNormaliz [null,null,null,512] 2048
_________________________________________________________________
conv_dw_11_relu (ReLU) [null,null,null,512] 0
_________________________________________________________________
conv_pw_11 (Conv2D) [null,null,null,512] 262144
_________________________________________________________________
conv_pw_11_bn (BatchNormaliz [null,null,null,512] 2048
_________________________________________________________________
conv_pw_11_relu (ReLU) [null,null,null,512] 0
_________________________________________________________________
conv_pad_12 (ZeroPadding2D) [null,null,null,512] 0
_________________________________________________________________
conv_dw_12 (DepthwiseConv2D) [null,null,null,512] 4608
_________________________________________________________________
conv_dw_12_bn (BatchNormaliz [null,null,null,512] 2048
_________________________________________________________________
conv_dw_12_relu (ReLU) [null,null,null,512] 0
_________________________________________________________________
conv_pw_12 (Conv2D) [null,null,null,1024] 524288
_________________________________________________________________
conv_pw_12_bn (BatchNormaliz [null,null,null,1024] 4096
_________________________________________________________________
conv_pw_12_relu (ReLU) [null,null,null,1024] 0
_________________________________________________________________
conv_dw_13 (DepthwiseConv2D) [null,null,null,1024] 9216
_________________________________________________________________
conv_dw_13_bn (BatchNormaliz [null,null,null,1024] 4096
_________________________________________________________________
conv_dw_13_relu (ReLU) [null,null,null,1024] 0
_________________________________________________________________
conv_pw_13 (Conv2D) [null,null,null,1024] 1048576
_________________________________________________________________
conv_pw_13_bn (BatchNormaliz [null,null,null,1024] 4096
_________________________________________________________________
conv_pw_13_relu (ReLU) [null,null,null,1024] 0
=================================================================
Total params: 3228864
Trainable params: 3206976
Non-trainable params: 21888
The model I get has only 86 layers.
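For reference, the layer count can be read straight off the loaded model (a quick check using the mobilenet variable from the call above):

console.log(mobilenet.layers.length) // logs 86 for the summary shown above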
const model = tf.sequential()
for (let i = 0; i < 86; i++) {
  const layer = mobilenet.layers[i]
  layer.trainable = false
  model.add(layer)
}
// attach my own two-layer network on top
model.add(tf.layers.flatten())
Calling model.add(tf.layers.flatten()) then throws the following error:
Error: The shape of the input to "Flatten" is not fully defined (got ,,1024). Make sure to pass a complete "input_shape" or "batch_input_shape" argument to the first layer in your model.
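From the error message, the problem seems to be that this MobileNet declares its input as [null, null, null, 3], so by the time Flatten is reached the spatial dimensions are still unknown. Below is a rough sketch of what I think a workaround could look like: keep MobileNet as a functional model truncated at conv_pw_13_relu, freeze it, and give the new head an explicit inputShape. The 224x224 input size (which makes the feature map 7x7x1024), the dense layer sizes, and the class count are my own guesses for illustration, not values from the course.

// Truncate the loaded MobileNet at its last ReLU and freeze it
const truncatedMobilenet = tf.model({
  inputs: mobilenet.inputs,
  outputs: mobilenet.getLayer('conv_pw_13_relu').output
})
truncatedMobilenet.layers.forEach(layer => { layer.trainable = false })

// New trainable head with a fully defined input shape, so Flatten can infer its size
const head = tf.sequential()
head.add(tf.layers.flatten({ inputShape: [7, 7, 1024] })) // assumes 224x224x3 input images
head.add(tf.layers.dense({ units: 128, activation: 'relu' })) // hypothetical hidden size
head.add(tf.layers.dense({ units: 10, activation: 'softmax' })) // hypothetical class count

// At run time the two models would be chained, e.g.:
// const features = truncatedMobilenet.predict(imgTensor) // imgTensor shaped [1, 224, 224, 3]
// const prediction = head.predict(features)

Is something along these lines the right way to deal with the mismatch, or is there a better approach?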