ValueError: Layer activation_1 was called with an input that isn't a symbolic tensor

Problem description

from keras.layers import AveragePooling2D
from keras.models import Sequential
from keras.layers.normalization import BatchNormalization
from keras.layers.convolutional import Conv2D
from keras.layers.convolutional import MaxPooling2D
from keras.layers.core import Activation
from keras.layers.core import Flatten
from keras.layers.core import Dropout
from keras.layers.core import Dense
from keras import backend as K


class SmallerVGGNet:
    @staticmethod
    def build(width, height, depth, classes, finalAct="softmax"):

        x = (height, width, depth)
        output = -1


        # CONV => RELU => POOL
        x = (Conv2D(16, (3, 3), padding="same", input_shape=x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (MaxPooling2D(pool_size=(3, 3))(x))

        x = (Conv2D(32, (3, 3), padding="same")(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (MaxPooling2D(pool_size=(3, 3))(x))
        x = (BatchNormalization(axis=output)(x))

        # (CONV => RELU) * 2 => POOL
        x = (Conv2D(64, (3, 3), padding="same")(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (Conv2D(64, (3, 3), padding="same")(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (AveragePooling2D(pool_size=(2, 2))(x))

        # (CONV => RELU) * 2 => POOL
        x = (Conv2D(128, (3, 3), padding="same")(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (Conv2D(128, (3, 3))(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (MaxPooling2D(pool_size=(2, 2))(x))

        # first (and only) set of FC => RELU layers
        x = (Flatten()(x))
        x = (Dense(128)(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization()(x))
        x = (Dropout(0.5)(x))

        # softmax classifier
        x = (Dense(classes)(x))
        x = (Activation(finalAct)(x))

        x.summary()

        # return the constructed network architecture

Why do I get the error "Layer activation_1 was called with an input that isn't a symbolic tensor" when I run this code? Please help me fix it.

ValueError: Layer activation_1 was called with an input that isn't a symbolic tensor. Received type: <class 'keras.layers.convolutional.Conv2D'>. Full input: [<keras.layers.convolutional.Conv2D object at 0x...>]. All inputs to the layer should be tensors.

Tags: python, opencv, keras, conv-neural-network

Solution


You never pass any input in the line x = (Conv2D(16, (3, 3), padding="same", input_shape=x)), so x is rebound to the Conv2D layer object itself rather than to a tensor. When execution then reaches x = (Activation("relu")(x)), the Activation layer is called on a layer instead of a tensor, which raises the error above. As noted in the comments, you must pass an input tensor to the first layer; see the minimal sketch below, followed by the full edited code (note that I use the tensorflow.keras library instead of keras, and that input_shape on the first Conv2D becomes redundant but harmless once an Input layer fixes the shape).
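A minimal sketch of the failing call versus the working one, assuming a hypothetical 96x96x3 input (the shape is only for illustration):

from tensorflow.compat.v1.keras.layers import Input, Conv2D, Activation

# Wrong: Conv2D(...) by itself is a Layer object, not a tensor,
# so calling Activation on it raises the "not a symbolic tensor" error
layer = Conv2D(16, (3, 3), padding="same")
# Activation("relu")(layer)  # ValueError

# Right: call the layer on an Input tensor; the result is a symbolic
# tensor that every following layer can accept
inputs = Input(shape=(96, 96, 3))
x = Conv2D(16, (3, 3), padding="same")(inputs)
x = Activation("relu")(x)

The full edited code: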

from tensorflow.compat.v1.keras.layers import AveragePooling2D
from tensorflow.compat.v1.keras.models import Sequential, Model
from tensorflow.compat.v1.keras.layers import Input, Dense, Dropout, Flatten, Activation, BatchNormalization, Conv2D, MaxPooling2D
from tensorflow.compat.v1.keras import backend as K


class SmallerVGGNet:
    @staticmethod
    def build(width, height, depth, classes, finalAct="softmax"):

        x = (height, width, depth)
        output = -1


        # CONV => RELU => POOL
        inputs = Input(shape=x)
        x = (Conv2D(16, (3, 3), padding="same", input_shape=x)(inputs))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (MaxPooling2D(pool_size=(3, 3))(x))

        x = (Conv2D(32, (3, 3), padding="same")(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (MaxPooling2D(pool_size=(3, 3))(x))
        x = (BatchNormalization(axis=output)(x))

        # (CONV => RELU) * 2 => POOL
        x = (Conv2D(64, (3, 3), padding="same")(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (Conv2D(64, (3, 3), padding="same")(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (AveragePooling2D(pool_size=(2, 2))(x))

        # (CONV => RELU) * 2 => POOL
        x = (Conv2D(128, (3, 3), padding="same")(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (Conv2D(128, (3, 3))(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization(axis=output)(x))
        x = (MaxPooling2D(pool_size=(2, 2))(x))

        # first (and only) set of FC => RELU layers
        x = (Flatten()(x))
        x = (Dense(128)(x))
        x = (Activation("relu")(x))
        x = (BatchNormalization()(x))
        x = (Dropout(0.5)(x))

        # softmax classifier
        x = (Dense(classes)(x))
        x = (Activation(finalAct)(x))
        model = Model(inputs, x)
        model.summary()

        # return the constructed network architecture
        return model

a = SmallerVGGNet()
model = a.build(100, 100, 100, 10)
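As a side note, the Sequential API that the question already imports would avoid the error as well, because Sequential creates the input tensor itself from input_shape on the first layer. A shortened sketch (not the full SmallerVGGNet, assuming a 100x100x3 input and 10 classes):

from tensorflow.compat.v1.keras.models import Sequential
from tensorflow.compat.v1.keras.layers import Conv2D, Activation, MaxPooling2D, Flatten, Dense

# Sequential wires the input tensor up from input_shape,
# so no explicit Input layer is needed
model = Sequential([
    Conv2D(16, (3, 3), padding="same", input_shape=(100, 100, 3)),
    Activation("relu"),
    MaxPooling2D(pool_size=(3, 3)),
    Flatten(),
    Dense(10),
    Activation("softmax"),
])
model.summary()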
