from keras import backend as K
from keras.layers import AveragePooling2D
from keras.layers import Input
from keras.layers.convolutional import Conv2D
from keras.layers.convolutional import MaxPooling2D
from keras.layers.core import Activation
from keras.layers.core import Dense
from keras.layers.core import Dropout
from keras.layers.core import Flatten
from keras.layers.normalization import BatchNormalization
from keras.models import Model
from keras.models import Sequential
class SmallerVGGNet:
    """Builder for a small VGG-style CNN using the Keras functional API."""

    @staticmethod
    def build(width, height, depth, classes, finalAct="softmax"):
        """Construct and return the network as a keras ``Model``.

        Args:
            width: input image width in pixels.
            height: input image height in pixels.
            depth: number of input channels (channels-last ordering assumed,
                since BatchNormalization uses ``axis=-1``).
            classes: number of units in the classifier head.
            finalAct: activation of the output layer ("softmax" for
                single-label classification, "sigmoid" for multi-label).

        Returns:
            keras.models.Model mapping the image input to class scores.
        """
        input_shape = (height, width, depth)
        chan_dim = -1  # channels-last: BatchNormalization acts on the last axis

        # BUG FIX: the original code did
        #     x = (Conv2D(16, (3, 3), padding="same", input_shape=x))
        # which binds x to a *layer object* (the layer is never called on a
        # tensor), so the next Activation("relu")(x) raised
        #     ValueError: Layer activation_1 was called with an input that
        #     isn't a symbolic tensor.
        # The functional API must start from a symbolic Input tensor.
        inputs = Input(shape=input_shape)

        # CONV => RELU => POOL
        x = Conv2D(16, (3, 3), padding="same")(inputs)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chan_dim)(x)
        x = MaxPooling2D(pool_size=(3, 3))(x)
        x = Conv2D(32, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chan_dim)(x)
        x = MaxPooling2D(pool_size=(3, 3))(x)
        x = BatchNormalization(axis=chan_dim)(x)

        # (CONV => RELU) * 2 => POOL
        x = Conv2D(64, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chan_dim)(x)
        x = Conv2D(64, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chan_dim)(x)
        x = AveragePooling2D(pool_size=(2, 2))(x)

        # (CONV => RELU) * 2 => POOL
        x = Conv2D(128, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chan_dim)(x)
        x = Conv2D(128, (3, 3))(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chan_dim)(x)
        x = MaxPooling2D(pool_size=(2, 2))(x)

        # first (and only) set of FC => RELU layers
        x = Flatten()(x)
        x = Dense(128)(x)
        x = Activation("relu")(x)
        x = BatchNormalization()(x)
        x = Dropout(0.5)(x)

        # softmax classifier
        x = Dense(classes)(x)
        x = Activation(finalAct)(x)

        # BUG FIX: the original called x.summary() on a tensor and returned
        # nothing; tie input and output into a Model and return it.
        model = Model(inputs, x)
        model.summary()
        return model
# return the constructed network architecture
[报错信息截图][2]
为什么运行这段代码时会出现下面的错误?错误的原因是:用一个不是符号张量(symbolic tensor)的输入调用了 Activation 层。请帮助我解决此问题。
ValueError: Layer activation_1 was called with an input that isn't a symbolic tensor. Received type: &lt;class 'keras.layers.convolutional.Conv2D'&gt;。完整输入:[&lt;Conv2D 层对象&gt;]。该层的所有输入都应为张量。
最佳答案
在 x = (Conv2D(16, (3, 3), padding="same", input_shape=x))
这一行中,您只是创建了卷积层对象,并没有向它传入任何输入张量。因此当代码运行到 x = (Activation("relu")(x))
这一行时,x 是一个层对象而不是张量,于是就出现了上述错误。所以,正如注释中所说,您必须先把一个输入张量传给第一层。修改后的代码如下(注意,这里使用的是 tensorflow.keras 库而不是 keras):
from tensorflow.compat.v1.keras.layers import AveragePooling2D
from tensorflow.compat.v1.keras.models import Sequential, Model
from tensorflow.compat.v1.keras.layers import Input, Dense, Dropout, Flatten, Activation, BatchNormalization, Conv2D, MaxPooling2D
from tensorflow.compat.v1.keras import backend as K
class SmallerVGGNet:
    """Small VGG-style CNN built with the Keras functional API."""

    @staticmethod
    def build(width, height, depth, classes, finalAct="softmax"):
        """Construct the network, print its summary, and return the Model.

        Args:
            width: input image width in pixels.
            height: input image height in pixels.
            depth: number of input channels (channels-last ordering assumed,
                since BatchNormalization uses ``axis=-1``).
            classes: number of units in the classifier head.
            finalAct: activation of the output layer ("softmax" for
                single-label classification, "sigmoid" for multi-label).

        Returns:
            The constructed tf.keras Model.
        """
        input_shape = (height, width, depth)
        chan_dim = -1  # channels-last: BatchNormalization acts on the last axis

        # CONV => RELU => POOL
        inputs = Input(shape=input_shape)
        # Note: the redundant input_shape kwarg on the first Conv2D is
        # dropped — the Input layer above already fixes the shape.
        x = Conv2D(16, (3, 3), padding="same")(inputs)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chan_dim)(x)
        x = MaxPooling2D(pool_size=(3, 3))(x)
        x = Conv2D(32, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chan_dim)(x)
        x = MaxPooling2D(pool_size=(3, 3))(x)
        x = BatchNormalization(axis=chan_dim)(x)

        # (CONV => RELU) * 2 => POOL
        x = Conv2D(64, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chan_dim)(x)
        x = Conv2D(64, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chan_dim)(x)
        x = AveragePooling2D(pool_size=(2, 2))(x)

        # (CONV => RELU) * 2 => POOL
        x = Conv2D(128, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chan_dim)(x)
        x = Conv2D(128, (3, 3))(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chan_dim)(x)
        x = MaxPooling2D(pool_size=(2, 2))(x)

        # first (and only) set of FC => RELU layers
        x = Flatten()(x)
        x = Dense(128)(x)
        x = Activation("relu")(x)
        x = BatchNormalization()(x)
        x = Dropout(0.5)(x)

        # softmax classifier
        x = Dense(classes)(x)
        x = Activation(finalAct)(x)

        model = Model(inputs, x)
        model.summary()
        # BUG FIX: the original never returned the model, so callers of
        # build() received None. Returning it is backward compatible.
        return model
# Smoke-test the builder: build() is a @staticmethod, so it can be
# invoked through an instance (as here) or directly through the class.
net = SmallerVGGNet()
net.build(100, 100, 100, 10)
关于python - ValueError:层激活_1调用的输入不是符号张量,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/59608911/