1 Answer
Use this code (I removed the SelfAttention layer, so add it back in yourself):
import tensorflow as tf

class my_model(tf.keras.layers.Layer):
    def __init__(self):
        super(my_model, self).__init__()
        # Note: Dropout's argument is the fraction of units dropped, not the keep probability.
        keep_prob_ = 0.5
        # Build the block once with the functional API and wrap it in a Model.
        input_features = tf.keras.layers.Input(shape=(29, 1664))
        drop3 = tf.keras.layers.Dropout(keep_prob_)(input_features)
        # Residual connection followed by layer normalization
        Layer_norm_feat = tf.keras.layers.Add()([input_features, drop3])
        Layer_norm = tf.keras.layers.LayerNormalization(axis=-1)(Layer_norm_feat)
        # Position-wise feed-forward sub-block
        ff_out = tf.keras.layers.Dense(Layer_norm.shape[2], activation='relu')(Layer_norm)
        ff_out = tf.keras.layers.Dense(Layer_norm.shape[2])(ff_out)
        drop4 = tf.keras.layers.Dropout(keep_prob_)(ff_out)
        # Second residual connection and normalization
        Layer_norm_input = tf.keras.layers.Add()([Layer_norm, drop4])
        Attention_block_out = tf.keras.layers.LayerNormalization(axis=-1)(Layer_norm_input)
        self.intraEpoch_att_block = tf.keras.Model(inputs=input_features, outputs=Attention_block_out)

    def call(self, inp, training=False):
        # Forward the training flag so the Dropout layers are only active during training.
        x = self.intraEpoch_att_block(inp, training=training)
        return x

model1 = my_model()
model2 = my_model()
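As a quick sanity check, you can run a dummy batch through one of the instances and confirm that the block preserves the (29, 1664) shape. This is only an illustrative sketch; the batch size of 4 and the random input are assumptions, not part of the original answer:

import tensorflow as tf

# Hypothetical smoke test: feed random data through model1 and check the output shape.
dummy = tf.random.normal((4, 29, 1664))   # (batch, time steps, features)
out = model1(dummy, training=False)       # dropout disabled at inference time
print(out.shape)                          # expected: (4, 29, 1664)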