I am getting the following error with the custom attention layer in my model:

NotImplementedError: Layer attention has arguments in `__init__` and therefore must override `get_config`.

Here is my code:
import numpy as np
import tensorflow as tf
from tensorflow.keras.layers import *
from tensorflow.keras.models import *
from tensorflow.keras import backend as K
class attention(Layer):
    def __init__(self, return_sequences=True):
        self.return_sequences = return_sequences
        super(attention, self).__init__()

    def build(self, input_shape):
        # one scoring weight per feature, one bias per timestep
        self.W = self.add_weight(name="att_weight", shape=(input_shape[-1], 1),
                                 initializer="normal")
        self.b = self.add_weight(name="att_bias", shape=(input_shape[1], 1),
                                 initializer="zeros")
        super(attention, self).build(input_shape)

    def call(self, x):
        # attention weights over the time axis
        e = K.tanh(K.dot(x, self.W) + self.b)
        a = K.softmax(e, axis=1)
        output = x * a
        if self.return_sequences:
            return output
        # collapse the time axis into a single context vector
        return K.sum(output, axis=1)
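From the error message, my understanding is that Keras wants the layer to override get_config so it can be serialized. This is a rough sketch of the override I think it is asking for, added inside the attention class (it follows the usual custom-layer pattern; I have not confirmed that it resolves the error):

    def get_config(self):
        # report the constructor argument so Keras can rebuild the layer later
        config = super(attention, self).get_config()
        config["return_sequences"] = self.return_sequences
        return config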
model = Sequential()
model.add(Embedding(batch_size, 21, input_length=x_train.shape[1]))
model.add(Bidirectional(LSTM(128, return_sequences=True,
                             input_shape=(timesteps, 21),
                             kernel_initializer='he_normal')))
model.add(Dropout(0.5))
model.add(attention(return_sequences=False))  # receives 3D input and returns 2D output
model.add(Dense(2, activation='sigmoid'))
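If it helps, this is roughly how I expect saving and reloading to work once get_config is in place (the file name is a placeholder, the optimizer/loss are just illustrative, and the custom_objects mapping follows the standard Keras pattern):

model.compile(optimizer="adam", loss="binary_crossentropy")
# my understanding is that saving the model is where get_config gets called
model.save("attention_model.h5")
reloaded = load_model("attention_model.h5",
                      custom_objects={"attention": attention})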
Sincerely yours,
Subash Chandra Pakhrin
PhD Student
Wichita State University
Wichita, Kansas