from keras.models import Sequential
from keras.layers import Dense, LeakyReLU

model = Sequential()
# Leave the activation argument off the Dense layer...
model.add(Dense(90))
# ...and add the LeakyReLU activation as its own layer:
model.add(LeakyReLU(alpha=0.05))
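
For context, here is a minimal end-to-end sketch of the same pattern. The input shape, output layer, and compile settings are assumptions added just to make the snippet runnable, not part of the original answer:

from keras.models import Sequential
from keras.layers import Dense, LeakyReLU

# Hypothetical standalone example: a small model where the Dense layer
# has no built-in activation and LeakyReLU follows as a separate layer.
model = Sequential()
model.add(Dense(90, input_shape=(64,)))  # input_shape=(64,) is an assumed input size
model.add(LeakyReLU(alpha=0.05))         # explicit LeakyReLU activation layer
model.add(Dense(1))                      # assumed single-output head
model.compile(optimizer="adam", loss="mse")  # assumed optimizer and loss
model.summary()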