I am working on Keras in Python and I have a neural network (see code below). Currently it works with only a ReLU activation.
For experimental reasons I would like to have some neurons on ReLU and some on softmax (or any other activation function). For example, in a layer with 20 neurons, I would like to have 10 with ReLU and 10 with softmax.
I have tried several different approaches, but always failed to get an output.
Do you know how I should do this?
# - Libraries
from keras.layers import Dense
from keras.models import Sequential
from keras.callbacks import EarlyStopping

# Stop training once validation loss has not improved for 2 consecutive epochs.
early_spotting_monitor = EarlyStopping(patience=2)

layers = 4
neurons = 20
# BUG FIX: Keras activation identifiers are lowercase strings; "ReLu" raises
# a ValueError ("Unknown activation function") when the layer is built.
act = "relu"

# - Create Neural Network
model = Sequential()
# First layer needs input_dim so Keras can build the weight matrix.
# NOTE(review): X_train / Y_train / n_months are defined elsewhere — confirm.
model.add(Dense(neurons, activation=act, input_dim=X_train.shape[1]))
layers -= 1
while layers > 0:
    # BUG FIX: the loop body must be indented (the pasted version was not).
    model.add(Dense(neurons, activation=act))
    layers -= 1
# Output layer: default linear activation, suitable for regression targets.
model.add(Dense(n_months))
model.compile(optimizer="adam", loss="mean_absolute_error")
model.fit(X_train, Y_train, validation_split=0.10, epochs=13,
          callbacks=[early_spotting_monitor])
EDIT: this is my (working) code now:
# - Libraries
from keras.callbacks import EarlyStopping
from keras.layers import Input, Dense
from keras.models import Model
# NOTE: in Keras >= 2.4 use `from keras.layers import concatenate` instead.
from keras.layers.merge import concatenate

early_spotting_monitor = EarlyStopping(patience=2)

# Input layer
visible = Input(shape=(X_train.shape[1],))

# First hidden "layer": four parallel branches, each with its own activation,
# concatenated into one 21-unit representation (14 + 3 + 2 + 2).
hidden11 = Dense(14, activation='relu')(visible)
hidden12 = Dense(3, activation='softplus')(visible)
hidden13 = Dense(2, activation='linear')(visible)
# BUG FIX: this branch previously rebound `hidden13`, silently discarding the
# linear branch above; give it its own name and include it in the merge.
hidden14 = Dense(2, activation='selu')(visible)
merge1 = concatenate([hidden11, hidden12, hidden13, hidden14])

# Second hidden layer: same mixed-activation pattern, fed from merge1.
hidden21 = Dense(14, activation='relu')(merge1)
hidden22 = Dense(3, activation='softplus')(merge1)
hidden23 = Dense(2, activation='linear')(merge1)
# BUG FIX: this line previously rebound `hidden13` from `visible` and was never
# used (dead code); presumably it was meant to be the selu branch of this
# layer, fed from merge1 — TODO confirm intent.
hidden24 = Dense(2, activation='selu')(merge1)
merge2 = concatenate([hidden21, hidden22, hidden23, hidden24])

hidden3 = Dense(20, activation='relu')(merge2)
# Linear output for regression over all target columns.
output = Dense(Y_train.shape[1], activation="linear")(hidden3)

model = Model(inputs=visible, outputs=output)
model.compile(optimizer="adam", loss="mean_absolute_error")
model.fit(X_train, Y_train, validation_split=0.10, epochs=13,
          callbacks=[early_spotting_monitor])  # starts training
# BUG FIX: removed a stray `return model` — `return` outside a function is a
# SyntaxError at module level; wrap this script in a function if you need it.
Slice the layer `x` into two (or more) parts `x_1, x_2, x_3, ...`, then apply your different activation functions `y_n = act_n(x_n)`, then concatenate the `y_n`s. – Suk