Commit c79dc8b5 by Pamela Osuna

multilabel version

parent 19fd77b7
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Flatten
from tensorflow.keras.layers import Conv1D, MaxPooling1D, LeakyReLU
# models
num_classes = 2
def model_architecture(c):
    """
    Builds the Keras model for the layer architecture indexed by c.

    c belongs to {0,1,2,3} and selects the layer architecture. The batch-size
    index b and the epoch index e (each in {0,1}) are decoded separately by
    choose_batch_epochs below; see the usage sketch after the helper functions.
    """
    # defining the keras model
    model = Sequential()
    if c == 0:
        model.add(Conv1D(64, kernel_size=3, activation='linear', input_shape=(30, 1)))
        model.add(Conv1D(64, 3, activation='relu'))
        model.add(MaxPooling1D(pool_size=2))
        model.add(Flatten())
        model.add(Dense(100, activation='relu'))
        model.add(Dense(num_classes, activation='sigmoid'))
    elif c == 1:
        model.add(Conv1D(16, kernel_size=3, activation='linear', input_shape=(30, 1), padding='same'))
        model.add(LeakyReLU(alpha=0.1))
        model.add(MaxPooling1D(2, padding='same'))
        model.add(Conv1D(64, 3, activation='linear', padding='same'))
        model.add(LeakyReLU(alpha=0.1))
        model.add(MaxPooling1D(pool_size=2, padding='same'))
        model.add(Flatten())
        model.add(Dense(128, activation='linear'))
        model.add(LeakyReLU(alpha=0.1))
        model.add(Dense(num_classes, activation='sigmoid'))
    elif c == 2:
        model.add(Conv1D(filters=64, kernel_size=3, activation='relu', input_shape=(30, 1)))
        model.add(Conv1D(filters=64, kernel_size=3, activation='relu'))
        model.add(Dropout(0.5))
        model.add(MaxPooling1D(pool_size=2))
        model.add(Flatten())
        model.add(Dense(100, activation='relu'))
        model.add(Dense(num_classes, activation='sigmoid'))
    elif c == 3:
        model.add(Conv1D(32, kernel_size=3, activation='relu', input_shape=(30, 1)))
        model.add(Conv1D(32, 3, activation='relu'))
        model.add(MaxPooling1D(pool_size=2))
        model.add(Dropout(0.25))
        model.add(Conv1D(64, 3, activation='relu'))
        model.add(Conv1D(64, 3, activation='relu'))
        model.add(MaxPooling1D(pool_size=2))
        model.add(Dropout(0.25))
        model.add(Flatten())
        model.add(Dense(256, activation='relu'))
        model.add(Dropout(0.5))
        model.add(Dense(num_classes, activation='sigmoid'))
    return model
def choose_batch_epochs(b, e):
    if b == 0 and e == 0:
        return 16, 12
    if b == 0 and e == 1:
        return 16, 512
    if b == 1 and e == 0:
        return 64, 12
    if b == 1 and e == 1:
        return 64, 512
def choose_balancing_method(o):
    if o == 0:
        return 'smote'
    elif o == 1:
        return 'adasyn'
    elif o == 2:
        return 'class_weight'
    elif o == 3:
        return 'undersampling'