Та же val_accuracy .. что-то не так в моем CNN

У меня такая же val_Accuracy... Я исправил регуляризатор ядра, выпал и изменил RELU на сигмоид. Но это не было исправлено.

В чем была проблема моей архи CNN?

Я ничего не знаю..

Я использую adam и ategorical_crossentropy... batchsize 256, epochs 50

import keras as keras
## 내가 처음에 짠 코드를 수정한 내용임.

input_shape = (48, 48, 1)
num_features = 64
num_labels = 7

model_2 = models.Sequential()

# 1. CONV2D-64 RELU -> kernel_size를 2.2에서 5.5로 변경
model_2.add(Conv2D(num_features, kernel_regularizer=l2(0.001), bias_regularizer=l2(0.01), kernel_size=(5, 5), strides=(1,1), activation='sigmoid', input_shape=input_shape))
# +. maxpool2d 추가
model_2.add(MaxPooling2D(pool_size=(5,5), strides=(2, 2)))

# 2. CONV2D-64 RELU MAXPOOL2D DROPOUT , 한번 더 conv2d 추가
model_2.add(Conv2D(num_features, kernel_regularizer=l2(0.001), bias_regularizer=l2(0.01),kernel_size=(3, 3), strides=(1,1), activation='sigmoid', padding='same'))
model_2.add(Conv2D(num_features, kernel_regularizer=l2(0.001), bias_regularizer=l2(0.01),kernel_size=(3, 3), strides=(1,1), activation='sigmoid', padding='same'))
model_2.add(layers.AveragePooling2D(pool_size=(3,3), strides=(2, 2))) #maxpooling을 average로 바꿈
#model_2.add(Dropout(0.1)) 

# 3. CONV2D-128 RELU
model_2.add(Conv2D(num_features*2, kernel_regularizer=l2(0.001), bias_regularizer=l2(0.01),kernel_size=(3, 3), strides=(1,1), activation='sigmoid', padding='same'))

# 4. CONV2D-128 RELU
model_2.add(Conv2D(num_features*2, kernel_regularizer=l2(0.001), bias_regularizer=l2(0.01),kernel_size=(3, 3), strides=(1,1), activation='sigmoid', padding='same'))
model_2.add(layers.AveragePooling2D(pool_size=(3,3), strides=(2, 2))) # 이거 추가

# 5. CONV2D-256 RELU 
model_2.add(Conv2D(num_features*2*2, kernel_regularizer=l2(0.001), bias_regularizer=l2(0.01),kernel_size=(3, 3), strides=(1,1), activation='sigmoid', padding='same'))

# 6. CONV2D-256 RELU MAXPOOL2D DROPOUT => maxpool을 averagepooling2d로 바꿈
model_2.add(Conv2D(num_features*2*2, kernel_regularizer=l2(0.001), bias_regularizer=l2(0.01),kernel_size=(3, 3), strides=(1,1), activation='sigmoid', padding='same'))
model_2.add(layers.AveragePooling2D(pool_size=(3,3), strides=(2, 2)))
#model_2.add(Dropout(0.1))

# 7. FLATTEN
model_2.add(Flatten())

# 8. FULLY CONNECTED RELU DROPOUT 
model_2.add(Dense(2*2*2*2*num_features, kernel_regularizer=l2(0.001), bias_regularizer=l2(0.01),activation='sigmoid'))
model_2.add(Dropout(0.2))
model_2.add(Dense(2*2*2*2*num_features, kernel_regularizer=l2(0.001), bias_regularizer=l2(0.01),activation='sigmoid'))
model_2.add(Dropout(0.2))
#model_2.add(Dense(2*num_features, activation='relu'))
#model_2.add(Dropout(0.5))

# 9. FULLY CONNECTED SOFTMAX
model_2.add(Dense(num_labels, activation='softmax'))

0 ответов

Другие вопросы по тегам